From f20cd46b96e3dca78cbc0c13cdff46b508f3bb8b Mon Sep 17 00:00:00 2001 From: Ryan Gerleve Date: Wed, 5 Sep 2018 10:17:10 -0400 Subject: [PATCH] Merging //UE4/Dev-Main to Dev-Networking (//UE4/Dev-Networking) #rb none #rnx [CL 4344260 by Ryan Gerleve in Dev-Networking branch] --- Engine/Build/Commit.gitdeps.xml | 1456 +++--- Engine/Config/BaseEngine.ini | 2 + .../LiveLink/Private/LiveLinkClient.cpp | 568 ++- .../LiveLink/Private/LiveLinkComponent.cpp | 13 + .../LiveLinkTimeSynchronizationSource.cpp | 117 + .../LiveLinkTimeSynchronizationSource.h | 55 + .../Source/LiveLink/Public/LiveLinkClient.h | 121 +- .../LiveLink/Public/LiveLinkComponent.h | 4 + ...vieSceneComposurePostMoveSettingsTrack.cpp | 2 +- .../EditorScriptingUtilities.uplugin | 9 +- .../EditorScriptingUtilities.Build.cs | 10 +- .../Private/EditorLevelLibrary.cpp | 126 +- .../Private/EditorPythonExecuter.cpp | 20 +- .../Private/EditorScriptingUtils.cpp | 63 +- .../Private/EditorScriptingUtils.h | 2 +- .../Private/EditorStaticMeshLibrary.cpp | 168 +- .../Public/EditorLevelLibrary.h | 41 +- .../Public/EditorStaticMeshLibrary.h | 47 +- .../DatasmithContent.Build.cs | 7 + .../DatasmithLandscapeTemplate.cpp | 54 + .../Public/DatasmithAssetImportData.h | 54 +- .../DatasmithLandscapeTemplate.h | 26 + .../Private/LevelVariantSets.cpp | 40 + .../Private/LevelVariantSetsActor.cpp | 50 + .../VariantManagerContent/Private/Variant.cpp | 123 + .../Private/VariantManagerContentModule.cpp | 24 + .../Private/VariantObjectBinding.cpp | 30 + .../Private/VariantSet.cpp | 79 + .../Public/LevelVariantSets.h | 41 + .../Public/LevelVariantSetsActor.h | 35 + .../VariantManagerContent/Public/Variant.h | 66 + .../Public/VariantManagerContentModule.h | 24 + .../Public/VariantObjectBinding.h | 56 + .../VariantManagerContent/Public/VariantSet.h | 61 + .../VariantManagerContent.Build.cs | 27 + .../VariantManagerContent.uplugin | 25 + .../Sequencer/MovieSceneControlRigTrack.cpp | 2 +- .../Config/BasePixelStreaming.ini | 2 + 
.../PixelStreaming/PixelStreaming.uplugin | 26 + .../PixelStreaming/PixelStreaming.Build.cs | 155 + .../PixelStreaming/Private/AudioEncoder.cpp | 89 + .../PixelStreaming/Private/AudioEncoder.h | 30 + .../Private/JavaScriptKeyCodes.inl | 270 ++ .../PixelStreaming/Private/NvVideoEncoder.cpp | 828 ++++ .../PixelStreaming/Private/NvVideoEncoder.h | 52 + .../Private/PixelStreamingInputComponent.cpp | 113 + .../Private/PixelStreamingInputDevice.cpp | 365 ++ .../Private/PixelStreamingInputDevice.h | 435 ++ .../Private/PixelStreamingPlugin.cpp | 182 + .../Private/PixelStreamingSettings.cpp | 30 + .../PixelStreaming/Private/ProtocolDefs.h | 79 + .../Private/ProxyConnection.cpp | 519 ++ .../PixelStreaming/Private/ProxyConnection.h | 60 + .../PixelStreaming/Private/Streamer.cpp | 338 ++ .../Source/PixelStreaming/Private/Streamer.h | 83 + .../Source/PixelStreaming/Private/Utils.h | 52 + .../PixelStreaming/Private/VideoEncoder.h | 86 + .../Public/IPixelStreamingPlugin.h | 60 + .../Public/PixelStreamingCommon.h | 13 + .../Public/PixelStreamingInputComponent.h | 89 + .../Public/PixelStreamingSettings.h | 33 + .../NvEncoder/NVIDIAVideoCodecSDK.tps | 11 + .../Source/ThirdParty/NvEncoder/nvEncodeAPI.h | 3216 +++++++++++++ .../ProxyLOD/Private/ProxyLODVolume.cpp | 147 + .../Source/ProxyLOD/Public/ProxyLODVolume.h | 58 + .../PythonScriptPlugin/Private/PyCore.cpp | 2 +- .../PythonScriptPlugin/Private/PyEngine.cpp | 5 + .../PythonScriptPlugin/Private/PyGenUtil.cpp | 2 +- .../PythonScriptPlugin/Private/PyTest.h | 2 +- .../PythonScriptPlugin/Private/PyUtil.cpp | 4 +- .../Private/PyWrapperObject.cpp | 3 + .../Private/PyWrapperTypeRegistry.cpp | 3 +- .../LevelSequencePlaybackController.cpp | 12 + .../VirtualCameraMovementComponent.cpp | 33 +- .../Public/LevelSequencePlaybackController.h | 5 + .../Public/VirtualCameraMovementComponent.h | 8 + .../MovieSceneNiagaraSystemTrack.cpp | 2 +- .../MovieSceneNiagaraBoolParameterTrack.cpp | 2 +- .../MovieSceneNiagaraColorParameterTrack.cpp | 2 +- 
.../MovieSceneNiagaraFloatParameterTrack.cpp | 2 +- ...MovieSceneNiagaraIntegerParameterTrack.cpp | 2 +- .../MovieSceneNiagaraVectorParameterTrack.cpp | 2 +- .../Private/NiagaraNodeReroute.cpp | 2 +- .../Plugins/Media/AjaMedia/AjaMedia.uplugin | 4 + .../Source/AjaMedia/Private/Aja/Aja.cpp | 4 +- .../AjaMedia/Private/AjaMediaModule.cpp | 120 +- .../Source/AjaMedia/Private/AjaMediaPrivate.h | 9 +- .../Private/Assets/AjaCustomTimeStep.cpp | 19 +- .../Private/Assets/AjaMediaFinder.cpp | 502 +- .../Private/Assets/AjaMediaSource.cpp | 82 +- .../Private/Assets/AjaTimecodeProvider.cpp | 23 +- .../Private/Player/AjaMediaAudioSample.h | 29 +- .../Private/Player/AjaMediaPlayer.cpp | 134 +- .../AjaMedia/Private/Player/AjaMediaPlayer.h | 10 +- .../Private/Player/AjaMediaTextureSample.h | 51 +- .../AjaMedia/Public/AjaCustomTimeStep.h | 10 + .../Source/AjaMedia/Public/AjaMediaFinder.h | 191 +- .../Source/AjaMedia/Public/AjaMediaSource.h | 62 +- .../AjaMediaEditor/AjaMediaEditor.Build.cs | 3 + .../Private/AjaMediaEditorModule.cpp | 3 + .../AjaMediaSourceDetailCustomization.cpp | 170 + .../AjaMediaSourceDetailCustomization.h | 29 + .../MediaPermutationsSelectorBuilder.cpp | 109 + .../MediaPermutationsSelectorBuilder.h | 25 + .../Private/AjaMediaCapture.cpp | 69 +- .../AjaMediaOutput/Private/AjaMediaOutput.cpp | 20 +- .../AjaMediaOutput/Public/AjaMediaCapture.h | 4 + .../Source/ThirdParty/Build/include/AJALib.h | 25 +- .../BlackmagicMedia/BlackmagicMedia.uplugin | 42 + .../BlackmagicMedia/BlackmagicMedia.Build.cs | 52 + .../Assets/BlackmagicCustomTimeStep.cpp | 144 + .../Private/Assets/BlackmagicMediaFinder.cpp | 215 + .../Private/Assets/BlackmagicMediaOutput.cpp | 21 + .../Private/Assets/BlackmagicMediaSource.cpp | 96 + .../Assets/BlackmagicTimecodeProvider.cpp | 167 + .../Private/Blackmagic/Blackmagic.cpp | 114 + .../Private/Blackmagic/Blackmagic.h | 21 + .../Private/BlackmagicMediaModule.cpp | 148 + .../Private/BlackmagicMediaPrivate.h | 42 + 
.../Player/BlackmagicMediaAudioSample.h | 59 + .../Private/Player/BlackmagicMediaPlayer.cpp | 400 ++ .../Private/Player/BlackmagicMediaPlayer.h | 112 + .../Player/BlackmagicMediaTextureSample.h | 71 + .../BlackmagicMediaAllowPlatformTypes.h | 22 + .../Shared/BlackmagicMediaHidePlatformTypes.h | 18 + .../Public/BlackmagicCustomTimeStep.h | 70 + .../Public/BlackmagicHardwareSync.h | 33 + .../Public/BlackmagicMediaFinder.h | 144 + .../Public/BlackmagicMediaOutput.h | 107 + .../Public/BlackmagicMediaSettings.h | 17 + .../Public/BlackmagicMediaSource.h | 95 + .../Public/BlackmagicTimecodeProvider.h | 73 + .../Public/IBlackmagicMediaModule.h | 28 + .../BlackmagicMediaEditor.Build.cs | 32 + .../Private/BlackmagicMediaEditorModule.cpp | 135 + .../BlackmagicMediaModeCustomization.cpp | 134 + .../BlackmagicMediaModeCustomization.h | 39 + .../BlackmagicMediaPortCustomization.cpp | 122 + .../BlackmagicMediaPortCustomization.h | 29 + .../BlackmagicMediaOutputFactoryNew.cpp | 39 + .../BlackmagicMediaOutputFactoryNew.h | 22 + .../BlackmagicMediaSourceFactoryNew.cpp | 39 + .../BlackmagicMediaSourceFactoryNew.h | 25 + .../BlackmagicMediaFactory.Build.cs | 43 + .../Private/BlackmagicMediaFactoryModule.cpp | 136 + .../BlackmagicMediaOutput.Build.cs | 44 + .../BlackmagicMediaFrameGrabberProtocol.cpp | 78 + .../Private/BlackmagicMediaOutputModule.cpp | 18 + .../Private/BlackmagicMediaViewportOutput.cpp | 128 + .../BlackmagicMediaViewportOutputImpl.cpp | 463 ++ .../BlackmagicMediaViewportOutputImpl.h | 127 + .../BlackmagicMediaFrameGrabberProtocol.h | 47 + .../Public/BlackmagicMediaViewportOutput.h | 42 + .../Public/IBlackmagicMediaOutputModule.h | 41 + .../ThirdParty/Build/Blackmagic.Build.cs | 46 + .../ThirdParty/Build/Include/BlackmagicLib.h | 287 ++ .../Dll/Include/DeckLinkAPIVersion.h | 37 + .../Source/ThirdParty/Dll/Source/List.h | 100 + .../ThirdParty/Dll/Source/PrivateDevice.cpp | 150 + .../ThirdParty/Dll/Source/PrivateDevice.h | 54 + .../ThirdParty/Dll/Source/PrivateFrame.cpp | 
114 + .../ThirdParty/Dll/Source/PrivateFrame.h | 30 + .../ThirdParty/Dll/Source/PrivatePort.cpp | 670 +++ .../ThirdParty/Dll/Source/PrivatePort.h | 156 + .../Source/ThirdParty/Dll/Source/RefCount.h | 130 + .../Source/ThirdParty/Dll/Source/Thread.h | 159 + .../Source/ThirdParty/Dll/Source/VideoIO.cpp | 383 ++ .../ThirdParty/Dll/Source/VideoIOLog.cpp | 19 + .../Source/ThirdParty/Dll/Source/VideoIOLog.h | 49 + .../ThirdParty/Dll/Source/VideoIOPrivate.h | 98 + .../Source/ThirdParty/Dll/Source/dllmain.cpp | 21 + .../Source/ThirdParty/Dll/Source/stdafx.cpp | 3 + .../Source/ThirdParty/Dll/Source/stdafx.h | 37 + .../Source/ThirdParty/Dll/Source/targetver.h | 10 + .../Source/ThirdParty/Dll/VideoIO.sln | 22 + .../Source/ThirdParty/Dll/VideoIO.vcxproj | 134 + .../Private/MediaAssets/ProxyMediaOutput.cpp | 22 +- .../Private/MediaAssets/ProxyMediaSource.cpp | 20 + .../MediaBundleTimeSynchronizationSource.cpp | 97 +- .../Private/MediaFrameworkUtilitiesModule.cpp | 5 +- .../Private/Profile/MediaProfile.cpp | 89 + .../Private/Profile/MediaProfileManager.cpp | 9 +- .../Public/MediaAssets/ProxyMediaOutput.h | 8 + .../Public/MediaAssets/ProxyMediaSource.h | 8 + .../MediaBundleTimeSynchronizationSource.h | 13 +- .../Public/Profile/MediaProfile.h | 40 +- .../MediaPlayerEditor.Build.cs | 10 +- .../Widgets/SMediaPermutationsSelector.h | 195 + .../Widgets/SMediaPermutationsSelector.inl | 297 ++ .../MediaPlayerTimeSynchronizationSource.cpp | 116 +- .../Private/TimecodeSynchronizer.cpp | 1085 +++-- .../Private/TimecodeSynchronizerModule.cpp | 4 +- .../MediaPlayerTimeSynchronizationSource.h | 13 +- .../Public/TimecodeSynchronizer.h | 453 +- .../TimecodeSynchronizerEditorToolkit.cpp | 4 +- .../STimecodeSynchronizerSourceViewer.cpp | 71 +- .../STimecodeSynchronizerSourceViewport.cpp | 29 +- .../STimecodeSynchronizerSourceViewport.h | 6 +- .../Transport/TcpDeserializedMessage.cpp | 5 + .../Transport/TcpDeserializedMessage.h | 1 + .../Private/Shared/UdpMessageSegment.h | 112 +- 
.../Private/Tests/UdpMessageSegmenterTest.cpp | 7 +- .../Private/Tests/UdpMessagingTestTypes.h | 4 + .../Tests/UdpSerializeMessageTaskTest.cpp | 7 +- .../Transport/UdpDeserializedMessage.cpp | 349 +- .../Transport/UdpDeserializedMessage.h | 8 +- .../Private/Transport/UdpMessageBeacon.cpp | 45 +- .../Private/Transport/UdpMessageBeacon.h | 12 +- .../Private/Transport/UdpMessageProcessor.cpp | 355 +- .../Private/Transport/UdpMessageProcessor.h | 92 +- .../Private/Transport/UdpMessageSegmenter.cpp | 66 +- .../Private/Transport/UdpMessageSegmenter.h | 64 +- .../Private/Transport/UdpMessageTransport.cpp | 23 +- .../Private/Transport/UdpReassembledMessage.h | 93 +- .../Transport/UdpSerializeMessageTask.cpp | 144 +- .../Private/Transport/UdpSerializedMessage.h | 33 +- .../Private/Tunnel/UdpMessageTunnel.cpp | 4 +- .../Private/UdpMessagingPrivate.h | 14 +- .../Source/UdpMessaging/UdpMessaging.Build.cs | 1 + .../UdpMessaging/UdpMessaging.uplugin | 14 +- .../Misc/LevelSequenceEditorActorSpawner.cpp | 24 +- .../SequencerScripting.uplugin | 2 +- .../Source/EditableMesh/EditableMesh.cpp | 57 +- .../Source/EditableMesh/Public/EditableMesh.h | 4 +- .../OculusEditor/Private/OculusToolWidget.cpp | 2 +- .../DisplayClusterClusterNodeCtrlMaster.cpp | 11 + .../DisplayClusterClusterNodeCtrlMaster.h | 6 + .../DisplayClusterClusterNodeCtrlSlave.cpp | 5 + .../DisplayClusterClusterNodeCtrlSlave.h | 1 + .../DisplayClusterNodeCtrlStandalone.cpp | 5 + .../DisplayClusterNodeCtrlStandalone.h | 1 + .../Cluster/DisplayClusterClusterManager.h | 7 + .../Cluster/IPDisplayClusterClusterManager.h | 5 + .../Basics/DisplayClusterGameEngine.cpp | 9 + .../Misc/DisplayClusterTypesConverter.h | 43 + .../IPDisplayClusterClusterSyncProtocol.h | 5 + .../DisplayClusterClusterSyncClient.cpp | 23 +- .../DisplayClusterClusterSyncClient.h | 1 + .../DisplayClusterClusterSyncMsg.h | 7 + .../DisplayClusterClusterSyncService.cpp | 14 + .../DisplayClusterClusterSyncService.h | 1 + 
.../DisplayClusterNativePresentHandler.cpp | 21 + .../DisplayClusterNativePresentHandler.h | 22 + .../Render/DisplayClusterRenderManager.cpp | 34 +- .../Render/DisplayClusterRenderManager.h | 5 +- .../Config/DefaultEngine.ini | 6 + .../IOSTargetSettingsCustomization.cpp | 4 +- .../Private/SVisualLoggerFilters.cpp | 2 +- .../Private/MeshDescriptionOperations.cpp | 252 + .../Public/MeshDescriptionOperations.h | 9 + .../Public/UVMapSettings.h | 54 + .../Private/MeshMergeUtilities.cpp | 76 +- .../Public/IMeshReductionInterfaces.h | 2 +- .../Private/AnimationEditor.cpp | 4 +- .../Private/CollectionContextMenu.cpp | 2 +- .../Private/PathContextMenu.cpp | 2 +- .../Private/CollisionProfileDetails.cpp | 2 +- .../Private/ComponentTransformDetails.cpp | 17 +- .../Private/TextureLODSettingsDetails.cpp | 6 +- .../EditorStyle/Private/SlateEditorStyle.cpp | 45 +- .../FoliageEdit/Private/SFoliageEdit.cpp | 2 +- .../FoliageEdit/Private/SFoliagePalette.cpp | 4 +- .../SGraphNodeK2CreateDelegate.cpp | 2 +- .../Editor/Kismet/Private/CallStackViewer.cpp | 2 +- ...EditorDetailCustomization_NewLandscape.cpp | 257 +- ...peEditorDetailCustomization_NewLandscape.h | 3 - .../Private/NewLandscapeUtils.cpp | 293 ++ .../Public/LandscapeEditorObject.h | 2 +- .../Public/NewLandscapeUtils.h | 22 + .../Layers/Private/ActorLayerViewModel.cpp | 2 +- .../Source/Editor/Matinee/Private/Matinee.cpp | 2 +- .../Source/Editor/Matinee/Private/Matinee.h | 2 +- .../Private/MeshProxyTool/MeshProxyTool.cpp | 2 +- .../MeshProxyTool/SMeshProxyDialog.cpp | 72 +- .../Private/SSlotNameReferenceWindow.cpp | 4 +- .../Private/PropertyEditorHelpers.cpp | 2 + .../Private/SequenceRecorder.cpp | 3 + .../Editor/Sequencer/Private/SSequencer.cpp | 3 +- .../Editor/Sequencer/Private/Sequencer.cpp | 35 +- .../Editor/Sequencer/Private/Sequencer.h | 5 + .../Editor/Sequencer/Public/ISequencer.h | 5 + .../StaticMeshEditorViewportClient.cpp | 2 + .../Private/STimecodeProvider.cpp | 22 +- .../Private/STimecodeProviderTab.cpp | 16 +- 
.../Private/TimeManagementEditorModule.cpp | 6 +- .../Public/STimecodeProvider.h | 3 +- .../UnrealEd/Classes/Editor/EditorEngine.h | 50 +- .../UnrealEd/Classes/Editor/TransBuffer.h | 29 +- .../UnrealEd/Classes/Editor/Transactor.h | 118 +- .../UnrealEd/Classes/Editor/UnrealEdEngine.h | 2 +- .../UserDefinedStructEditorData.h | 2 +- .../Editor/UnrealEd/Private/EditorEngine.cpp | 7 +- .../UnrealEd/Private/EditorSelectUtils.cpp | 9 +- .../Editor/UnrealEd/Private/EditorServer.cpp | 110 +- .../UnrealEd/Private/EditorTransaction.cpp | 191 +- .../UnrealEd/Private/EditorViewportClient.cpp | 20 + .../UnrealEd/Private/EditorWorldExtension.cpp | 166 +- .../Private/Factories/EditorFactories.cpp | 6 +- .../UnrealEd/Private/Fbx/FbxCompareWindow.cpp | 2 +- .../Private/Fbx/FbxMaterialConflictWindow.cpp | 6 +- .../Private/Fbx/FbxStaticMeshImport.cpp | 2 +- .../Editor/UnrealEd/Private/GroupActor.cpp | 14 + .../Kismet2/KismetReinstanceUtilities.cpp | 2 +- .../UnrealEd/Private/LevelEditorViewport.cpp | 22 - .../Editor/UnrealEd/Private/PackageTools.cpp | 197 +- .../Editor/UnrealEd/Private/PlayLevel.cpp | 4 +- .../UnrealEd/Private/ScopedTransaction.cpp | 2 +- .../Toolkits/SGlobalTabSwitchingDialog.cpp | 2 +- .../Editor/UnrealEd/Private/UnrealEdSrv.cpp | 18 +- .../Private/UserDefinedStructEditorData.cpp | 42 +- .../Editor/UnrealEd/Public/EditorUndoClient.h | 7 +- .../UnrealEd/Public/EditorViewportClient.h | 11 + .../UnrealEd/Public/EditorWorldExtension.h | 47 +- .../Editor/UnrealEd/Public/FbxImporter.h | 9 +- .../UnrealEd/Public/LevelEditorViewport.h | 8 - .../Editor/UnrealEd/Public/PackageTools.h | 24 + .../Editor/VREditor/Public/VREditorMode.h | 2 +- .../Editor/VREditor/UI/VREditorUISystem.cpp | 42 +- .../Editor/VREditor/UI/VREditorUISystem.h | 5 +- .../Editor/VREditor/VREditorActions.cpp | 1 + .../Source/Editor/VREditor/VREditorMode.cpp | 7 +- .../Editor/VREditor/VREditorModeManager.cpp | 29 +- .../Editor/VREditor/VREditorModeManager.h | 3 + .../VREditorMotionControllerInteractor.cpp | 
7 +- .../VREditorMotionControllerInteractor.h | 6 +- .../ActorViewportTransformable.cpp | 2 +- .../Public/ViewportWorldInteraction.h | 15 +- .../ViewportWorldInteraction.cpp | 80 +- .../Programs/MemoryProfiler2/App.config | 1 + .../PixelStreaming/WebRTCProxy/README.md | 7 + .../WebRTCProxy/WebRTCProxy.sln | 34 + .../WebRTCProxy/WebRTCProxy.vcxproj | 189 + .../WebRTCProxy/WebRTCProxy.vcxproj.filters | 110 + .../WebRTCProxy/bin/Start_AWS_WebRTCProxy.bat | 6 + .../WebRTCProxy/bin/Start_WebRTCProxy.bat | 5 + .../WebRTCProxy/src/AsyncConnection.cpp | 86 + .../WebRTCProxy/src/AsyncConnection.h | 52 + .../WebRTCProxy/src/CirrusConnection.cpp | 148 + .../WebRTCProxy/src/CirrusConnection.h | 52 + .../WebRTCProxy/src/ClientSession.cpp | 258 + .../WebRTCProxy/src/ClientSession.h | 57 + .../WebRTCProxy/src/CmdLine.cpp | 100 + .../PixelStreaming/WebRTCProxy/src/CmdLine.h | 74 + .../WebRTCProxy/src/Conductor.cpp | 453 ++ .../WebRTCProxy/src/Conductor.h | 75 + .../WebRTCProxy/src/Console.cpp | 122 + .../PixelStreaming/WebRTCProxy/src/Console.h | 92 + .../WebRTCProxy/src/CrashDetection.cpp | 105 + .../WebRTCProxy/src/CrashDetection.h | 7 + .../WebRTCProxy/src/FileLogOutput.cpp | 93 + .../WebRTCProxy/src/FileLogOutput.h | 67 + .../WebRTCProxy/src/H264FrameBuffer.h | 52 + .../WebRTCProxy/src/Logging.cpp | 106 + .../PixelStreaming/WebRTCProxy/src/Logging.h | 115 + .../WebRTCProxy/src/NetworkAudioCapturer.cpp | 324 ++ .../WebRTCProxy/src/NetworkAudioCapturer.h | 184 + .../WebRTCProxy/src/NetworkVideoCapturer.cpp | 36 + .../WebRTCProxy/src/NetworkVideoCapturer.h | 42 + .../WebRTCProxy/src/ScopeGuard.h | 91 + .../WebRTCProxy/src/Semaphore.h | 81 + .../src/SetSessionDescriptionObserver.h | 43 + .../WebRTCProxy/src/SharedQueue.h | 136 + .../WebRTCProxy/src/StringUtils.cpp | 152 + .../WebRTCProxy/src/StringUtils.h | 104 + .../WebRTCProxy/src/TimeUtils.cpp | 106 + .../WebRTCProxy/src/TimeUtils.h | 52 + .../WebRTCProxy/src/UE4Connection.cpp | 107 + .../WebRTCProxy/src/UE4Connection.h | 47 + 
.../WebRTCProxy/src/VideoEncoder.cpp | 301 ++ .../WebRTCProxy/src/VideoEncoder.h | 83 + .../WebRTCProxy/src/WebRTCLogging.cpp | 68 + .../WebRTCProxy/src/WebRTCLogging.h | 8 + .../WebRTCProxy/src/WebRTCProxy.cpp | 294 ++ .../WebRTCProxy/src/WebRTCProxyCommon.cpp | 111 + .../WebRTCProxy/src/WebRTCProxyCommon.h | 142 + .../WebRTCProxy/src/WebRTCProxyPCH.cpp | 3 + .../WebRTCProxy/src/WebRTCProxyPCH.h | 94 + .../WebRTCProxy/src/targetver.h | 10 + .../WebServers/Matchmaker/matchmaker.js | 122 + .../WebServers/Matchmaker/package-lock.json | 994 ++++ .../WebServers/Matchmaker/package.json | 10 + .../WebServers/Matchmaker/run.bat | 14 + .../WebServers/Matchmaker/setup.bat | 6 + .../WebServers/SignallingWebServer/cirrus.js | 790 +++ .../modules/authentication/db/index.js | 2 + .../authentication/db/store_password.js | 78 + .../modules/authentication/db/users.js | 35 + .../modules/authentication/index.js | 4 + .../modules/authentication/init.js | 109 + .../SignallingWebServer/modules/config.js | 49 + .../modules/httpsClient.js | 95 + .../SignallingWebServer/modules/logging.js | 108 + .../SignallingWebServer/package-lock.json | 1655 +++++++ .../SignallingWebServer/package.json | 15 + .../SignallingWebServer/public/login.css | 49 + .../SignallingWebServer/public/player.css | 289 ++ .../WebServers/SignallingWebServer/run.bat | 15 + .../WebServers/SignallingWebServer/runAWS.bat | 16 + .../SignallingWebServer/runNoSetup.bat | 13 + .../SignallingWebServer/scripts/app.js | 1238 +++++ .../scripts/webRtcPlayer.js | 288 ++ .../WebServers/SignallingWebServer/setup.bat | 6 + .../SignallingWebServer/tps/Bootstrap.tps | 13 + .../SignallingWebServer/tps/Express.tps | 13 + .../SignallingWebServer/tps/FontAwesome.tps | 11 + .../SignallingWebServer/tps/Helmet.tps | 13 + .../SignallingWebServer/tps/JQuery.tps | 13 + .../SignallingWebServer/tps/Popper.tps | 13 + .../SignallingWebServer/tps/Socket.io.tps | 13 + .../SignallingWebServer/tps/WebRTCadapter.tps | 13 + 
.../SignallingWebServer/tps/Yargs.tps | 13 + .../tps/expression-session.tps | 13 + .../SignallingWebServer/tps/nodebcryptjs.tps | 13 + .../tps/passport-local.tps | 13 + .../SignallingWebServer/tps/passport.tps | 13 + .../Configuration/TargetRules.cs | 5 +- .../Configuration/UEBuildTarget.cs | 26 +- .../UnrealBuildTool/System/RulesAssembly.cs | 8 +- .../Private/Commands/UserInterfaceCommand.cpp | 6 +- .../UnrealFrontend/UnrealFrontend.Build.cs | 10 +- .../UnrealFrontend/UnrealFrontend.Target.cs | 2 +- .../UnrealHeaderTool/Private/HeaderParser.cpp | 40 +- .../Private/Widgets/Testing/STestSuite.cpp | 5 +- Engine/Source/Runtime/Cbor/Cbor.Build.cs | 17 + .../Runtime/Cbor/Private/CborModule.cpp | 30 + .../Runtime/Cbor/Private/CborReader.cpp | 301 ++ .../Runtime/Cbor/Private/CborWriter.cpp | 178 + .../Runtime/Cbor/Private/Tests/CborTests.cpp | 288 ++ .../Source/Runtime/Cbor/Public/CborGlobals.h | 7 + .../Source/Runtime/Cbor/Public/CborReader.h | 61 + Engine/Source/Runtime/Cbor/Public/CborTypes.h | 283 ++ .../Source/Runtime/Cbor/Public/CborWriter.h | 55 + .../Core/Private/Containers/Ticker.cpp | 11 +- .../ICUInternationalization.cpp | 2 +- .../StringTableRegistry.cpp | 7 +- .../TextLocalizationManager.cpp | 13 +- .../Runtime/Core/Private/Math/UnrealMath.cpp | 17 +- .../Core/Private/Misc/DefaultValueHelper.cpp | 25 - .../Tests/Internationalization/TextTest.cpp | 2 - .../Source/Runtime/Core/Public/Async/Future.h | 191 +- .../Runtime/Core/Public/Containers/LruCache.h | 85 +- .../Core/Public/HAL/FeedbackContextAnsi.h | 7 +- .../Internationalization.h | 5 + .../Internationalization/StringTableCore.h | 7 + .../Core/Public/Math/UnrealMathUtility.h | 9 + .../Runtime/Core/Public/Misc/ITransaction.h | 137 +- .../Core/Public/Serialization/MemoryReader.h | 2 +- .../Public/UObject/EnterpriseObjectVersion.h | 3 + .../CoreUObject/Private/UObject/Obj.cpp | 25 +- .../Private/UObject/PropertyTag.cpp | 21 +- .../Private/UObject/UObjectGlobals.cpp | 18 +- 
.../Public/UObject/GCObjectScopeGuard.h | 32 + .../CoreUObject/Public/UObject/Object.h | 14 +- .../CoreUObject/Public/UObject/PropertyTag.h | 3 + .../Public/UObject/StructOnScope.h | 78 +- .../Classes/Components/SceneComponent.h | 2 +- .../Classes/EditorFramework/AssetImportData.h | 1 + .../Runtime/Engine/Classes/Engine/Engine.h | 86 +- .../Classes/Engine/GameViewportClient.h | 9 +- .../Engine/Classes/Engine/MeshMerging.h | 10 +- .../Engine/Classes/Engine/StaticMesh.h | 10 + .../Engine/SystemTimeTimecodeProvider.h | 63 + .../Engine/Classes/Engine/TimecodeProvider.h | 15 +- .../Engine/Classes/GameFramework/Actor.h | 30 +- .../Classes/Kismet/KismetSystemLibrary.h | 47 +- .../Engine/Classes/Sound/DialogueWave.h | 6 + .../Source/Runtime/Engine/Private/Actor.cpp | 9 +- .../Engine/Private/ActorConstruction.cpp | 2 +- .../Runtime/Engine/Private/ActorEditor.cpp | 168 +- .../Private/Components/SceneComponent.cpp | 15 +- .../Runtime/Engine/Private/DialogueWave.cpp | 116 +- .../EditorFramework/AssetImportData.cpp | 12 +- .../Engine/SystemTimeTimecodeProvider.cpp | 17 + .../Private/Engine/TimecodeProvider.cpp | 17 - .../Runtime/Engine/Private/GameEngine.cpp | 16 +- .../Engine/Private/GameViewportClient.cpp | 27 + .../Internationalization/StringTable.cpp | 7 +- .../Engine/Private/KismetSystemLibrary.cpp | 38 + .../Engine/Private/PlayerCameraManager.cpp | 2 +- .../Engine/Private/PrimitiveSceneProxy.cpp | 5 +- .../Engine/Private/Slate/SceneViewport.cpp | 147 +- .../Runtime/Engine/Private/StaticMesh.cpp | 28 + .../Runtime/Engine/Private/UnrealEngine.cpp | 244 +- .../Public/ComponentInstanceDataCache.h | 8 + .../Private/LiveLinkSourceSettings.cpp | 20 + .../Public/ILiveLinkClient.h | 9 +- .../Public/LiveLinkSourceSettings.h | 55 +- .../LiveLinkInterface/Public/LiveLinkTypes.h | 12 +- .../Runtime/Media/Public/IMediaAudioSample.h | 10 + .../Runtime/Media/Public/IMediaBinarySample.h | 10 + .../Media/Public/IMediaOverlaySample.h | 11 +- .../Media/Public/IMediaTextureSample.h | 10 + 
.../Public/TimeSynchronizableMediaSource.h | 6 +- .../MediaIOCore/Private/MediaCapture.cpp | 216 +- .../Private/Player/MediaIOCorePlayerBase.cpp | 88 +- .../Runtime/MediaIOCore/Public/MediaCapture.h | 46 +- .../Public/MediaIOCoreAudioSampleBase.h | 64 + .../Public/MediaIOCoreBinarySampleBase.h | 16 +- .../Public/MediaIOCorePlayerBase.h | 26 +- .../Public/MediaIOCoreTextureSampleBase.h | 84 +- .../Messaging/Private/Bus/MessageBus.cpp | 3 + .../Messaging/Private/Bus/MessageBus.h | 2 +- .../Messaging/Private/Bus/MessageContext.cpp | 9 + .../Messaging/Private/Bus/MessageContext.h | 8 + .../Runtime/Messaging/Public/IMessageBus.h | 4 +- .../Messaging/Public/IMessageContext.h | 16 + .../MessagingCommon/Public/MessageEndpoint.h | 38 +- .../MessagingRpc/Private/MessageRpcClient.cpp | 1 + .../MessagingRpc/Private/MessageRpcServer.cpp | 1 + .../Tracks/MovieScene3DAttachTrack.cpp | 6 +- .../Private/Tracks/MovieScene3DPathTrack.cpp | 2 +- .../Tracks/MovieScene3DTransformTrack.cpp | 2 +- .../Tracks/MovieSceneActorReferenceTrack.cpp | 2 +- .../Private/Tracks/MovieSceneAudioTrack.cpp | 2 +- .../Private/Tracks/MovieSceneBoolTrack.cpp | 2 +- .../Private/Tracks/MovieSceneByteTrack.cpp | 2 +- .../Private/Tracks/MovieSceneColorTrack.cpp | 2 +- .../Private/Tracks/MovieSceneEnumTrack.cpp | 2 +- .../Tracks/MovieSceneEulerTransformTrack.cpp | 2 +- .../Private/Tracks/MovieSceneEventTrack.cpp | 2 +- .../Private/Tracks/MovieSceneFadeTrack.cpp | 2 +- .../Private/Tracks/MovieSceneFloatTrack.cpp | 2 +- .../Private/Tracks/MovieSceneIntegerTrack.cpp | 2 +- .../Tracks/MovieSceneLevelVisibilityTrack.cpp | 2 +- ...eSceneMaterialParameterCollectionTrack.cpp | 2 +- .../Tracks/MovieSceneMaterialTrack.cpp | 2 +- .../MovieSceneParticleParameterTrack.cpp | 2 +- .../Tracks/MovieSceneParticleTrack.cpp | 2 +- .../Tracks/MovieScenePropertyTrack.cpp | 1 + .../Private/Tracks/MovieSceneSlomoTrack.cpp | 2 +- .../Private/Tracks/MovieSceneStringTrack.cpp | 2 +- .../Tracks/MovieSceneTransformTrack.cpp | 2 +- 
.../Private/Tracks/MovieSceneVectorTrack.cpp | 2 +- .../CborStructDeserializerBackend.cpp | 262 + .../Backends/CborStructSerializerBackend.cpp | 234 + .../JsonStructDeserializerBackend.cpp | 135 +- .../StructDeserializerBackendUtilities.h | 101 + .../Private/Tests/StructSerializerTest.cpp | 17 +- .../Backends/CborStructDeserializerBackend.h | 45 + .../Backends/CborStructSerializerBackend.h | 39 + .../Serialization/Serialization.Build.cs | 1 + .../Application/SlateApplication.cpp | 22 +- .../Widgets/Layout/SExpandableArea.cpp | 4 +- .../Widgets/Text/SlateEditableTextLayout.cpp | 8 +- .../Framework/Application/SlateApplication.h | 11 + .../Public/Widgets/Layout/SExpandableArea.h | 2 +- .../SlateCore/Private/Input/HittestGrid.cpp | 2 + .../SlateCore/Public/Input/HittestGrid.h | 23 + .../Public/Rendering/SlateRenderer.h | 8 + .../Private/SlateRHIRenderer.cpp | 6 + .../Public/TimeSynchronizationSource.h | 173 +- .../Animation/MovieScene2DTransformTrack.cpp | 2 +- .../Animation/MovieSceneMarginTrack.cpp | 2 +- .../UMG/Private/Components/ExpandableArea.cpp | 2 +- .../D3D11RHI/Private/D3D11Viewport.cpp | 63 +- .../Private/Windows/WindowsD3D11Viewport.cpp | 122 +- .../Windows/D3D11RHI/Public/D3D11Viewport.h | 5 + .../include/Win64/VS2017/api/array_view.h | 284 ++ .../Win64/VS2017/api/audio/audio_frame.h | 132 + .../Win64/VS2017/api/audio/audio_mixer.h | 80 + .../VS2017/api/audio/echo_canceller3_config.h | 159 + .../api/audio/echo_canceller3_factory.h | 38 + .../Win64/VS2017/api/audio/echo_control.h | 55 + .../api/audio_codecs/L16/audio_decoder_L16.h | 47 + .../api/audio_codecs/L16/audio_encoder_L16.h | 51 + .../api/audio_codecs/audio_codec_pair_id.h | 74 + .../VS2017/api/audio_codecs/audio_decoder.h | 180 + .../api/audio_codecs/audio_decoder_factory.h | 53 + .../audio_decoder_factory_template.h | 170 + .../VS2017/api/audio_codecs/audio_encoder.h | 250 + .../api/audio_codecs/audio_encoder_factory.h | 63 + .../audio_encoder_factory_template.h | 195 + 
.../VS2017/api/audio_codecs/audio_format.h | 137 + .../builtin_audio_decoder_factory.h | 25 + .../builtin_audio_encoder_factory.h | 25 + .../audio_codecs/g711/audio_decoder_g711.h | 46 + .../audio_codecs/g711/audio_encoder_g711.h | 51 + .../audio_codecs/g722/audio_decoder_g722.h | 42 + .../audio_codecs/g722/audio_encoder_g722.h | 43 + .../g722/audio_encoder_g722_config.h | 27 + .../audio_codecs/ilbc/audio_decoder_ilbc.h | 39 + .../audio_codecs/ilbc/audio_encoder_ilbc.h | 43 + .../ilbc/audio_encoder_ilbc_config.h | 29 + .../audio_codecs/isac/audio_decoder_isac.h | 32 + .../isac/audio_decoder_isac_fix.h | 39 + .../isac/audio_decoder_isac_float.h | 44 + .../audio_codecs/isac/audio_encoder_isac.h | 32 + .../isac/audio_encoder_isac_fix.h | 44 + .../isac/audio_encoder_isac_float.h | 49 + .../audio_codecs/opus/audio_decoder_opus.h | 41 + .../audio_codecs/opus/audio_encoder_opus.h | 43 + .../opus/audio_encoder_opus_config.h | 73 + .../include/Win64/VS2017/api/audio_options.h | 196 + .../Win64/VS2017/api/call/audio_sink.h | 53 + .../VS2017/api/call/callfactoryinterface.h | 36 + .../include/Win64/VS2017/api/call/transport.h | 47 + .../include/Win64/VS2017/api/candidate.h | 208 + .../include/Win64/VS2017/api/cryptoparams.h | 39 + .../Win64/VS2017/api/datachannelinterface.h | 182 + .../Win64/VS2017/api/dtmfsenderinterface.h | 97 + .../Win64/VS2017/api/fakemetricsobserver.h | 57 + .../include/Win64/VS2017/api/fec_controller.h | 91 + .../rev.23789/include/Win64/VS2017/api/jsep.h | 238 + .../Win64/VS2017/api/jsepicecandidate.h | 93 + .../Win64/VS2017/api/jsepsessiondescription.h | 89 + .../VS2017/api/mediaconstraintsinterface.h | 149 + .../Win64/VS2017/api/mediastreaminterface.h | 336 ++ .../Win64/VS2017/api/mediastreamproxy.h | 44 + .../Win64/VS2017/api/mediastreamtrackproxy.h | 65 + .../include/Win64/VS2017/api/mediatypes.h | 31 + .../include/Win64/VS2017/api/notifier.h | 61 + .../include/Win64/VS2017/api/optional.h | 443 ++ .../Win64/VS2017/api/ortc/mediadescription.h | 53 + 
.../VS2017/api/ortc/ortcfactoryinterface.h | 232 + .../api/ortc/ortcrtpreceiverinterface.h | 84 + .../VS2017/api/ortc/ortcrtpsenderinterface.h | 77 + .../api/ortc/packettransportinterface.h | 39 + .../ortc/rtptransportcontrollerinterface.h | 57 + .../VS2017/api/ortc/rtptransportinterface.h | 124 + .../VS2017/api/ortc/sessiondescription.h | 45 + .../VS2017/api/ortc/srtptransportinterface.h | 48 + .../VS2017/api/ortc/udptransportinterface.h | 49 + .../VS2017/api/peerconnectionfactoryproxy.h | 77 + .../VS2017/api/peerconnectioninterface.h | 1550 ++++++ .../Win64/VS2017/api/peerconnectionproxy.h | 156 + .../include/Win64/VS2017/api/proxy.h | 572 +++ .../include/Win64/VS2017/api/refcountedbase.h | 43 + .../include/Win64/VS2017/api/rtcerror.h | 310 ++ .../Win64/VS2017/api/rtceventlogoutput.h | 39 + .../include/Win64/VS2017/api/rtp_headers.h | 172 + .../include/Win64/VS2017/api/rtpparameters.h | 589 +++ .../Win64/VS2017/api/rtpreceiverinterface.h | 145 + .../Win64/VS2017/api/rtpsenderinterface.h | 93 + .../VS2017/api/rtptransceiverinterface.h | 128 + .../setremotedescriptionobserverinterface.h | 31 + .../include/Win64/VS2017/api/stats/rtcstats.h | 332 ++ .../Win64/VS2017/api/stats/rtcstats_objects.h | 442 ++ .../api/stats/rtcstatscollectorcallback.h | 30 + .../Win64/VS2017/api/stats/rtcstatsreport.h | 104 + .../include/Win64/VS2017/api/statstypes.h | 450 ++ .../Win64/VS2017/api/test/audioproc_float.h | 42 + .../api/test/create_videocodec_test_fixture.h | 34 + .../Win64/VS2017/api/test/fakeconstraints.h | 116 + .../Win64/VS2017/api/test/mock_audio_mixer.h | 32 + .../Win64/VS2017/api/test/mock_rtpreceiver.h | 38 + .../Win64/VS2017/api/test/mock_rtpsender.h | 37 + .../api/test/mock_video_decoder_factory.h | 42 + .../api/test/mock_video_encoder_factory.h | 45 + .../VS2017/api/test/videocodec_test_fixture.h | 159 + .../VS2017/api/test/videocodec_test_stats.h | 149 + .../VS2017/api/transport/bitrate_settings.h | 35 + .../VS2017/api/transport/network_control.h | 93 + 
.../VS2017/api/transport/network_types.h | 198 + .../api/transport/test/mock_network_control.h | 26 + .../transport/test/network_control_tester.h | 77 + .../include/Win64/VS2017/api/turncustomizer.h | 46 + .../include/Win64/VS2017/api/umametrics.h | 194 + .../Win64/VS2017/api/units/data_rate.h | 133 + .../Win64/VS2017/api/units/data_size.h | 105 + .../Win64/VS2017/api/units/time_delta.h | 150 + .../Win64/VS2017/api/units/timestamp.h | 95 + .../Win64/VS2017/api/video/encoded_frame.h | 92 + .../Win64/VS2017/api/video/i420_buffer.h | 111 + .../api/video/video_bitrate_allocation.h | 85 + .../VS2017/api/video/video_content_type.h | 41 + .../Win64/VS2017/api/video/video_frame.h | 115 + .../VS2017/api/video/video_frame_buffer.h | 138 + .../Win64/VS2017/api/video/video_rotation.h | 26 + .../VS2017/api/video/video_sink_interface.h | 32 + .../VS2017/api/video/video_source_interface.h | 61 + .../VS2017/api/video/video_stream_decoder.h | 51 + .../api/video/video_stream_decoder_create.h | 32 + .../video/video_stream_encoder_interface.h | 107 + .../Win64/VS2017/api/video/video_timing.h | 124 + .../builtin_video_decoder_factory.h | 25 + .../builtin_video_encoder_factory.h | 26 + .../api/video_codecs/sdp_video_format.h | 44 + .../VS2017/api/video_codecs/video_decoder.h | 77 + .../api/video_codecs/video_decoder_factory.h | 39 + .../VS2017/api/video_codecs/video_encoder.h | 211 + .../api/video_codecs/video_encoder_config.h | 162 + .../api/video_codecs/video_encoder_factory.h | 57 + .../Win64/VS2017/api/videosinkinterface.h | 18 + .../Win64/VS2017/api/videosourceinterface.h | 18 + .../Win64/VS2017/api/videosourceproxy.h | 41 + .../include/Win64/VS2017/common_types.h | 584 +++ .../common_video/h264/h264_bitstream_parser.h | 65 + .../VS2017/common_video/h264/h264_common.h | 88 + .../VS2017/common_video/h264/pps_parser.h | 62 + .../common_video/h264/profile_level_id.h | 19 + .../VS2017/common_video/h264/sps_parser.h | 53 + .../common_video/h264/sps_vui_rewriter.h | 54 + 
.../common_video/include/bitrate_adjuster.h | 86 + .../common_video/include/frame_callback.h | 59 + .../common_video/include/i420_buffer_pool.h | 64 + .../include/incoming_video_stream.h | 44 + .../include/video_bitrate_allocator.h | 39 + .../VS2017/common_video/include/video_frame.h | 77 + .../common_video/include/video_frame_buffer.h | 108 + .../libyuv/include/webrtc_libyuv.h | 126 + .../VS2017/common_video/video_render_frames.h | 52 + .../encoder/rtc_event_log_encoder.h | 35 + .../encoder/rtc_event_log_encoder_legacy.h | 110 + .../logging/rtc_event_log/events/rtc_event.h | 65 + .../events/rtc_event_alr_state.h | 33 + .../rtc_event_audio_network_adaptation.h | 37 + .../events/rtc_event_audio_playout.h | 32 + .../rtc_event_audio_receive_stream_config.h | 39 + .../rtc_event_audio_send_stream_config.h | 39 + .../events/rtc_event_bwe_update_delay_based.h | 36 + .../events/rtc_event_bwe_update_loss_based.h | 36 + .../events/rtc_event_ice_candidate_pair.h | 51 + .../rtc_event_ice_candidate_pair_config.h | 92 + .../events/rtc_event_probe_cluster_created.h | 38 + .../events/rtc_event_probe_result_failure.h | 40 + .../events/rtc_event_probe_result_success.h | 33 + .../events/rtc_event_rtcp_packet_incoming.h | 34 + .../events/rtc_event_rtcp_packet_outgoing.h | 34 + .../events/rtc_event_rtp_packet_incoming.h | 36 + .../events/rtc_event_rtp_packet_outgoing.h | 38 + .../rtc_event_video_receive_stream_config.h | 36 + .../rtc_event_video_send_stream_config.h | 36 + .../VS2017/logging/rtc_event_log/icelogger.h | 51 + .../rtc_event_log/mock/mock_rtc_event_log.h | 42 + .../output/rtc_event_log_output_file.h | 57 + .../logging/rtc_event_log/rtc_event_log.h | 75 + .../rtc_event_log/rtc_event_log_factory.h | 36 + .../rtc_event_log_factory_interface.h | 40 + .../rtc_event_log/rtc_event_log_parser.h | 252 + .../rtc_event_log/rtc_event_log_parser_new.h | 921 ++++ .../rtc_event_log_unittest_helper.h | 93 + .../logging/rtc_event_log/rtc_stream_config.h | 58 + 
.../media/base/adaptedvideotracksource.h | 85 + .../Win64/VS2017/media/base/audiosource.h | 49 + .../include/Win64/VS2017/media/base/codec.h | 245 + .../Win64/VS2017/media/base/cryptoparams.h | 17 + .../include/Win64/VS2017/media/base/device.h | 36 + .../Win64/VS2017/media/base/fakeframesource.h | 47 + .../Win64/VS2017/media/base/fakemediaengine.h | 971 ++++ .../VS2017/media/base/fakenetworkinterface.h | 228 + .../include/Win64/VS2017/media/base/fakertp.h | 140 + .../VS2017/media/base/fakevideocapturer.h | 80 + .../VS2017/media/base/fakevideorenderer.h | 142 + .../VS2017/media/base/h264_profile_level_id.h | 107 + .../Win64/VS2017/media/base/mediachannel.h | 878 ++++ .../Win64/VS2017/media/base/mediaconfig.h | 83 + .../Win64/VS2017/media/base/mediaconstants.h | 146 + .../Win64/VS2017/media/base/mediaengine.h | 165 + .../Win64/VS2017/media/base/rtpdataengine.h | 115 + .../Win64/VS2017/media/base/rtputils.h | 91 + .../Win64/VS2017/media/base/streamparams.h | 364 ++ .../Win64/VS2017/media/base/testutils.h | 135 + .../Win64/VS2017/media/base/turnutils.h | 30 + .../Win64/VS2017/media/base/videoadapter.h | 98 + .../VS2017/media/base/videobroadcaster.h | 71 + .../Win64/VS2017/media/base/videocapturer.h | 289 ++ .../VS2017/media/base/videocapturerfactory.h | 32 + .../Win64/VS2017/media/base/videocommon.h | 231 + .../Win64/VS2017/media/base/videosourcebase.h | 49 + .../Win64/VS2017/media/engine/adm_helpers.h | 27 + .../Win64/VS2017/media/engine/apm_helpers.h | 50 + .../Win64/VS2017/media/engine/constants.h | 25 + .../engine/convert_legacy_video_factory.h | 38 + .../VS2017/media/engine/fakewebrtccall.h | 346 ++ .../media/engine/fakewebrtcdeviceinfo.h | 109 + .../media/engine/fakewebrtcvcmfactory.h | 49 + .../engine/fakewebrtcvideocapturemodule.h | 90 + .../media/engine/fakewebrtcvideoengine.h | 136 + .../media/engine/internaldecoderfactory.h | 30 + .../media/engine/internalencoderfactory.h | 33 + .../media/engine/multiplexcodecfactory.h | 51 + 
.../media/engine/nullwebrtcvideoengine.h | 49 + .../VS2017/media/engine/payload_type_mapper.h | 56 + .../VS2017/media/engine/scopedvideodecoder.h | 34 + .../VS2017/media/engine/scopedvideoencoder.h | 33 + .../Win64/VS2017/media/engine/simulcast.h | 63 + .../media/engine/simulcast_encoder_adapter.h | 116 + .../videodecodersoftwarefallbackwrapper.h | 70 + .../videoencodersoftwarefallbackwrapper.h | 103 + .../engine/vp8_encoder_simulcast_proxy.h | 56 + .../VS2017/media/engine/webrtcmediaengine.h | 92 + .../VS2017/media/engine/webrtcvideocapturer.h | 85 + .../media/engine/webrtcvideocapturerfactory.h | 29 + .../media/engine/webrtcvideodecoderfactory.h | 56 + .../media/engine/webrtcvideoencoderfactory.h | 50 + .../VS2017/media/engine/webrtcvideoengine.h | 526 ++ .../VS2017/media/engine/webrtcvoiceengine.h | 291 ++ .../Win64/VS2017/media/sctp/sctptransport.h | 192 + .../VS2017/media/sctp/sctptransportinternal.h | 135 + .../audio_coding/acm2/acm_codec_database.h | 82 + .../audio_coding/acm2/acm_receive_test.h | 97 + .../modules/audio_coding/acm2/acm_receiver.h | 298 ++ .../modules/audio_coding/acm2/acm_resampler.h | 39 + .../modules/audio_coding/acm2/acm_send_test.h | 89 + .../audio_coding/acm2/call_statistics.h | 64 + .../modules/audio_coding/acm2/codec_manager.h | 75 + .../modules/audio_coding/acm2/rent_a_codec.h | 201 + .../audio_network_adaptor_impl.h | 93 + .../bitrate_controller.h | 54 + .../channel_controller.h | 53 + .../audio_network_adaptor/controller.h | 43 + .../controller_manager.h | 123 + .../audio_network_adaptor/debug_dump_writer.h | 55 + .../audio_network_adaptor/dtx_controller.h | 49 + .../audio_network_adaptor/event_log_writer.h | 42 + .../fec_controller_plr_based.h | 72 + .../fec_controller_rplr_based.h | 66 + .../frame_length_controller.h | 91 + .../include/audio_network_adaptor.h | 52 + .../include/audio_network_adaptor_config.h | 49 + .../mock/mock_audio_network_adaptor.h | 49 + .../mock/mock_controller.h | 30 + .../mock/mock_controller_manager.h | 
33 + .../mock/mock_debug_dump_writer.h | 40 + .../util/threshold_curve.h | 118 + .../audio_coding/codecs/audio_decoder.h | 20 + .../audio_coding/codecs/audio_encoder.h | 20 + .../codecs/audio_format_conversion.h | 24 + .../codecs/cng/audio_encoder_cng.h | 96 + .../audio_coding/codecs/cng/webrtc_cng.h | 99 + .../codecs/g711/audio_decoder_pcm.h | 70 + .../codecs/g711/audio_encoder_pcm.h | 124 + .../modules/audio_coding/codecs/g711/g711.h | 344 ++ .../audio_coding/codecs/g711/g711_interface.h | 135 + .../codecs/g722/audio_decoder_g722.h | 79 + .../codecs/g722/audio_encoder_g722.h | 68 + .../audio_coding/codecs/g722/g722_enc_dec.h | 160 + .../audio_coding/codecs/g722/g722_interface.h | 182 + .../audio_coding/codecs/ilbc/abs_quant.h | 39 + .../audio_coding/codecs/ilbc/abs_quant_loop.h | 33 + .../codecs/ilbc/audio_decoder_ilbc.h | 46 + .../codecs/ilbc/audio_encoder_ilbc.h | 54 + .../codecs/ilbc/augmented_cb_corr.h | 41 + .../audio_coding/codecs/ilbc/bw_expand.h | 36 + .../audio_coding/codecs/ilbc/cb_construct.h | 39 + .../audio_coding/codecs/ilbc/cb_mem_energy.h | 34 + .../codecs/ilbc/cb_mem_energy_augmentation.h | 31 + .../codecs/ilbc/cb_mem_energy_calc.h | 33 + .../audio_coding/codecs/ilbc/cb_search.h | 35 + .../audio_coding/codecs/ilbc/cb_search_core.h | 40 + .../codecs/ilbc/cb_update_best_index.h | 38 + .../audio_coding/codecs/ilbc/chebyshev.h | 37 + .../audio_coding/codecs/ilbc/comp_corr.h | 39 + .../audio_coding/codecs/ilbc/constants.h | 92 + .../codecs/ilbc/create_augmented_vec.h | 37 + .../modules/audio_coding/codecs/ilbc/decode.h | 38 + .../codecs/ilbc/decode_residual.h | 40 + .../codecs/ilbc/decoder_interpolate_lsf.h | 38 + .../audio_coding/codecs/ilbc/defines.h | 221 + .../modules/audio_coding/codecs/ilbc/do_plc.h | 41 + .../modules/audio_coding/codecs/ilbc/encode.h | 35 + .../audio_coding/codecs/ilbc/energy_inverse.h | 32 + .../audio_coding/codecs/ilbc/enh_upsample.h | 33 + .../audio_coding/codecs/ilbc/enhancer.h | 39 + .../codecs/ilbc/enhancer_interface.h | 
34 + .../codecs/ilbc/filtered_cb_vecs.h | 38 + .../audio_coding/codecs/ilbc/frame_classify.h | 29 + .../audio_coding/codecs/ilbc/gain_dequant.h | 36 + .../audio_coding/codecs/ilbc/gain_quant.h | 35 + .../audio_coding/codecs/ilbc/get_cd_vec.h | 36 + .../audio_coding/codecs/ilbc/get_lsp_poly.h | 47 + .../audio_coding/codecs/ilbc/get_sync_seq.h | 40 + .../audio_coding/codecs/ilbc/hp_input.h | 34 + .../audio_coding/codecs/ilbc/hp_output.h | 34 + .../modules/audio_coding/codecs/ilbc/ilbc.h | 258 + .../audio_coding/codecs/ilbc/index_conv_dec.h | 28 + .../audio_coding/codecs/ilbc/index_conv_enc.h | 32 + .../audio_coding/codecs/ilbc/init_decode.h | 35 + .../audio_coding/codecs/ilbc/init_encode.h | 33 + .../audio_coding/codecs/ilbc/interpolate.h | 35 + .../codecs/ilbc/interpolate_samples.h | 34 + .../audio_coding/codecs/ilbc/lpc_encode.h | 39 + .../audio_coding/codecs/ilbc/lsf_check.h | 33 + .../codecs/ilbc/lsf_interpolate_to_poly_dec.h | 37 + .../codecs/ilbc/lsf_interpolate_to_poly_enc.h | 38 + .../audio_coding/codecs/ilbc/lsf_to_lsp.h | 34 + .../audio_coding/codecs/ilbc/lsf_to_poly.h | 33 + .../audio_coding/codecs/ilbc/lsp_to_lsf.h | 35 + .../audio_coding/codecs/ilbc/my_corr.h | 36 + .../codecs/ilbc/nearest_neighbor.h | 36 + .../audio_coding/codecs/ilbc/pack_bits.h | 34 + .../audio_coding/codecs/ilbc/poly_to_lsf.h | 33 + .../audio_coding/codecs/ilbc/poly_to_lsp.h | 36 + .../audio_coding/codecs/ilbc/refiner.h | 43 + .../codecs/ilbc/simple_interpolate_lsf.h | 46 + .../codecs/ilbc/simple_lpc_analysis.h | 35 + .../codecs/ilbc/simple_lsf_dequant.h | 34 + .../codecs/ilbc/simple_lsf_quant.h | 37 + .../modules/audio_coding/codecs/ilbc/smooth.h | 36 + .../codecs/ilbc/smooth_out_data.h | 35 + .../audio_coding/codecs/ilbc/sort_sq.h | 36 + .../audio_coding/codecs/ilbc/split_vq.h | 38 + .../codecs/ilbc/state_construct.h | 35 + .../audio_coding/codecs/ilbc/state_search.h | 38 + .../audio_coding/codecs/ilbc/swap_bytes.h | 34 + .../audio_coding/codecs/ilbc/unpack_bits.h | 34 + 
.../modules/audio_coding/codecs/ilbc/vq3.h | 36 + .../modules/audio_coding/codecs/ilbc/vq4.h | 36 + .../audio_coding/codecs/ilbc/window32_w32.h | 35 + .../audio_coding/codecs/ilbc/xcorr_coef.h | 38 + .../codecs/isac/audio_decoder_isac_t.h | 59 + .../codecs/isac/audio_decoder_isac_t_impl.h | 107 + .../codecs/isac/audio_encoder_isac_t.h | 98 + .../codecs/isac/audio_encoder_isac_t_impl.h | 189 + .../audio_coding/codecs/isac/bandwidth_info.h | 24 + .../isac/fix/include/audio_decoder_isacfix.h | 22 + .../isac/fix/include/audio_encoder_isacfix.h | 22 + .../codecs/isac/fix/include/isacfix.h | 637 +++ .../codecs/isac/fix/source/arith_routins.h | 159 + .../isac/fix/source/bandwidth_estimator.h | 134 + .../codecs/isac/fix/source/codec.h | 228 + .../codecs/isac/fix/source/entropy_coding.h | 189 + .../audio_coding/codecs/isac/fix/source/fft.h | 39 + .../isac/fix/source/filterbank_internal.h | 90 + .../isac/fix/source/filterbank_tables.h | 52 + .../codecs/isac/fix/source/isac_fix_type.h | 123 + .../isac/fix/source/lpc_masking_model.h | 69 + .../codecs/isac/fix/source/lpc_tables.h | 97 + .../codecs/isac/fix/source/pitch_estimator.h | 65 + .../isac/fix/source/pitch_gain_tables.h | 44 + .../codecs/isac/fix/source/pitch_lag_tables.h | 101 + .../codecs/isac/fix/source/settings.h | 215 + .../fix/source/spectrum_ar_model_tables.h | 95 + .../codecs/isac/fix/source/structs.h | 382 ++ .../codecs/isac/locked_bandwidth_info.h | 56 + .../isac/main/include/audio_decoder_isac.h | 22 + .../isac/main/include/audio_encoder_isac.h | 22 + .../codecs/isac/main/include/isac.h | 724 +++ .../codecs/isac/main/source/arith_routines.h | 62 + .../isac/main/source/bandwidth_estimator.h | 185 + .../codecs/isac/main/source/codec.h | 209 + .../codecs/isac/main/source/crc.h | 46 + .../codecs/isac/main/source/encode_lpc_swb.h | 282 ++ .../codecs/isac/main/source/entropy_coding.h | 343 ++ .../codecs/isac/main/source/fft.h | 43 + .../isac/main/source/filter_functions.h | 23 + 
.../codecs/isac/main/source/isac_float_type.h | 117 + .../codecs/isac/main/source/isac_vad.h | 45 + .../codecs/isac/main/source/lpc_analysis.h | 44 + .../isac/main/source/lpc_gain_swb_tables.h | 49 + .../isac/main/source/lpc_shape_swb12_tables.h | 65 + .../isac/main/source/lpc_shape_swb16_tables.h | 78 + .../codecs/isac/main/source/lpc_tables.h | 100 + .../isac/main/source/os_specific_inline.h | 41 + .../codecs/isac/main/source/pitch_estimator.h | 31 + .../codecs/isac/main/source/pitch_filter.h | 42 + .../isac/main/source/pitch_gain_tables.h | 45 + .../isac/main/source/pitch_lag_tables.h | 114 + .../codecs/isac/main/source/settings.h | 205 + .../main/source/spectrum_ar_model_tables.h | 78 + .../codecs/isac/main/source/structs.h | 494 ++ .../codecs/isac/main/util/utility.h | 144 + .../codecs/legacy_encoded_audio_frame.h | 48 + .../codecs/opus/audio_decoder_opus.h | 55 + .../codecs/opus/audio_encoder_opus.h | 181 + .../audio_coding/codecs/opus/opus_inst.h | 36 + .../audio_coding/codecs/opus/opus_interface.h | 432 ++ .../codecs/pcm16b/audio_decoder_pcm16b.h | 44 + .../codecs/pcm16b/audio_encoder_pcm16b.h | 50 + .../audio_coding/codecs/pcm16b/pcm16b.h | 68 + .../codecs/pcm16b/pcm16b_common.h | 22 + .../codecs/red/audio_encoder_copy_red.h | 77 + .../codecs/tools/audio_codec_speed_test.h | 91 + .../include/audio_coding_module.h | 795 +++ .../include/audio_coding_module_typedefs.h | 50 + .../modules/audio_coding/neteq/accelerate.h | 81 + .../audio_coding/neteq/audio_multi_vector.h | 139 + .../modules/audio_coding/neteq/audio_vector.h | 168 + .../audio_coding/neteq/background_noise.h | 131 + .../audio_coding/neteq/buffer_level_filter.h | 49 + .../audio_coding/neteq/comfort_noise.h | 71 + .../audio_coding/neteq/cross_correlation.h | 50 + .../audio_coding/neteq/decision_logic.h | 169 + .../audio_coding/neteq/decision_logic_fax.h | 58 + .../neteq/decision_logic_normal.h | 113 + .../audio_coding/neteq/decoder_database.h | 256 + .../modules/audio_coding/neteq/defines.h | 52 + 
.../audio_coding/neteq/delay_manager.h | 181 + .../audio_coding/neteq/delay_peak_detector.h | 75 + .../modules/audio_coding/neteq/dsp_helper.h | 144 + .../modules/audio_coding/neteq/dtmf_buffer.h | 114 + .../audio_coding/neteq/dtmf_tone_generator.h | 54 + .../modules/audio_coding/neteq/expand.h | 161 + .../audio_coding/neteq/expand_uma_logger.h | 54 + .../audio_coding/neteq/include/neteq.h | 309 ++ .../VS2017/modules/audio_coding/neteq/merge.h | 99 + .../neteq/mock/mock_buffer_level_filter.h | 37 + .../neteq/mock/mock_decoder_database.h | 61 + .../neteq/mock/mock_delay_manager.h | 62 + .../neteq/mock/mock_delay_peak_detector.h | 35 + .../neteq/mock/mock_dtmf_buffer.h | 38 + .../neteq/mock/mock_dtmf_tone_generator.h | 35 + .../audio_coding/neteq/mock/mock_expand.h | 64 + .../neteq/mock/mock_external_decoder_pcm16b.h | 98 + .../neteq/mock/mock_packet_buffer.h | 68 + .../neteq/mock/mock_red_payload_splitter.h | 29 + .../neteq/mock/mock_statistics_calculator.h | 27 + .../modules/audio_coding/neteq/nack_tracker.h | 209 + .../audio_coding/neteq/neteq_decoder_enum.h | 56 + .../modules/audio_coding/neteq/neteq_impl.h | 450 ++ .../modules/audio_coding/neteq/normal.h | 73 + .../modules/audio_coding/neteq/packet.h | 124 + .../audio_coding/neteq/packet_buffer.h | 153 + .../audio_coding/neteq/post_decode_vad.h | 72 + .../audio_coding/neteq/preemptive_expand.h | 88 + .../audio_coding/neteq/random_vector.h | 50 + .../audio_coding/neteq/red_payload_splitter.h | 51 + .../VS2017/modules/audio_coding/neteq/rtcp.h | 58 + .../neteq/statistics_calculator.h | 202 + .../modules/audio_coding/neteq/sync_buffer.h | 101 + .../modules/audio_coding/neteq/tick_timer.h | 110 + .../modules/audio_coding/neteq/time_stretch.h | 116 + .../audio_coding/neteq/timestamp_scaler.h | 68 + .../audio_coding/neteq/tools/audio_checksum.h | 65 + .../audio_coding/neteq/tools/audio_loop.h | 59 + .../audio_coding/neteq/tools/audio_sink.h | 71 + .../neteq/tools/constant_pcm_packet_source.h | 55 + 
.../neteq/tools/encode_neteq_input.h | 70 + .../neteq/tools/fake_decode_from_file.h | 73 + .../neteq/tools/input_audio_file.h | 59 + .../neteq/tools/neteq_delay_analyzer.h | 76 + .../neteq/tools/neteq_external_decoder_test.h | 64 + .../audio_coding/neteq/tools/neteq_input.h | 83 + .../neteq/tools/neteq_packet_source_input.h | 84 + .../neteq/tools/neteq_performance_test.h | 32 + .../neteq/tools/neteq_quality_test.h | 172 + .../neteq/tools/neteq_replacement_input.h | 51 + .../neteq/tools/neteq_stats_getter.h | 102 + .../audio_coding/neteq/tools/neteq_test.h | 107 + .../neteq/tools/output_audio_file.h | 50 + .../neteq/tools/output_wav_file.h | 43 + .../modules/audio_coding/neteq/tools/packet.h | 118 + .../audio_coding/neteq/tools/packet_source.h | 50 + .../neteq/tools/resample_input_audio_file.h | 51 + .../neteq/tools/rtc_event_log_source.h | 65 + .../neteq/tools/rtp_file_source.h | 66 + .../audio_coding/neteq/tools/rtp_generator.h | 84 + .../modules/audio_coding/test/ACMTest.h | 21 + .../modules/audio_coding/test/APITest.h | 162 + .../modules/audio_coding/test/Channel.h | 129 + .../audio_coding/test/EncodeDecodeTest.h | 124 + .../modules/audio_coding/test/PCMFile.h | 76 + .../audio_coding/test/PacketLossTest.h | 67 + .../modules/audio_coding/test/RTPFile.h | 125 + .../modules/audio_coding/test/TestAllCodecs.h | 83 + .../modules/audio_coding/test/TestRedFec.h | 50 + .../modules/audio_coding/test/TestStereo.h | 116 + .../modules/audio_coding/test/TestVADDTX.h | 103 + .../audio_coding/test/TwoWayCommunication.h | 61 + .../modules/audio_coding/test/iSACTest.h | 80 + .../modules/audio_coding/test/opus_test.h | 61 + .../modules/audio_coding/test/utility.h | 138 + .../audio_device/android/aaudio_player.h | 146 + .../audio_device/android/aaudio_recorder.h | 128 + .../audio_device/android/aaudio_wrapper.h | 127 + .../audio_device/android/audio_common.h | 28 + .../android/audio_device_template.h | 454 ++ .../audio_device/android/audio_manager.h | 224 + 
.../audio_device/android/audio_record_jni.h | 163 + .../audio_device/android/audio_track_jni.h | 155 + .../modules/audio_device/android/build_info.h | 85 + .../audio_device/android/ensure_initialized.h | 17 + .../audio_device/android/opensles_common.h | 62 + .../audio_device/android/opensles_player.h | 195 + .../audio_device/android/opensles_recorder.h | 193 + .../audio_device/audio_device_buffer.h | 245 + .../audio_device/audio_device_config.h | 31 + .../audio_device/audio_device_generic.h | 139 + .../modules/audio_device/audio_device_impl.h | 170 + .../modules/audio_device/audio_device_name.h | 47 + .../audio_device/dummy/audio_device_dummy.h | 114 + .../audio_device/dummy/file_audio_device.h | 167 + .../dummy/file_audio_device_factory.h | 42 + .../modules/audio_device/fine_audio_buffer.h | 94 + .../audio_device/include/audio_device.h | 169 + .../include/audio_device_data_observer.h | 52 + .../include/audio_device_default.h | 130 + .../include/audio_device_defines.h | 157 + .../audio_device/include/fake_audio_device.h | 32 + .../audio_device/include/mock_audio_device.h | 101 + .../include/mock_audio_transport.h | 60 + .../audio_device/include/test_audio_device.h | 171 + .../audio_device/ios/audio_device_ios.h | 293 ++ .../audio_device/ios/audio_session_observer.h | 42 + .../audio_device/ios/objc/RTCAudioSession.h | 11 + .../ios/objc/RTCAudioSessionConfiguration.h | 11 + .../ios/objc/RTCAudioSessionDelegateAdapter.h | 30 + .../ios/voice_processing_audio_unit.h | 137 + .../linux/alsasymboltable_linux.h | 149 + .../linux/audio_device_alsa_linux.h | 202 + .../linux/audio_device_pulse_linux.h | 337 ++ .../linux/audio_mixer_manager_alsa_linux.h | 72 + .../linux/audio_mixer_manager_pulse_linux.h | 113 + .../linux/latebindingsymboltable_linux.h | 178 + .../linux/pulseaudiosymboltable_linux.h | 106 + .../audio_device/mac/audio_device_mac.h | 337 ++ .../mac/audio_mixer_manager_mac.h | 72 + .../mac/portaudio/pa_memorybarrier.h | 127 + .../mac/portaudio/pa_ringbuffer.h | 233 
+ .../audio_device/mock_audio_device_buffer.h | 33 + .../audio_device/win/audio_device_core_win.h | 318 ++ .../audio_device/win/core_audio_utility_win.h | 433 ++ .../audio_mixer/audio_frame_manipulator.h | 30 + .../modules/audio_mixer/audio_mixer_impl.h | 115 + .../default_output_rate_calculator.h | 35 + .../modules/audio_mixer/frame_combiner.h | 58 + .../audio_mixer/gain_change_calculator.h | 40 + .../audio_mixer/output_rate_calculator.h | 29 + .../modules/audio_mixer/sine_wave_generator.h | 38 + .../modules/audio_processing/aec/aec_common.h | 39 + .../modules/audio_processing/aec/aec_core.h | 334 ++ .../aec/aec_core_optimized_methods.h | 80 + .../audio_processing/aec/aec_resampler.h | 39 + .../audio_processing/aec/echo_cancellation.h | 299 ++ .../aec3/adaptive_fir_filter.h | 180 + .../audio_processing/aec3/aec3_common.h | 120 + .../modules/audio_processing/aec3/aec3_fft.h | 73 + .../modules/audio_processing/aec3/aec_state.h | 214 + .../audio_processing/aec3/block_framer.h | 47 + .../audio_processing/aec3/block_processor.h | 65 + .../aec3/block_processor_metrics.h | 47 + .../aec3/cascaded_biquad_filter.h | 75 + .../audio_processing/aec3/coherence_gain.h | 77 + .../aec3/comfort_noise_generator.h | 68 + .../modules/audio_processing/aec3/decimator.h | 40 + .../audio_processing/aec3/delay_estimate.h | 31 + .../aec3/downsampled_render_buffer.h | 57 + .../audio_processing/aec3/echo_audibility.h | 82 + .../audio_processing/aec3/echo_canceller3.h | 137 + .../aec3/echo_path_delay_estimator.h | 65 + .../aec3/echo_path_variability.h | 39 + .../audio_processing/aec3/echo_remover.h | 55 + .../aec3/echo_remover_metrics.h | 80 + .../audio_processing/aec3/erl_estimator.h | 47 + .../audio_processing/aec3/erle_estimator.h | 61 + .../audio_processing/aec3/fft_buffer.h | 58 + .../modules/audio_processing/aec3/fft_data.h | 96 + .../audio_processing/aec3/filter_analyzer.h | 93 + .../audio_processing/aec3/frame_blocker.h | 48 + .../aec3/main_filter_update_gain.h | 82 + 
.../audio_processing/aec3/matched_filter.h | 130 + .../aec3/matched_filter_lag_aggregator.h | 50 + .../audio_processing/aec3/matrix_buffer.h | 57 + .../aec3/mock/mock_block_processor.h | 40 + .../aec3/mock/mock_echo_remover.h | 43 + .../aec3/mock/mock_render_delay_buffer.h | 73 + .../aec3/mock/mock_render_delay_controller.h | 40 + .../audio_processing/aec3/render_buffer.h | 102 + .../aec3/render_delay_buffer.h | 83 + .../aec3/render_delay_controller.h | 47 + .../aec3/render_delay_controller_metrics.h | 51 + .../aec3/render_signal_analyzer.h | 60 + .../aec3/residual_echo_estimator.h | 99 + .../audio_processing/aec3/reverb_model.h | 68 + .../aec3/reverb_model_fallback.h | 53 + .../aec3/shadow_filter_update_gain.h | 71 + .../audio_processing/aec3/skew_estimator.h | 50 + .../aec3/stationarity_estimator.h | 116 + .../audio_processing/aec3/subtractor.h | 133 + .../audio_processing/aec3/subtractor_output.h | 61 + .../aec3/suppression_filter.h | 44 + .../audio_processing/aec3/suppression_gain.h | 94 + .../aec3/suppression_gain_limiter.h | 49 + .../audio_processing/aec3/vector_buffer.h | 57 + .../audio_processing/aec3/vector_math.h | 212 + .../aec_dump/aec_dump_factory.h | 47 + .../audio_processing/aec_dump/aec_dump_impl.h | 81 + .../aec_dump/capture_stream_info.h | 66 + .../audio_processing/aec_dump/mock_aec_dump.h | 51 + .../aec_dump/write_to_file_task.h | 58 + .../modules/audio_processing/aecm/aecm_core.h | 436 ++ .../audio_processing/aecm/aecm_defines.h | 87 + .../aecm/echo_control_mobile.h | 209 + .../VS2017/modules/audio_processing/agc/agc.h | 56 + .../audio_processing/agc/agc_manager_direct.h | 113 + .../audio_processing/agc/gain_map_internal.h | 275 ++ .../audio_processing/agc/legacy/analog_agc.h | 132 + .../audio_processing/agc/legacy/digital_agc.h | 79 + .../agc/legacy/gain_control.h | 247 + .../audio_processing/agc/loudness_histogram.h | 92 + .../modules/audio_processing/agc/mock_agc.h | 35 + .../modules/audio_processing/agc/utility.h | 23 + 
.../audio_processing/agc2/adaptive_agc.h | 41 + .../agc2/adaptive_digital_gain_applier.h | 46 + .../agc2/adaptive_mode_level_estimator.h | 39 + .../audio_processing/agc2/agc2_common.h | 85 + .../agc2/agc2_testing_common.h | 78 + .../audio_processing/agc2/biquad_filter.h | 64 + .../agc2/compute_interpolated_gain_curve.h | 48 + .../audio_processing/agc2/down_sampler.h | 40 + .../agc2/fixed_digital_level_estimator.h | 60 + .../agc2/fixed_gain_controller.h | 39 + .../audio_processing/agc2/gain_applier.h | 41 + .../agc2/gain_curve_applier.h | 56 + .../agc2/interpolated_gain_curve.h | 134 + .../modules/audio_processing/agc2/limiter.h | 74 + .../agc2/noise_level_estimator.h | 43 + .../agc2/noise_spectrum_estimator.h | 40 + .../audio_processing/agc2/rnn_vad/common.h | 67 + .../agc2/rnn_vad/features_extraction.h | 62 + .../audio_processing/agc2/rnn_vad/fft_util.h | 51 + .../agc2/rnn_vad/lp_residual.h | 39 + .../agc2/rnn_vad/pitch_info.h | 29 + .../agc2/rnn_vad/pitch_search.h | 49 + .../agc2/rnn_vad/pitch_search_internal.h | 107 + .../agc2/rnn_vad/ring_buffer.h | 66 + .../audio_processing/agc2/rnn_vad/rnn.h | 115 + .../agc2/rnn_vad/sequence_buffer.h | 79 + .../agc2/rnn_vad/spectral_features.h | 92 + .../agc2/rnn_vad/spectral_features_internal.h | 65 + .../agc2/rnn_vad/symmetric_matrix_buffer.h | 94 + .../agc2/rnn_vad/test_utils.h | 125 + .../agc2/saturation_protector.h | 64 + .../audio_processing/agc2/signal_classifier.h | 67 + .../agc2/vector_float_frame.h | 42 + .../modules/audio_processing/audio_buffer.h | 166 + .../audio_generator/file_audio_generator.h | 48 + .../audio_processing/audio_processing_impl.h | 462 ++ .../audio_processing/beamformer/array_util.h | 117 + .../beamformer/complex_matrix.h | 96 + .../beamformer/covariance_matrix_generator.h | 54 + .../audio_processing/beamformer/matrix.h | 369 ++ .../beamformer/matrix_test_helpers.h | 102 + .../beamformer/mock_nonlinear_beamformer.h | 39 + .../beamformer/nonlinear_beamformer.h | 230 + 
.../VS2017/modules/audio_processing/common.h | 34 + .../audio_processing/echo_cancellation_impl.h | 119 + .../echo_control_mobile_impl.h | 84 + .../echo_detector/circular_buffer.h | 42 + .../echo_detector/mean_variance_estimator.h | 33 + .../echo_detector/moving_max.h | 36 + .../normalized_covariance_estimator.h | 43 + .../gain_control_for_experimental_agc.h | 77 + .../audio_processing/gain_control_impl.h | 97 + .../audio_processing/gain_controller2.h | 54 + .../audio_processing/include/aec_dump.h | 103 + .../include/audio_frame_view.h | 67 + .../include/audio_generator.h | 36 + .../include/audio_generator_factory.h | 31 + .../include/audio_processing.h | 1220 +++++ .../include/audio_processing_statistics.h | 56 + .../modules/audio_processing/include/config.h | 133 + .../include/mock_audio_processing.h | 249 + .../intelligibility_enhancer.h | 137 + .../intelligibility/intelligibility_utils.h | 86 + .../audio_processing/level_estimator_impl.h | 47 + .../logging/apm_data_dumper.h | 212 + .../modules/audio_processing/low_cut_filter.h | 36 + .../audio_processing/noise_suppression_impl.h | 56 + .../modules/audio_processing/ns/defines.h | 49 + .../audio_processing/ns/noise_suppression.h | 135 + .../audio_processing/ns/noise_suppression_x.h | 113 + .../modules/audio_processing/ns/ns_core.h | 189 + .../modules/audio_processing/ns/nsx_core.h | 263 + .../modules/audio_processing/ns/nsx_defines.h | 64 + .../audio_processing/ns/windows_private.h | 574 +++ .../render_queue_item_verifier.h | 36 + .../audio_processing/residual_echo_detector.h | 90 + .../modules/audio_processing/rms_level.h | 75 + .../audio_processing/splitting_filter.h | 69 + .../test/aec_dump_based_simulator.h | 68 + .../test/audio_buffer_tools.h | 35 + .../test/audio_processing_simulator.h | 201 + .../test/audioproc_float_impl.h | 29 + .../test/bitexactness_tools.h | 56 + .../test/conversational_speech/config.h | 41 + .../conversational_speech/mock_wavreader.h | 49 + .../mock_wavreader_factory.h | 56 + 
.../conversational_speech/multiend_call.h | 98 + .../test/conversational_speech/simulator.h | 44 + .../test/conversational_speech/timing.h | 50 + .../wavreader_abstract_factory.h | 34 + .../conversational_speech/wavreader_factory.h | 36 + .../wavreader_interface.h | 41 + .../test/debug_dump_replayer.h | 77 + .../test/echo_canceller_test_tools.h | 44 + .../test/fake_recording_device.h | 75 + .../audio_processing/test/performance_timer.h | 47 + .../audio_processing/test/protobuf_utils.h | 34 + .../audio_processing/test/simulator_buffers.h | 66 + .../audio_processing/test/test_utils.h | 158 + .../test/wav_based_simulator.h | 56 + .../audio_processing/three_band_filter_bank.h | 69 + .../audio_processing/transient/common.h | 27 + .../transient/daubechies_8_wavelet_coeffs.h | 63 + .../transient/dyadic_decimator.h | 70 + .../audio_processing/transient/file_utils.h | 118 + .../transient/moving_moments.h | 52 + .../transient/transient_detector.h | 87 + .../transient/transient_suppressor.h | 120 + .../audio_processing/transient/wpd_node.h | 47 + .../audio_processing/transient/wpd_tree.h | 92 + .../audio_processing/typing_detection.h | 92 + .../utility/block_mean_calculator.h | 52 + .../utility/delay_estimator.h | 251 + .../utility/delay_estimator_internal.h | 48 + .../utility/delay_estimator_wrapper.h | 244 + .../audio_processing/utility/ooura_fft.h | 60 + .../utility/ooura_fft_tables_common.h | 54 + .../utility/ooura_fft_tables_neon_sse2.h | 94 + .../modules/audio_processing/vad/common.h | 29 + .../VS2017/modules/audio_processing/vad/gmm.h | 45 + .../audio_processing/vad/noise_gmm_tables.h | 85 + .../audio_processing/vad/pitch_based_vad.h | 58 + .../audio_processing/vad/pitch_internal.h | 26 + .../audio_processing/vad/pole_zero_filter.h | 52 + .../audio_processing/vad/standalone_vad.h | 69 + .../audio_processing/vad/vad_audio_proc.h | 93 + .../vad/vad_audio_proc_internal.h | 94 + .../vad/vad_circular_buffer.h | 69 + .../audio_processing/vad/vad_with_level.h | 40 + 
.../vad/voice_activity_detector.h | 69 + .../audio_processing/vad/voice_gmm_tables.h | 85 + .../audio_processing/voice_detection_impl.h | 58 + .../bitrate_controller_impl.h | 108 + .../include/bitrate_controller.h | 102 + .../include/mock/mock_bitrate_controller.h | 54 + .../send_side_bandwidth_estimation.h | 116 + .../acknowledged_bitrate_estimator.h | 44 + .../bbr/bandwidth_sampler.h | 261 + .../congestion_controller/bbr/bbr_factory.h | 29 + .../bbr/bbr_network_controller.h | 406 ++ .../bbr/data_transfer_tracker.h | 50 + .../bbr/packet_number_indexed_queue.h | 217 + .../congestion_controller/bbr/rtt_stats.h | 89 + .../bbr/windowed_filter.h | 168 + .../congestion_controller/bitrate_estimator.h | 46 + .../congestion_controller_unittests_helper.h | 23 + .../congestion_controller/delay_based_bwe.h | 90 + .../delay_based_bwe_unittest_helper.h | 178 + .../delay_increase_detector_interface.h | 37 + .../goog_cc/acknowledged_bitrate_estimator.h | 48 + .../goog_cc/alr_detector.h | 77 + .../goog_cc/bitrate_estimator.h | 48 + .../goog_cc/delay_based_bwe.h | 94 + .../goog_cc/delay_based_bwe_unittest_helper.h | 180 + .../delay_increase_detector_interface.h | 39 + .../goog_cc/goog_cc_network_control.h | 98 + .../goog_cc/include/goog_cc_factory.h | 34 + .../goog_cc/median_slope_estimator.h | 74 + .../goog_cc/probe_bitrate_estimator.h | 57 + .../goog_cc/probe_controller.h | 110 + .../goog_cc/trendline_estimator.h | 94 + .../include/mock/mock_congestion_observer.h | 31 + .../include/network_changed_observer.h | 41 + .../receive_side_congestion_controller.h | 104 + .../include/send_side_congestion_controller.h | 171 + ...end_side_congestion_controller_interface.h | 72 + .../median_slope_estimator.h | 72 + .../probe_bitrate_estimator.h | 55 + .../congestion_controller/probe_controller.h | 99 + .../congestion_controller_unittests_helper.h | 25 + .../include/send_side_congestion_controller.h | 224 + .../rtp/pacer_controller.h | 53 + .../rtp/send_time_history.h | 59 + 
.../rtp/transport_feedback_adapter.h | 80 + .../congestion_controller/send_time_history.h | 54 + .../transport_feedback_adapter.h | 80 + .../trendline_estimator.h | 92 + .../blank_detector_desktop_capturer_wrapper.h | 74 + .../capture_result_desktop_capturer_wrapper.h | 64 + .../desktop_capture/cropped_desktop_frame.h | 30 + .../cropping_window_capturer.h | 79 + .../desktop_and_cursor_composer.h | 76 + .../desktop_capture/desktop_capture_options.h | 146 + .../desktop_capture/desktop_capture_types.h | 59 + .../desktop_capture/desktop_capturer.h | 155 + .../desktop_capturer_differ_wrapper.h | 62 + .../desktop_capturer_wrapper.h | 45 + .../modules/desktop_capture/desktop_frame.h | 196 + .../desktop_capture/desktop_frame_generator.h | 121 + .../desktop_capture/desktop_frame_rotation.h | 52 + .../desktop_capture/desktop_frame_win.h | 51 + .../desktop_capture/desktop_geometry.h | 166 + .../modules/desktop_capture/desktop_region.h | 169 + .../modules/desktop_capture/differ_block.h | 44 + .../desktop_capture/differ_vector_sse2.h | 31 + .../desktop_capture/fake_desktop_capturer.h | 76 + .../fallback_desktop_capturer_wrapper.h | 63 + .../mac/desktop_configuration.h | 95 + .../mac/desktop_configuration_monitor.h | 61 + .../mac/desktop_frame_cgimage.h | 48 + .../mac/desktop_frame_iosurface.h | 44 + .../mac/desktop_frame_provider.h | 58 + .../mac/full_screen_chrome_window_detector.h | 62 + .../desktop_capture/mac/screen_capturer_mac.h | 116 + .../desktop_capture/mac/window_list_utils.h | 88 + .../mock_desktop_capturer_callback.h | 38 + .../modules/desktop_capture/mouse_cursor.h | 49 + .../desktop_capture/mouse_cursor_monitor.h | 113 + .../desktop_capture/resolution_tracker.h | 34 + .../modules/desktop_capture/rgba_color.h | 59 + .../screen_capture_frame_queue.h | 85 + .../desktop_capture/screen_capturer_helper.h | 90 + .../modules/desktop_capture/screen_drawer.h | 83 + .../screen_drawer_lock_posix.h | 38 + .../desktop_capture/shared_desktop_frame.h | 63 + 
.../modules/desktop_capture/shared_memory.h | 83 + .../modules/desktop_capture/test_utils.h | 27 + .../modules/desktop_capture/win/cursor.h | 25 + .../win/cursor_unittest_resources.h | 24 + .../modules/desktop_capture/win/d3d_device.h | 59 + .../modules/desktop_capture/win/desktop.h | 63 + .../win/display_configuration_monitor.h | 38 + .../win/dxgi_adapter_duplicator.h | 92 + .../desktop_capture/win/dxgi_context.h | 61 + .../win/dxgi_duplicator_controller.h | 238 + .../modules/desktop_capture/win/dxgi_frame.h | 63 + .../win/dxgi_output_duplicator.h | 145 + .../desktop_capture/win/dxgi_texture.h | 73 + .../win/dxgi_texture_mapping.h | 47 + .../win/dxgi_texture_staging.h | 68 + .../desktop_capture/win/scoped_gdi_object.h | 95 + .../win/scoped_thread_desktop.h | 54 + .../win/screen_capture_utils.h | 47 + .../win/screen_capturer_win_directx.h | 95 + .../win/screen_capturer_win_gdi.h | 83 + .../win/screen_capturer_win_magnifier.h | 139 + .../win/window_capture_utils.h | 82 + .../modules/desktop_capture/window_finder.h | 65 + .../desktop_capture/window_finder_mac.h | 37 + .../desktop_capture/window_finder_win.h | 30 + .../desktop_capture/window_finder_x11.h | 35 + .../desktop_capture/x11/shared_x_display.h | 81 + .../desktop_capture/x11/window_list_utils.h | 56 + .../desktop_capture/x11/x_atom_cache.h | 43 + .../desktop_capture/x11/x_error_trap.h | 39 + .../x11/x_server_pixel_buffer.h | 84 + .../Win64/VS2017/modules/include/module.h | 63 + .../modules/include/module_common_types.h | 278 ++ .../include/module_common_types_public.h | 110 + .../VS2017/modules/include/module_fec_types.h | 34 + .../VS2017/modules/pacing/alr_detector.h | 72 + .../VS2017/modules/pacing/bitrate_prober.h | 106 + .../VS2017/modules/pacing/interval_budget.h | 43 + .../modules/pacing/mock/mock_paced_sender.h | 45 + .../VS2017/modules/pacing/paced_sender.h | 211 + .../Win64/VS2017/modules/pacing/pacer.h | 39 + .../VS2017/modules/pacing/packet_queue.h | 84 + .../modules/pacing/packet_queue_interface.h 
| 69 + .../VS2017/modules/pacing/packet_router.h | 127 + .../modules/pacing/round_robin_packet_queue.h | 111 + .../aimd_rate_control.h | 93 + .../include/bwe_defines.h | 61 + .../include/remote_bitrate_estimator.h | 92 + .../remote_bitrate_estimator/inter_arrival.h | 95 + .../overuse_detector.h | 62 + .../overuse_estimator.h | 73 + .../remote_bitrate_estimator_abs_send_time.h | 142 + .../remote_bitrate_estimator_single_stream.h | 75 + ...remote_bitrate_estimator_unittest_helper.h | 221 + .../remote_estimator_proxy.h | 78 + .../test/bbr_paced_sender.h | 93 + .../remote_bitrate_estimator/test/bwe.h | 200 + .../remote_bitrate_estimator/test/bwe_test.h | 196 + .../test/bwe_test_baselinefile.h | 45 + .../test/bwe_test_fileutils.h | 59 + .../test/bwe_test_framework.h | 473 ++ .../test/bwe_test_logging.h | 358 ++ .../test/estimators/bbr.h | 242 + .../test/estimators/congestion_window.h | 49 + .../test/estimators/max_bandwidth_filter.h | 54 + .../test/estimators/min_rtt_filter.h | 73 + .../test/estimators/nada.h | 110 + .../test/estimators/remb.h | 87 + .../test/estimators/send_side.h | 76 + .../test/estimators/tcp.h | 38 + .../test/metric_recorder.h | 188 + .../remote_bitrate_estimator/test/packet.h | 220 + .../test/packet_receiver.h | 71 + .../test/packet_sender.h | 207 + .../remote_bitrate_estimator/tools/bwe_rtp.h | 36 + .../rtp_rtcp/include/flexfec_receiver.h | 68 + .../modules/rtp_rtcp/include/flexfec_sender.h | 96 + .../rtp_rtcp/include/receive_statistics.h | 90 + .../include/remote_ntp_time_estimator.h | 53 + .../VS2017/modules/rtp_rtcp/include/rtp_cvo.h | 56 + .../include/rtp_header_extension_map.h | 79 + .../rtp_rtcp/include/rtp_header_parser.h | 44 + .../rtp_rtcp/include/rtp_payload_registry.h | 87 + .../modules/rtp_rtcp/include/rtp_receiver.h | 113 + .../modules/rtp_rtcp/include/rtp_rtcp.h | 460 ++ .../rtp_rtcp/include/rtp_rtcp_defines.h | 597 +++ .../rtp_rtcp/include/ulpfec_receiver.h | 57 + .../mocks/mock_recovered_packet_receiver.h | 28 + 
.../mocks/mock_rtcp_bandwidth_observer.h | 29 + .../rtp_rtcp/mocks/mock_rtcp_rtt_stats.h | 28 + .../modules/rtp_rtcp/mocks/mock_rtp_rtcp.h | 196 + .../VS2017/modules/rtp_rtcp/source/byte_io.h | 408 ++ .../modules/rtp_rtcp/source/dtmf_queue.h | 41 + .../source/fec_private_tables_bursty.h | 39 + .../source/fec_private_tables_random.h | 29 + .../modules/rtp_rtcp/source/fec_test_helper.h | 130 + .../source/flexfec_header_reader_writer.h | 85 + .../source/forward_error_correction.h | 418 ++ .../forward_error_correction_internal.h | 119 + .../rtp_rtcp/source/packet_loss_stats.h | 57 + .../rtp_rtcp/source/playout_delay_oracle.h | 81 + .../rtp_rtcp/source/receive_statistics_impl.h | 135 + .../modules/rtp_rtcp/source/rtcp_nack_stats.h | 40 + .../modules/rtp_rtcp/source/rtcp_packet.h | 94 + .../modules/rtp_rtcp/source/rtcp_packet/app.h | 60 + .../modules/rtp_rtcp/source/rtcp_packet/bye.h | 59 + .../source/rtcp_packet/common_header.h | 52 + .../source/rtcp_packet/compound_packet.h | 47 + .../rtp_rtcp/source/rtcp_packet/dlrr.h | 70 + .../rtcp_packet/extended_jitter_report.h | 54 + .../source/rtcp_packet/extended_reports.h | 86 + .../modules/rtp_rtcp/source/rtcp_packet/fir.h | 61 + .../rtp_rtcp/source/rtcp_packet/nack.h | 58 + .../modules/rtp_rtcp/source/rtcp_packet/pli.h | 38 + .../rtp_rtcp/source/rtcp_packet/psfb.h | 47 + .../source/rtcp_packet/rapid_resync_request.h | 40 + .../source/rtcp_packet/receiver_report.h | 59 + .../rtp_rtcp/source/rtcp_packet/remb.h | 59 + .../source/rtcp_packet/report_block.h | 72 + .../rtp_rtcp/source/rtcp_packet/rrtr.h | 48 + .../rtp_rtcp/source/rtcp_packet/rtpfb.h | 47 + .../rtp_rtcp/source/rtcp_packet/sdes.h | 55 + .../source/rtcp_packet/sender_report.h | 84 + .../source/rtcp_packet/target_bitrate.h | 63 + .../rtp_rtcp/source/rtcp_packet/tmmb_item.h | 52 + .../rtp_rtcp/source/rtcp_packet/tmmbn.h | 55 + .../rtp_rtcp/source/rtcp_packet/tmmbr.h | 54 + .../source/rtcp_packet/transport_feedback.h | 154 + .../rtp_rtcp/source/rtcp_packet/voip_metric.h 
| 52 + .../modules/rtp_rtcp/source/rtcp_receiver.h | 271 ++ .../modules/rtp_rtcp/source/rtcp_sender.h | 299 ++ .../rtp_rtcp/source/rtcp_transceiver.h | 95 + .../rtp_rtcp/source/rtcp_transceiver_config.h | 109 + .../rtp_rtcp/source/rtcp_transceiver_impl.h | 105 + .../modules/rtp_rtcp/source/rtp_format.h | 70 + .../modules/rtp_rtcp/source/rtp_format_h264.h | 121 + .../source/rtp_format_video_generic.h | 74 + .../modules/rtp_rtcp/source/rtp_format_vp8.h | 169 + .../source/rtp_format_vp8_test_helper.h | 77 + .../modules/rtp_rtcp/source/rtp_format_vp9.h | 101 + .../rtp_rtcp/source/rtp_header_extensions.h | 190 + .../modules/rtp_rtcp/source/rtp_packet.h | 198 + .../rtp_rtcp/source/rtp_packet_history.h | 148 + .../rtp_rtcp/source/rtp_packet_received.h | 73 + .../rtp_rtcp/source/rtp_packet_to_send.h | 78 + .../rtp_rtcp/source/rtp_receiver_audio.h | 88 + .../rtp_rtcp/source/rtp_receiver_impl.h | 112 + .../rtp_rtcp/source/rtp_receiver_strategy.h | 82 + .../rtp_rtcp/source/rtp_receiver_video.h | 48 + .../modules/rtp_rtcp/source/rtp_rtcp_config.h | 43 + .../modules/rtp_rtcp/source/rtp_rtcp_impl.h | 360 ++ .../modules/rtp_rtcp/source/rtp_sender.h | 348 ++ .../rtp_rtcp/source/rtp_sender_audio.h | 97 + .../rtp_rtcp/source/rtp_sender_video.h | 166 + .../modules/rtp_rtcp/source/rtp_utility.h | 68 + .../modules/rtp_rtcp/source/time_util.h | 55 + .../modules/rtp_rtcp/source/tmmbr_help.h | 33 + .../rtp_rtcp/source/ulpfec_generator.h | 111 + .../source/ulpfec_header_reader_writer.h | 66 + .../rtp_rtcp/source/ulpfec_receiver_impl.h | 57 + .../modules/rtp_rtcp/test/testAPI/test_api.h | 72 + .../testFec/average_residual_loss_xor_codes.h | 191 + .../modules/utility/include/helpers_android.h | 73 + .../modules/utility/include/jvm_android.h | 188 + .../include/mock/mock_process_thread.h | 44 + .../modules/utility/include/process_thread.h | 77 + .../utility/source/process_thread_impl.h | 88 + .../modules/video_capture/device_info_impl.h | 60 + .../video_capture/linux/device_info_linux.h | 53 
+ .../video_capture/linux/video_capture_linux.h | 67 + .../modules/video_capture/objc/device_info.h | 61 + .../video_capture/objc/device_info_objc.h | 29 + .../objc/rtc_video_capture_objc.h | 40 + .../video_capture/objc/video_capture.h | 44 + .../modules/video_capture/video_capture.h | 121 + .../video_capture/video_capture_config.h | 33 + .../video_capture/video_capture_defines.h | 83 + .../video_capture/video_capture_factory.h | 44 + .../video_capture/video_capture_impl.h | 120 + .../video_capture/windows/device_info_ds.h | 106 + .../video_capture/windows/device_info_mf.h | 43 + .../video_capture/windows/help_functions_ds.h | 35 + .../video_capture/windows/sink_filter_ds.h | 95 + .../video_capture/windows/video_capture_ds.h | 79 + .../video_capture/windows/video_capture_mf.h | 43 + .../VS2017/modules/video_coding/codec_timer.h | 52 + .../codecs/h264/h264_decoder_impl.h | 86 + .../codecs/h264/h264_encoder_impl.h | 103 + .../video_coding/codecs/h264/include/h264.h | 54 + .../codecs/h264/include/h264_globals.h | 82 + .../video_coding/codecs/i420/include/i420.h | 138 + .../codecs/interface/common_constants.h | 26 + .../include/multiplex_decoder_adapter.h | 74 + .../include/multiplex_encoded_image_packer.h | 106 + .../include/multiplex_encoder_adapter.h | 84 + .../test/android_codec_factory_helper.h | 30 + .../codecs/test/objc_codec_factory_helper.h | 28 + .../codecs/test/video_codec_unittest.h | 133 + .../test/videocodec_test_fixture_impl.h | 111 + .../codecs/test/videocodec_test_stats_impl.h | 85 + .../video_coding/codecs/test/videoprocessor.h | 258 + .../codecs/vp8/default_temporal_layers.h | 84 + .../video_coding/codecs/vp8/include/vp8.h | 37 + .../codecs/vp8/include/vp8_common_types.h | 29 + .../codecs/vp8/include/vp8_globals.h | 49 + .../codecs/vp8/libvpx_vp8_decoder.h | 74 + .../codecs/vp8/libvpx_vp8_encoder.h | 111 + .../codecs/vp8/screenshare_layers.h | 122 + .../codecs/vp8/simulcast_rate_allocator.h | 61 + .../codecs/vp8/simulcast_test_utility.h | 813 ++++ 
.../video_coding/codecs/vp8/temporal_layers.h | 168 + .../video_coding/codecs/vp9/include/vp9.h | 38 + .../codecs/vp9/include/vp9_globals.h | 220 + .../video_coding/codecs/vp9/svc_config.h | 27 + .../codecs/vp9/svc_rate_allocator.h | 48 + .../codecs/vp9/vp9_frame_buffer_pool.h | 123 + .../video_coding/codecs/vp9/vp9_impl.h | 150 + .../modules/video_coding/decoder_database.h | 96 + .../modules/video_coding/decoding_state.h | 90 + .../modules/video_coding/encoded_frame.h | 141 + .../modules/video_coding/encoder_database.h | 65 + .../video_coding/fec_controller_default.h | 58 + .../modules/video_coding/fec_rate_table.h | 461 ++ .../modules/video_coding/frame_buffer.h | 96 + .../modules/video_coding/frame_buffer2.h | 180 + .../modules/video_coding/frame_object.h | 63 + .../modules/video_coding/generic_decoder.h | 118 + .../modules/video_coding/generic_encoder.h | 163 + .../video_coding/h264_sprop_parameter_sets.h | 36 + .../video_coding/h264_sps_pps_tracker.h | 57 + .../VS2017/modules/video_coding/histogram.h | 46 + .../include/mock/mock_vcm_callbacks.h | 45 + .../include/mock/mock_video_codec_interface.h | 85 + .../include/video_codec_initializer.h | 51 + .../include/video_codec_interface.h | 103 + .../video_coding/include/video_coding.h | 321 ++ .../include/video_coding_defines.h | 152 + .../video_coding/include/video_error_codes.h | 34 + .../modules/video_coding/inter_frame_delay.h | 67 + .../modules/video_coding/internal_defines.h | 41 + .../modules/video_coding/jitter_buffer.h | 381 ++ .../video_coding/jitter_buffer_common.h | 72 + .../modules/video_coding/jitter_estimator.h | 170 + .../modules/video_coding/media_opt_util.h | 360 ++ .../modules/video_coding/media_optimization.h | 82 + .../modules/video_coding/nack_fec_tables.h | 31 + .../VS2017/modules/video_coding/nack_module.h | 103 + .../VS2017/modules/video_coding/packet.h | 57 + .../modules/video_coding/packet_buffer.h | 187 + .../VS2017/modules/video_coding/qp_parser.h | 30 + 
.../VS2017/modules/video_coding/receiver.h | 104 + .../video_coding/rtp_frame_reference_finder.h | 231 + .../VS2017/modules/video_coding/rtt_filter.h | 66 + .../modules/video_coding/session_info.h | 166 + .../video_coding/test/stream_generator.h | 72 + .../modules/video_coding/test/test_util.h | 33 + .../modules/video_coding/timestamp_map.h | 48 + .../VS2017/modules/video_coding/timing.h | 139 + .../utility/default_video_bitrate_allocator.h | 33 + .../video_coding/utility/frame_dropper.h | 94 + .../video_coding/utility/ivf_file_writer.h | 60 + .../utility/mock/mock_frame_dropper.h | 34 + .../video_coding/utility/moving_average.h | 36 + .../video_coding/utility/quality_scaler.h | 95 + .../video_coding/utility/vp8_header_parser.h | 68 + .../utility/vp9_uncompressed_header_parser.h | 29 + .../modules/video_coding/video_coding_impl.h | 250 + .../video_processing/util/denoiser_filter.h | 56 + .../video_processing/util/denoiser_filter_c.h | 42 + .../util/denoiser_filter_neon.h | 42 + .../util/denoiser_filter_sse2.h | 42 + .../video_processing/util/noise_estimation.h | 63 + .../video_processing/util/skin_detection.h | 30 + .../modules/video_processing/video_denoiser.h | 84 + .../VS2017/p2p/base/asyncstuntcpsocket.h | 50 + .../p2p/base/basicpacketsocketfactory.h | 66 + .../VS2017/p2p/base/candidatepairinterface.h | 28 + .../Win64/VS2017/p2p/base/dtlstransport.h | 245 + .../VS2017/p2p/base/dtlstransportinternal.h | 116 + .../Win64/VS2017/p2p/base/fakecandidatepair.h | 54 + .../Win64/VS2017/p2p/base/fakedtlstransport.h | 286 ++ .../Win64/VS2017/p2p/base/fakeicetransport.h | 292 ++ .../VS2017/p2p/base/fakepackettransport.h | 140 + .../Win64/VS2017/p2p/base/fakeportallocator.h | 256 + .../VS2017/p2p/base/icetransportinternal.h | 280 ++ .../Win64/VS2017/p2p/base/mockicetransport.h | 79 + .../Win64/VS2017/p2p/base/p2pconstants.h | 108 + .../VS2017/p2p/base/p2ptransportchannel.h | 424 ++ .../VS2017/p2p/base/packetlossestimator.h | 87 + .../VS2017/p2p/base/packetsocketfactory.h | 
93 + .../p2p/base/packettransportinterface.h | 24 + .../VS2017/p2p/base/packettransportinternal.h | 108 + .../include/Win64/VS2017/p2p/base/port.h | 848 ++++ .../Win64/VS2017/p2p/base/portallocator.h | 615 +++ .../Win64/VS2017/p2p/base/portinterface.h | 138 + .../include/Win64/VS2017/p2p/base/pseudotcp.h | 241 + .../include/Win64/VS2017/p2p/base/relayport.h | 114 + .../Win64/VS2017/p2p/base/relayserver.h | 238 + .../include/Win64/VS2017/p2p/base/stun.h | 613 +++ .../include/Win64/VS2017/p2p/base/stunport.h | 288 ++ .../Win64/VS2017/p2p/base/stunrequest.h | 147 + .../Win64/VS2017/p2p/base/stunserver.h | 67 + .../include/Win64/VS2017/p2p/base/tcpport.h | 185 + .../Win64/VS2017/p2p/base/testrelayserver.h | 103 + .../Win64/VS2017/p2p/base/teststunserver.h | 43 + .../VS2017/p2p/base/testturncustomizer.h | 59 + .../Win64/VS2017/p2p/base/testturnserver.h | 142 + .../include/Win64/VS2017/p2p/base/transport.h | 19 + .../VS2017/p2p/base/transportdescription.h | 148 + .../p2p/base/transportdescriptionfactory.h | 80 + .../p2p/base/transportfactoryinterface.h | 42 + .../Win64/VS2017/p2p/base/transportinfo.h | 43 + .../include/Win64/VS2017/p2p/base/turnport.h | 349 ++ .../Win64/VS2017/p2p/base/turnserver.h | 320 ++ .../include/Win64/VS2017/p2p/base/udpport.h | 17 + .../Win64/VS2017/p2p/base/udptransport.h | 89 + .../VS2017/p2p/client/basicportallocator.h | 408 ++ .../p2p/client/relayportfactoryinterface.h | 72 + .../Win64/VS2017/p2p/client/turnportfactory.h | 37 + .../Win64/VS2017/p2p/stunprober/stunprober.h | 251 + .../include/Win64/VS2017/rtc_base/arraysize.h | 31 + .../Win64/VS2017/rtc_base/asyncinvoker-inl.h | 63 + .../Win64/VS2017/rtc_base/asyncinvoker.h | 265 + .../Win64/VS2017/rtc_base/asyncpacketsocket.h | 155 + .../VS2017/rtc_base/asyncresolverinterface.h | 47 + .../Win64/VS2017/rtc_base/asyncsocket.h | 83 + .../Win64/VS2017/rtc_base/asynctcpsocket.h | 108 + .../Win64/VS2017/rtc_base/asyncudpsocket.h | 67 + .../include/Win64/VS2017/rtc_base/atomicops.h | 87 + 
.../include/Win64/VS2017/rtc_base/base64.h | 123 + .../Win64/VS2017/rtc_base/basictypes.h | 62 + .../include/Win64/VS2017/rtc_base/bind.h | 284 ++ .../include/Win64/VS2017/rtc_base/bitbuffer.h | 126 + .../rtc_base/bitrateallocationstrategy.h | 101 + .../include/Win64/VS2017/rtc_base/buffer.h | 429 ++ .../Win64/VS2017/rtc_base/bufferqueue.h | 61 + .../Win64/VS2017/rtc_base/bytebuffer.h | 201 + .../include/Win64/VS2017/rtc_base/byteorder.h | 178 + .../include/Win64/VS2017/rtc_base/callback.h | 261 + .../include/Win64/VS2017/rtc_base/checks.h | 290 ++ .../Win64/VS2017/rtc_base/compile_assert_c.h | 21 + .../Win64/VS2017/rtc_base/constructormagic.h | 34 + .../Win64/VS2017/rtc_base/copyonwritebuffer.h | 242 + .../include/Win64/VS2017/rtc_base/cpu_time.h | 28 + .../include/Win64/VS2017/rtc_base/crc32.h | 35 + .../Win64/VS2017/rtc_base/criticalsection.h | 158 + .../Win64/VS2017/rtc_base/cryptstring.h | 86 + .../Win64/VS2017/rtc_base/data_rate_limiter.h | 56 + .../Win64/VS2017/rtc_base/deprecation.h | 45 + .../include/Win64/VS2017/rtc_base/dscp.h | 45 + .../include/Win64/VS2017/rtc_base/event.h | 68 + .../Win64/VS2017/rtc_base/event_tracer.h | 85 + .../rtc_base/experiments/alr_experiment.h | 40 + .../congestion_controller_experiment.h | 42 + .../experiments/quality_scaling_experiment.h | 59 + .../include/Win64/VS2017/rtc_base/fakeclock.h | 71 + .../Win64/VS2017/rtc_base/fakenetwork.h | 129 + .../Win64/VS2017/rtc_base/fakesslidentity.h | 80 + .../include/Win64/VS2017/rtc_base/file.h | 82 + .../VS2017/rtc_base/filerotatingstream.h | 173 + .../include/Win64/VS2017/rtc_base/fileutils.h | 150 + .../VS2017/rtc_base/firewallsocketserver.h | 122 + .../include/Win64/VS2017/rtc_base/flags.h | 268 ++ .../Win64/VS2017/rtc_base/format_macros.h | 94 + .../Win64/VS2017/rtc_base/function_view.h | 130 + .../Win64/VS2017/rtc_base/gtest_prod_util.h | 38 + .../include/Win64/VS2017/rtc_base/gunit.h | 172 + .../Win64/VS2017/rtc_base/gunit_prod.h | 24 + .../include/Win64/VS2017/rtc_base/helpers.h 
| 67 + .../include/Win64/VS2017/rtc_base/httpbase.h | 187 + .../Win64/VS2017/rtc_base/httpcommon-inl.h | 132 + .../Win64/VS2017/rtc_base/httpcommon.h | 457 ++ .../Win64/VS2017/rtc_base/httpserver.h | 139 + .../Win64/VS2017/rtc_base/ifaddrs-android.h | 39 + .../Win64/VS2017/rtc_base/ifaddrs_converter.h | 45 + .../Win64/VS2017/rtc_base/ignore_wundef.h | 33 + .../include/Win64/VS2017/rtc_base/ipaddress.h | 198 + .../include/Win64/VS2017/rtc_base/json.h | 91 + .../VS2017/rtc_base/keep_ref_until_done.h | 43 + .../include/Win64/VS2017/rtc_base/location.h | 57 + .../include/Win64/VS2017/rtc_base/logging.h | 396 ++ .../include/Win64/VS2017/rtc_base/logsinks.h | 68 + .../include/Win64/VS2017/rtc_base/macutils.h | 22 + .../VS2017/rtc_base/memory/aligned_array.h | 78 + .../VS2017/rtc_base/memory/aligned_malloc.h | 57 + .../Win64/VS2017/rtc_base/memory_usage.h | 24 + .../Win64/VS2017/rtc_base/messagedigest.h | 109 + .../Win64/VS2017/rtc_base/messagehandler.h | 75 + .../Win64/VS2017/rtc_base/messagequeue.h | 328 ++ .../include/Win64/VS2017/rtc_base/natserver.h | 124 + .../Win64/VS2017/rtc_base/natsocketfactory.h | 163 + .../include/Win64/VS2017/rtc_base/nattypes.h | 47 + .../include/Win64/VS2017/rtc_base/nethelper.h | 33 + .../Win64/VS2017/rtc_base/nethelpers.h | 64 + .../include/Win64/VS2017/rtc_base/network.h | 443 ++ .../Win64/VS2017/rtc_base/network_constants.h | 36 + .../Win64/VS2017/rtc_base/networkmonitor.h | 124 + .../Win64/VS2017/rtc_base/networkroute.h | 60 + .../Win64/VS2017/rtc_base/nullsocketserver.h | 36 + .../VS2017/rtc_base/numerics/exp_filter.h | 48 + .../numerics/histogram_percentile_counter.h | 43 + .../VS2017/rtc_base/numerics/mathutils.h | 39 + .../Win64/VS2017/rtc_base/numerics/mod_ops.h | 143 + .../rtc_base/numerics/moving_max_counter.h | 116 + .../rtc_base/numerics/moving_median_filter.h | 79 + .../rtc_base/numerics/percentile_filter.h | 124 + .../VS2017/rtc_base/numerics/safe_compare.h | 176 + .../rtc_base/numerics/safe_conversions.h | 76 + 
.../rtc_base/numerics/safe_conversions_impl.h | 175 + .../VS2017/rtc_base/numerics/safe_minmax.h | 335 ++ .../VS2017/rtc_base/numerics/sample_counter.h | 41 + .../rtc_base/numerics/sequence_number_util.h | 128 + .../Win64/VS2017/rtc_base/onetimeevent.h | 61 + .../include/Win64/VS2017/rtc_base/openssl.h | 25 + .../Win64/VS2017/rtc_base/openssladapter.h | 185 + .../VS2017/rtc_base/opensslcertificate.h | 80 + .../Win64/VS2017/rtc_base/openssldigest.h | 50 + .../Win64/VS2017/rtc_base/opensslidentity.h | 102 + .../VS2017/rtc_base/opensslsessioncache.h | 63 + .../VS2017/rtc_base/opensslstreamadapter.h | 221 + .../Win64/VS2017/rtc_base/opensslutility.h | 41 + .../Win64/VS2017/rtc_base/optionsfile.h | 50 + .../include/Win64/VS2017/rtc_base/pathutils.h | 93 + .../VS2017/rtc_base/physicalsocketserver.h | 267 + .../Win64/VS2017/rtc_base/platform_file.h | 65 + .../Win64/VS2017/rtc_base/platform_thread.h | 115 + .../VS2017/rtc_base/platform_thread_types.h | 53 + .../Win64/VS2017/rtc_base/protobuf_utils.h | 36 + .../include/Win64/VS2017/rtc_base/proxyinfo.h | 43 + .../Win64/VS2017/rtc_base/proxyserver.h | 100 + .../include/Win64/VS2017/rtc_base/ptr_util.h | 82 + .../Win64/VS2017/rtc_base/race_checker.h | 78 + .../include/Win64/VS2017/rtc_base/random.h | 93 + .../Win64/VS2017/rtc_base/rate_limiter.h | 56 + .../Win64/VS2017/rtc_base/rate_statistics.h | 84 + .../Win64/VS2017/rtc_base/ratetracker.h | 69 + .../include/Win64/VS2017/rtc_base/refcount.h | 67 + .../Win64/VS2017/rtc_base/refcountedobject.h | 63 + .../Win64/VS2017/rtc_base/refcounter.h | 52 + .../VS2017/rtc_base/rollingaccumulator.h | 174 + .../Win64/VS2017/rtc_base/rtccertificate.h | 88 + .../VS2017/rtc_base/rtccertificategenerator.h | 86 + .../include/Win64/VS2017/rtc_base/sanitizer.h | 142 + .../Win64/VS2017/rtc_base/scoped_ref_ptr.h | 163 + .../VS2017/rtc_base/sequenced_task_checker.h | 88 + .../rtc_base/sequenced_task_checker_impl.h | 49 + .../Win64/VS2017/rtc_base/signalthread.h | 157 + 
.../include/Win64/VS2017/rtc_base/sigslot.h | 647 +++ .../Win64/VS2017/rtc_base/sigslotrepeater.h | 56 + .../Win64/VS2017/rtc_base/sigslottester.h | 216 + .../include/Win64/VS2017/rtc_base/socket.h | 231 + .../Win64/VS2017/rtc_base/socket_unittest.h | 100 + .../Win64/VS2017/rtc_base/socketadapters.h | 207 + .../Win64/VS2017/rtc_base/socketaddress.h | 204 + .../Win64/VS2017/rtc_base/socketaddresspair.h | 41 + .../Win64/VS2017/rtc_base/socketfactory.h | 33 + .../Win64/VS2017/rtc_base/socketserver.h | 62 + .../Win64/VS2017/rtc_base/socketstream.h | 61 + .../Win64/VS2017/rtc_base/ssladapter.h | 105 + .../Win64/VS2017/rtc_base/sslcertificate.h | 147 + .../Win64/VS2017/rtc_base/sslfingerprint.h | 56 + .../Win64/VS2017/rtc_base/sslidentity.h | 171 + .../include/Win64/VS2017/rtc_base/sslroots.h | 4280 +++++++++++++++++ .../Win64/VS2017/rtc_base/sslstreamadapter.h | 284 ++ .../include/Win64/VS2017/rtc_base/stream.h | 675 +++ .../Win64/VS2017/rtc_base/string_to_number.h | 101 + .../Win64/VS2017/rtc_base/stringencode.h | 166 + .../Win64/VS2017/rtc_base/stringize_macros.h | 38 + .../rtc_base/strings/audio_format_to_string.h | 24 + .../VS2017/rtc_base/strings/string_builder.h | 87 + .../Win64/VS2017/rtc_base/stringutils.h | 319 ++ .../Win64/VS2017/rtc_base/swap_queue.h | 211 + .../rtc_base/synchronization/rw_lock_posix.h | 41 + .../rtc_base/synchronization/rw_lock_win.h | 39 + .../synchronization/rw_lock_wrapper.h | 66 + .../VS2017/rtc_base/system/asm_defines.h | 66 + .../VS2017/rtc_base/system/fallthrough.h | 31 + .../VS2017/rtc_base/system/file_wrapper.h | 87 + .../VS2017/rtc_base/system/ignore_warnings.h | 29 + .../Win64/VS2017/rtc_base/system/no_inline.h | 22 + .../Win64/VS2017/rtc_base/task_queue.h | 241 + .../VS2017/rtc_base/task_queue_for_test.h | 61 + .../Win64/VS2017/rtc_base/task_queue_posix.h | 36 + .../Win64/VS2017/rtc_base/template_util.h | 127 + .../Win64/VS2017/rtc_base/testbase64.h | 20 + .../VS2017/rtc_base/testcertificateverifier.h | 34 + 
.../Win64/VS2017/rtc_base/testclient.h | 114 + .../Win64/VS2017/rtc_base/testechoserver.h | 64 + .../include/Win64/VS2017/rtc_base/testutils.h | 233 + .../include/Win64/VS2017/rtc_base/thread.h | 337 ++ .../VS2017/rtc_base/thread_annotations.h | 95 + .../Win64/VS2017/rtc_base/thread_checker.h | 172 + .../VS2017/rtc_base/thread_checker_impl.h | 48 + .../rtc_base/time/timestamp_extrapolator.h | 54 + .../Win64/VS2017/rtc_base/timestampaligner.h | 74 + .../include/Win64/VS2017/rtc_base/timeutils.h | 160 + .../Win64/VS2017/rtc_base/trace_event.h | 915 ++++ .../Win64/VS2017/rtc_base/transformadapter.h | 84 + .../Win64/VS2017/rtc_base/type_traits.h | 140 + .../Win64/VS2017/rtc_base/unixfilesystem.h | 45 + .../VS2017/rtc_base/virtualsocketserver.h | 409 ++ .../include/Win64/VS2017/rtc_base/weak_ptr.h | 272 ++ .../VS2017/rtc_base/win/windows_version.h | 149 + .../include/Win64/VS2017/rtc_base/win32.h | 94 + .../Win64/VS2017/rtc_base/win32filesystem.h | 41 + .../Win64/VS2017/rtc_base/win32socketinit.h | 20 + .../Win64/VS2017/rtc_base/win32socketserver.h | 155 + .../Win64/VS2017/rtc_base/win32window.h | 60 + .../Win64/VS2017/rtc_base/zero_memory.h | 33 + .../jsoncpp/source/include/json/assertions.h | 31 + .../jsoncpp/source/include/json/autolink.h | 24 + .../jsoncpp/source/include/json/config.h | 98 + .../jsoncpp/source/include/json/features.h | 49 + .../jsoncpp/source/include/json/forwards.h | 44 + .../jsoncpp/source/include/json/json.h | 15 + .../jsoncpp/source/include/json/reader.h | 213 + .../jsoncpp/source/include/json/value.h | 1109 +++++ .../jsoncpp/source/include/json/writer.h | 184 + .../rev.23789/include/Win64/VS2017/typedefs.h | 115 + 1916 files changed, 189490 insertions(+), 4127 deletions(-) create mode 100644 Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.cpp create mode 100644 Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.h create mode 100644 
Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Private/ObjectTemplates/DatasmithLandscapeTemplate.cpp create mode 100644 Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/ObjectTemplates/DatasmithLandscapeTemplate.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSets.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSetsActor.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/Variant.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantManagerContentModule.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantObjectBinding.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantSet.cpp create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSets.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSetsActor.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/Variant.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantManagerContentModule.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantObjectBinding.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantSet.h create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/VariantManagerContent.Build.cs create mode 100644 Engine/Plugins/Enterprise/VariantManagerContent/VariantManagerContent.uplugin create mode 100644 
Engine/Plugins/Experimental/PixelStreaming/Config/BasePixelStreaming.ini create mode 100644 Engine/Plugins/Experimental/PixelStreaming/PixelStreaming.uplugin create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/PixelStreaming.Build.cs create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/JavaScriptKeyCodes.inl create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputComponent.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingPlugin.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingSettings.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProtocolDefs.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.cpp create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.h create mode 100644 
Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Utils.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/VideoEncoder.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/IPixelStreamingPlugin.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingCommon.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingInputComponent.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingSettings.h create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/NVIDIAVideoCodecSDK.tps create mode 100644 Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/nvEncodeAPI.h create mode 100644 Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Private/ProxyLODVolume.cpp create mode 100644 Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Public/ProxyLODVolume.h create mode 100644 Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.cpp create mode 100644 Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.h create mode 100644 Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.cpp create mode 100644 Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/BlackmagicMedia.uplugin create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/BlackmagicMedia.Build.cs create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicCustomTimeStep.cpp create mode 100644 
Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaFinder.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaOutput.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaSource.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicTimecodeProvider.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaModule.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaPrivate.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaAudioSample.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaTextureSample.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaAllowPlatformTypes.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaHidePlatformTypes.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicCustomTimeStep.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicHardwareSync.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaFinder.h 
create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaOutput.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSettings.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSource.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicTimecodeProvider.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/IBlackmagicMediaModule.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/BlackmagicMediaEditor.Build.cs create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/BlackmagicMediaEditorModule.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.h create mode 100644 
Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/BlackmagicMediaFactory.Build.cs create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/Private/BlackmagicMediaFactoryModule.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/BlackmagicMediaOutput.Build.cs create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaFrameGrabberProtocol.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaOutputModule.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutput.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaFrameGrabberProtocol.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaViewportOutput.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/IBlackmagicMediaOutputModule.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Blackmagic.Build.cs create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Include/BlackmagicLib.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Include/DeckLinkAPIVersion.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/List.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.h create mode 100644 
Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/RefCount.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/Thread.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIO.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOPrivate.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/dllmain.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.cpp create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/targetver.h create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.sln create mode 100644 Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.vcxproj create mode 100644 Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.h create mode 100644 Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.inl create mode 100644 Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.cpp create mode 100644 
Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.h create mode 100644 Engine/Programs/UnrealCollaborationServer/Config/DefaultEngine.ini create mode 100644 Engine/Source/Developer/MeshDescriptionOperations/Public/UVMapSettings.h create mode 100644 Engine/Source/Editor/LandscapeEditor/Private/NewLandscapeUtils.cpp create mode 100644 Engine/Source/Editor/LandscapeEditor/Public/NewLandscapeUtils.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/README.md create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.sln create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj.filters create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_AWS_WebRTCProxy.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_WebRTCProxy.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.cpp create mode 100644 
Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/H264FrameBuffer.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ScopeGuard.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Semaphore.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SetSessionDescriptionObserver.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SharedQueue.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.h create mode 100644 
Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxy.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.cpp create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/targetver.h create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/matchmaker.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package-lock.json create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package.json create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/run.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/setup.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/cirrus.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/index.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/store_password.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/users.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/index.js create mode 100644 
Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/init.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/config.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/httpsClient.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/logging.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package-lock.json create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package.json create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/login.css create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/player.css create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/run.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runAWS.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runNoSetup.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/app.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/webRtcPlayer.js create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/setup.bat create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Bootstrap.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Express.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/FontAwesome.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Helmet.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/JQuery.tps create mode 100644 
Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Popper.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Socket.io.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/WebRTCadapter.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Yargs.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/expression-session.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/nodebcryptjs.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport-local.tps create mode 100644 Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport.tps create mode 100644 Engine/Source/Runtime/Cbor/Cbor.Build.cs create mode 100644 Engine/Source/Runtime/Cbor/Private/CborModule.cpp create mode 100644 Engine/Source/Runtime/Cbor/Private/CborReader.cpp create mode 100644 Engine/Source/Runtime/Cbor/Private/CborWriter.cpp create mode 100644 Engine/Source/Runtime/Cbor/Private/Tests/CborTests.cpp create mode 100644 Engine/Source/Runtime/Cbor/Public/CborGlobals.h create mode 100644 Engine/Source/Runtime/Cbor/Public/CborReader.h create mode 100644 Engine/Source/Runtime/Cbor/Public/CborTypes.h create mode 100644 Engine/Source/Runtime/Cbor/Public/CborWriter.h create mode 100644 Engine/Source/Runtime/Engine/Classes/Engine/SystemTimeTimecodeProvider.h create mode 100644 Engine/Source/Runtime/Engine/Private/Engine/SystemTimeTimecodeProvider.cpp delete mode 100644 Engine/Source/Runtime/Engine/Private/Engine/TimecodeProvider.cpp create mode 100644 Engine/Source/Runtime/LiveLinkInterface/Private/LiveLinkSourceSettings.cpp create mode 100644 Engine/Source/Runtime/Serialization/Private/Backends/CborStructDeserializerBackend.cpp create mode 100644 
Engine/Source/Runtime/Serialization/Private/Backends/CborStructSerializerBackend.cpp create mode 100644 Engine/Source/Runtime/Serialization/Private/Backends/StructDeserializerBackendUtilities.h create mode 100644 Engine/Source/Runtime/Serialization/Public/Backends/CborStructDeserializerBackend.h create mode 100644 Engine/Source/Runtime/Serialization/Public/Backends/CborStructSerializerBackend.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/array_view.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_mixer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_control.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_decoder_L16.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_encoder_L16.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_codec_pair_id.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory_template.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory_template.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_format.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_decoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_encoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_decoder_g711.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_encoder_g711.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_decoder_g722.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_decoder_ilbc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_fix.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_float.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_fix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_float.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_decoder_opus.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_options.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/audio_sink.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/callfactoryinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/transport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/candidate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/cryptoparams.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/datachannelinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/dtmfsenderinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fakemetricsobserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fec_controller.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsep.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepicecandidate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepsessiondescription.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediaconstraintsinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreaminterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamproxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamtrackproxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediatypes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/notifier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/optional.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/mediadescription.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcfactoryinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpreceiverinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpsenderinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/packettransportinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportcontrollerinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/sessiondescription.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/srtptransportinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/udptransportinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionfactoryproxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectioninterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionproxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/proxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/refcountedbase.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtcerror.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtceventlogoutput.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtp_headers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpparameters.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpreceiverinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpsenderinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtptransceiverinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/setremotedescriptionobserverinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats_objects.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatscollectorcallback.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatsreport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/statstypes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/audioproc_float.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/create_videocodec_test_fixture.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/fakeconstraints.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_audio_mixer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpreceiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpsender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_decoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_encoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_fixture.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_stats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/bitrate_settings.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_control.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/mock_network_control.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/network_control_tester.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/turncustomizer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/umametrics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_rate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_size.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/time_delta.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/timestamp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/encoded_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/i420_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_bitrate_allocation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_content_type.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_rotation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_sink_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_source_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder_create.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_encoder_interface.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_timing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/builtin_video_decoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/builtin_video_encoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/sdp_video_format.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/video_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/video_decoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/video_encoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/video_encoder_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video_codecs/video_encoder_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/videosinkinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/videosourceinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/videosourceproxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/h264_bitstream_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/h264_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/pps_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/profile_level_id.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/sps_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/h264/sps_vui_rewriter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/bitrate_adjuster.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/frame_callback.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/i420_buffer_pool.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/incoming_video_stream.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/video_bitrate_allocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/video_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/include/video_frame_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/libyuv/include/webrtc_libyuv.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/common_video/video_render_frames.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/encoder/rtc_event_log_encoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_alr_state.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h 
create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_audio_playout.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_probe_result_failure.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_probe_result_success.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/icelogger.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/mock/mock_rtc_event_log.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/output/rtc_event_log_output_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log_factory_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log_parser_new.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_event_log_unittest_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/logging/rtc_event_log/rtc_stream_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/adaptedvideotracksource.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/audiosource.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/codec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/cryptoparams.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakeframesource.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakemediaengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakenetworkinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakertp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakevideocapturer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/fakevideorenderer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/h264_profile_level_id.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/mediachannel.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/mediaconfig.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/mediaconstants.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/mediaengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/rtpdataengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/rtputils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/streamparams.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/testutils.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/turnutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videoadapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videobroadcaster.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videocapturer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videocapturerfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videocommon.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/base/videosourcebase.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/adm_helpers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/apm_helpers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/constants.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/convert_legacy_video_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/fakewebrtccall.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/fakewebrtcdeviceinfo.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/fakewebrtcvcmfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/fakewebrtcvideocapturemodule.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/fakewebrtcvideoengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/internaldecoderfactory.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/internalencoderfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/multiplexcodecfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/nullwebrtcvideoengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/payload_type_mapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/scopedvideodecoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/scopedvideoencoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/simulcast.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/simulcast_encoder_adapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/videodecodersoftwarefallbackwrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/videoencodersoftwarefallbackwrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/vp8_encoder_simulcast_proxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcmediaengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvideocapturer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvideocapturerfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvideodecoderfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvideoencoderfactory.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvideoengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/engine/webrtcvoiceengine.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/sctp/sctptransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/media/sctp/sctptransportinternal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/acm_codec_database.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/acm_receive_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/acm_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/acm_resampler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/acm_send_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/call_statistics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/codec_manager.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/acm2/rent_a_codec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/bitrate_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/channel_controller.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/controller_manager.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/dtx_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/event_log_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/fec_controller_rplr_based.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/frame_length_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/mock/mock_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/mock/mock_controller_manager.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/audio_network_adaptor/util/threshold_curve.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/audio_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/audio_encoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/audio_format_conversion.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/cng/audio_encoder_cng.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/cng/webrtc_cng.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g711/audio_decoder_pcm.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g711/audio_encoder_pcm.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g711/g711.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g711/g711_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g722/audio_decoder_g722.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g722/audio_encoder_g722.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g722/g722_enc_dec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/g722/g722_interface.h create 
mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/abs_quant.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/abs_quant_loop.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/bw_expand.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_construct.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_mem_energy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_search.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_search_core.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/cb_update_best_index.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/chebyshev.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/comp_corr.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/constants.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/create_augmented_vec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/decode.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/decode_residual.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/do_plc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/encode.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/energy_inverse.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/enh_upsample.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/enhancer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/enhancer_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/frame_classify.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/gain_dequant.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/gain_quant.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/get_cd_vec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/get_lsp_poly.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/get_sync_seq.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/hp_input.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/hp_output.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/ilbc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/index_conv_dec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/index_conv_enc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/init_decode.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/init_encode.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/interpolate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/interpolate_samples.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lpc_encode.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsf_check.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsf_to_poly.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/my_corr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/nearest_neighbor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/pack_bits.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/poly_to_lsf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/poly_to_lsp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/refiner.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/smooth.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/smooth_out_data.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/sort_sq.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/split_vq.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/state_construct.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/state_search.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/swap_bytes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/unpack_bits.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/vq3.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/vq4.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/window32_w32.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/ilbc/xcorr_coef.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/audio_decoder_isac_t_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/bandwidth_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/include/isacfix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/arith_routins.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/codec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/fft.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/filterbank_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/isac_fix_type.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/settings.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/fix/source/structs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/locked_bandwidth_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/include/isac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/arith_routines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/codec.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/crc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/entropy_coding.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/fft.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/filter_functions.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/isac_float_type.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/isac_vad.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/lpc_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/pitch_estimator.h create 
mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/pitch_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/settings.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/source/structs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/isac/main/util/utility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/legacy_encoded_audio_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/opus/audio_decoder_opus.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/opus/audio_encoder_opus.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/opus/opus_inst.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/opus/opus_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/pcm16b/pcm16b.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/pcm16b/pcm16b_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/red/audio_encoder_copy_red.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/codecs/tools/audio_codec_speed_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/include/audio_coding_module.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/include/audio_coding_module_typedefs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/accelerate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/audio_multi_vector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/audio_vector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/background_noise.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/buffer_level_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/comfort_noise.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/cross_correlation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/decision_logic.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/decision_logic_fax.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/decision_logic_normal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/decoder_database.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/delay_manager.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/delay_peak_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/dsp_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/dtmf_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/dtmf_tone_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/expand.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/expand_uma_logger.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/include/neteq.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/merge.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_decoder_database.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_delay_manager.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_expand.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_packet_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_red_payload_splitter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/mock/mock_statistics_calculator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/nack_tracker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/neteq_decoder_enum.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/neteq_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/normal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/packet_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/post_decode_vad.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/preemptive_expand.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/random_vector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/red_payload_splitter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/rtcp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/statistics_calculator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/sync_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tick_timer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/time_stretch.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/timestamp_scaler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/audio_checksum.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/audio_loop.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/audio_sink.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/constant_pcm_packet_source.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/encode_neteq_input.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/fake_decode_from_file.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/input_audio_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_external_decoder_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_input.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_packet_source_input.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_performance_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_quality_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_replacement_input.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_stats_getter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/neteq_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/output_audio_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/output_wav_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/packet_source.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/resample_input_audio_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/rtc_event_log_source.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/rtp_file_source.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/neteq/tools/rtp_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/ACMTest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/APITest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/Channel.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/EncodeDecodeTest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/PCMFile.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/PacketLossTest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/RTPFile.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/TestAllCodecs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/TestRedFec.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/TestStereo.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/TestVADDTX.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/TwoWayCommunication.h create mode 
100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/iSACTest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/opus_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_coding/test/utility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/aaudio_player.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/aaudio_recorder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/aaudio_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/audio_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/audio_device_template.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/audio_manager.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/audio_record_jni.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/audio_track_jni.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/build_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/ensure_initialized.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/opensles_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/opensles_player.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/android/opensles_recorder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/audio_device_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/audio_device_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/audio_device_generic.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/audio_device_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/audio_device_name.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/dummy/audio_device_dummy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/dummy/file_audio_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/dummy/file_audio_device_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/fine_audio_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/audio_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/audio_device_data_observer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/audio_device_default.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/audio_device_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/fake_audio_device.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/mock_audio_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/mock_audio_transport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/include/test_audio_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/audio_device_ios.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/audio_session_observer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/objc/RTCAudioSession.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/ios/voice_processing_audio_unit.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/alsasymboltable_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/audio_device_alsa_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/audio_device_pulse_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/audio_mixer_manager_pulse_linux.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/latebindingsymboltable_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/linux/pulseaudiosymboltable_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/mac/audio_device_mac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/mac/audio_mixer_manager_mac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/mac/portaudio/pa_memorybarrier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/mac/portaudio/pa_ringbuffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/mock_audio_device_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/win/audio_device_core_win.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_device/win/core_audio_utility_win.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/audio_frame_manipulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/audio_mixer_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/default_output_rate_calculator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/frame_combiner.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/gain_change_calculator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/output_rate_calculator.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_mixer/sine_wave_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec/aec_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec/aec_core.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec/aec_core_optimized_methods.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec/aec_resampler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec/echo_cancellation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/adaptive_fir_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/aec3_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/aec3_fft.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/aec_state.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/block_framer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/block_processor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/block_processor_metrics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/cascaded_biquad_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/coherence_gain.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/comfort_noise_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/decimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/delay_estimate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/downsampled_render_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_audibility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_canceller3.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_path_delay_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_path_variability.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_remover.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/echo_remover_metrics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/erl_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/erle_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/fft_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/fft_data.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/filter_analyzer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/frame_blocker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/main_filter_update_gain.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/matched_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/matched_filter_lag_aggregator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/matrix_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/mock/mock_block_processor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/mock/mock_echo_remover.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/mock/mock_render_delay_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/mock/mock_render_delay_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/render_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/render_delay_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/render_delay_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/render_delay_controller_metrics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/render_signal_analyzer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/residual_echo_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/reverb_model.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/reverb_model_fallback.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/shadow_filter_update_gain.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/skew_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/stationarity_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/subtractor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/subtractor_output.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/suppression_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/suppression_gain.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/suppression_gain_limiter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/vector_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec3/vector_math.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec_dump/aec_dump_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec_dump/aec_dump_impl.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec_dump/capture_stream_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec_dump/mock_aec_dump.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aec_dump/write_to_file_task.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aecm/aecm_core.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aecm/aecm_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/aecm/echo_control_mobile.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/agc_manager_direct.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/gain_map_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/legacy/analog_agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/legacy/digital_agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/legacy/gain_control.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/loudness_histogram.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/mock_agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc/utility.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/adaptive_agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/adaptive_digital_gain_applier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/adaptive_mode_level_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/agc2_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/agc2_testing_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/biquad_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/compute_interpolated_gain_curve.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/down_sampler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/fixed_digital_level_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/fixed_gain_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/gain_applier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/gain_curve_applier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/interpolated_gain_curve.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/limiter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/noise_level_estimator.h create 
mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/noise_spectrum_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/features_extraction.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/fft_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/lp_residual.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/pitch_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/pitch_search.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/ring_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/rnn.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/spectral_features.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/rnn_vad/test_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/saturation_protector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/signal_classifier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/agc2/vector_float_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/audio_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/audio_generator/file_audio_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/audio_processing_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/array_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/complex_matrix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/covariance_matrix_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/matrix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/matrix_test_helpers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/mock_nonlinear_beamformer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/beamformer/nonlinear_beamformer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/common.h 
create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_cancellation_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_control_mobile_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_detector/circular_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_detector/mean_variance_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_detector/moving_max.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/echo_detector/normalized_covariance_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/gain_control_for_experimental_agc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/gain_control_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/gain_controller2.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/aec_dump.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/audio_frame_view.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/audio_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/audio_generator_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/audio_processing.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/audio_processing_statistics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/include/mock_audio_processing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/intelligibility/intelligibility_enhancer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/intelligibility/intelligibility_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/level_estimator_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/logging/apm_data_dumper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/low_cut_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/noise_suppression_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/noise_suppression.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/noise_suppression_x.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/ns_core.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/nsx_core.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/nsx_defines.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/ns/windows_private.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/render_queue_item_verifier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/residual_echo_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/rms_level.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/splitting_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/aec_dump_based_simulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/audio_buffer_tools.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/audio_processing_simulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/audioproc_float_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/bitexactness_tools.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/mock_wavreader.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/multiend_call.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/simulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/timing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/wavreader_abstract_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/wavreader_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/conversational_speech/wavreader_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/debug_dump_replayer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/echo_canceller_test_tools.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/fake_recording_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/performance_timer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/protobuf_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/simulator_buffers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/test_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/test/wav_based_simulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/three_band_filter_bank.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/dyadic_decimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/file_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/moving_moments.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/transient_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/transient_suppressor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/wpd_node.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/transient/wpd_tree.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/typing_detection.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/block_mean_calculator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/delay_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/delay_estimator_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/delay_estimator_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/ooura_fft.h 
create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/ooura_fft_tables_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/utility/ooura_fft_tables_neon_sse2.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/gmm.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/noise_gmm_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/pitch_based_vad.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/pitch_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/pole_zero_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/standalone_vad.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/vad_audio_proc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/vad_audio_proc_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/vad_circular_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/vad_with_level.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/voice_activity_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/vad/voice_gmm_tables.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/audio_processing/voice_detection_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/bitrate_controller/bitrate_controller_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/bitrate_controller/include/bitrate_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/bitrate_controller/include/mock/mock_bitrate_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/bitrate_controller/send_side_bandwidth_estimation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/acknowledged_bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/bandwidth_sampler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/bbr_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/bbr_network_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/data_transfer_tracker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/packet_number_indexed_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/rtt_stats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bbr/windowed_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/bitrate_estimator.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/congestion_controller_unittests_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/delay_based_bwe.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/delay_based_bwe_unittest_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/delay_increase_detector_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/alr_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/delay_based_bwe.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/goog_cc_network_control.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/include/goog_cc_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/median_slope_estimator.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/probe_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/goog_cc/trendline_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/include/mock/mock_congestion_observer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/include/network_changed_observer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/include/receive_side_congestion_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/include/send_side_congestion_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/include/send_side_congestion_controller_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/median_slope_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/probe_bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/probe_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/rtp/congestion_controller_unittests_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/rtp/include/send_side_congestion_controller.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/rtp/pacer_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/rtp/send_time_history.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/rtp/transport_feedback_adapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/send_time_history.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/transport_feedback_adapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/congestion_controller/trendline_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/cropped_desktop_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/cropping_window_capturer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_and_cursor_composer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_capture_options.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_capture_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_capturer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_capturer_differ_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_capturer_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_frame_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_frame_rotation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_frame_win.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_geometry.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/desktop_region.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/differ_block.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/differ_vector_sse2.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/fake_desktop_capturer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/fallback_desktop_capturer_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/desktop_configuration.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/desktop_configuration_monitor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/desktop_frame_cgimage.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/desktop_frame_iosurface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/desktop_frame_provider.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/full_screen_chrome_window_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/screen_capturer_mac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mac/window_list_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mock_desktop_capturer_callback.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mouse_cursor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/mouse_cursor_monitor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/resolution_tracker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/rgba_color.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/screen_capture_frame_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/screen_capturer_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/screen_drawer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/screen_drawer_lock_posix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/shared_desktop_frame.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/shared_memory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/test_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/cursor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/cursor_unittest_resources.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/d3d_device.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/desktop.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/display_configuration_monitor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_adapter_duplicator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_context.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_duplicator_controller.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_output_duplicator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_texture.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_texture_mapping.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/dxgi_texture_staging.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/scoped_gdi_object.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/scoped_thread_desktop.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/screen_capture_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/screen_capturer_win_directx.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/screen_capturer_win_gdi.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/screen_capturer_win_magnifier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/win/window_capture_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/window_finder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/window_finder_mac.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/window_finder_win.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/window_finder_x11.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/x11/shared_x_display.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/x11/window_list_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/x11/x_atom_cache.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/x11/x_error_trap.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/desktop_capture/x11/x_server_pixel_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/include/module.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/include/module_common_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/include/module_common_types_public.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/include/module_fec_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/alr_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/bitrate_prober.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/interval_budget.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/mock/mock_paced_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/paced_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/pacer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/packet_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/packet_queue_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/packet_router.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/pacing/round_robin_packet_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/aimd_rate_control.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/include/bwe_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/inter_arrival.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/overuse_detector.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/overuse_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/remote_estimator_proxy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bbr_paced_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe_test_baselinefile.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe_test_fileutils.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe_test_framework.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/bwe_test_logging.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/bbr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/congestion_window.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/max_bandwidth_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/min_rtt_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/nada.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/remb.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/send_side.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/estimators/tcp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/metric_recorder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/packet_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/test/packet_sender.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/remote_bitrate_estimator/tools/bwe_rtp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/flexfec_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/flexfec_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/receive_statistics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/remote_ntp_time_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_cvo.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_header_extension_map.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_header_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_payload_registry.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_rtcp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/rtp_rtcp_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/include/ulpfec_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/mocks/mock_rtcp_bandwidth_observer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/byte_io.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/dtmf_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/fec_private_tables_bursty.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/fec_private_tables_random.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/fec_test_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/flexfec_header_reader_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/forward_error_correction.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/forward_error_correction_internal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/packet_loss_stats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/playout_delay_oracle.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/receive_statistics_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_nack_stats.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/app.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/bye.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/common_header.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/dlrr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/fir.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/nack.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/pli.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/psfb.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/remb.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/report_block.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/rrtr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/sdes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/sender_report.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_packet/voip_metric.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_transceiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_transceiver_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtcp_transceiver_impl.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format_h264.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format_video_generic.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format_vp8.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_format_vp9.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_header_extensions.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_packet_history.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_packet_received.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_packet_to_send.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_receiver_audio.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_receiver_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_receiver_strategy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_receiver_video.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_rtcp_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_rtcp_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_sender.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_sender_audio.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_sender_video.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/rtp_utility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/time_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/tmmbr_help.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/ulpfec_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/ulpfec_header_reader_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/source/ulpfec_receiver_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/test/testAPI/test_api.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/utility/include/helpers_android.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/utility/include/jvm_android.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/utility/include/mock/mock_process_thread.h create 
mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/utility/include/process_thread.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/utility/source/process_thread_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/device_info_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/linux/device_info_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/linux/video_capture_linux.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/objc/device_info.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/objc/device_info_objc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/objc/rtc_video_capture_objc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/objc/video_capture.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/video_capture.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/video_capture_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/video_capture_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/video_capture_factory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/video_capture_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/device_info_ds.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/device_info_mf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/help_functions_ds.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/sink_filter_ds.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/video_capture_ds.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_capture/windows/video_capture_mf.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codec_timer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/h264/h264_decoder_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/h264/h264_encoder_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/h264/include/h264.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/h264/include/h264_globals.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/i420/include/i420.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/interface/common_constants.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/multiplex/include/multiplex_encoded_image_packer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/android_codec_factory_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/objc_codec_factory_helper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/video_codec_unittest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/videocodec_test_fixture_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/videocodec_test_stats_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/test/videoprocessor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/default_temporal_layers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/include/vp8.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/include/vp8_common_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/include/vp8_globals.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/screenshare_layers.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/simulcast_test_utility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp8/temporal_layers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/include/vp9.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/include/vp9_globals.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/svc_config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/svc_rate_allocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/codecs/vp9/vp9_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/decoder_database.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/decoding_state.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/encoded_frame.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/encoder_database.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/fec_controller_default.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/fec_rate_table.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/frame_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/frame_buffer2.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/frame_object.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/generic_decoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/generic_encoder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/h264_sprop_parameter_sets.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/h264_sps_pps_tracker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/histogram.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/mock/mock_vcm_callbacks.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/mock/mock_video_codec_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/video_codec_initializer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/video_codec_interface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/video_coding.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/video_coding_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/include/video_error_codes.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/inter_frame_delay.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/internal_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/jitter_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/jitter_buffer_common.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/jitter_estimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/media_opt_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/media_optimization.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/nack_fec_tables.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/nack_module.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/packet.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/packet_buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/qp_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/receiver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/rtp_frame_reference_finder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/rtt_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/session_info.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/test/stream_generator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/test/test_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/timestamp_map.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/timing.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/default_video_bitrate_allocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/frame_dropper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/ivf_file_writer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/mock/mock_frame_dropper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/moving_average.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/quality_scaler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/vp8_header_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/utility/vp9_uncompressed_header_parser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_coding/video_coding_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/denoiser_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/denoiser_filter_c.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/denoiser_filter_neon.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/denoiser_filter_sse2.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/noise_estimation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/util/skin_detection.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/modules/video_processing/video_denoiser.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/asyncstuntcpsocket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/basicpacketsocketfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/candidatepairinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/dtlstransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/dtlstransportinternal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/fakecandidatepair.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/fakedtlstransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/fakeicetransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/fakepackettransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/fakeportallocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/icetransportinternal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/mockicetransport.h create mode 
100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/p2pconstants.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/p2ptransportchannel.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/packetlossestimator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/packetsocketfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/packettransportinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/packettransportinternal.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/port.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/portallocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/portinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/pseudotcp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/relayport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/relayserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/stun.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/stunport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/stunrequest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/stunserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/tcpport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/testrelayserver.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/teststunserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/testturncustomizer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/testturnserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/transport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/transportdescription.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/transportdescriptionfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/transportfactoryinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/transportinfo.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/turnport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/turnserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/udpport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/base/udptransport.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/client/basicportallocator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/client/relayportfactoryinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/client/turnportfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/p2p/stunprober/stunprober.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/arraysize.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncinvoker-inl.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncinvoker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncpacketsocket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncresolverinterface.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncsocket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asynctcpsocket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/asyncudpsocket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/atomicops.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/base64.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/basictypes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/bind.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/bitbuffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/bitrateallocationstrategy.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/buffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/bufferqueue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/bytebuffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/byteorder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/callback.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/checks.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/compile_assert_c.h create mode 
100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/constructormagic.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/copyonwritebuffer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/cpu_time.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/crc32.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/criticalsection.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/cryptstring.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/data_rate_limiter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/deprecation.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/dscp.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/event.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/event_tracer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/experiments/alr_experiment.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/experiments/congestion_controller_experiment.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/experiments/quality_scaling_experiment.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/fakeclock.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/fakenetwork.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/fakesslidentity.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/file.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/filerotatingstream.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/fileutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/firewallsocketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/flags.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/format_macros.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/function_view.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/gtest_prod_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/gunit.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/gunit_prod.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/helpers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/httpbase.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/httpcommon-inl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/httpcommon.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/httpserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ifaddrs-android.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ifaddrs_converter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ignore_wundef.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ipaddress.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/json.h create mode 
100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/keep_ref_until_done.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/location.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/logging.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/logsinks.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/macutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/memory/aligned_array.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/memory/aligned_malloc.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/memory_usage.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/messagedigest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/messagehandler.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/messagequeue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/natserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/natsocketfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/nattypes.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/nethelper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/nethelpers.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/network.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/network_constants.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/networkmonitor.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/networkroute.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/nullsocketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/exp_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/histogram_percentile_counter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/mathutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/mod_ops.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/moving_max_counter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/moving_median_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/percentile_filter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/safe_compare.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/safe_conversions.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/safe_conversions_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/safe_minmax.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/sample_counter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/numerics/sequence_number_util.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/onetimeevent.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/openssl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/openssladapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/opensslcertificate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/openssldigest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/opensslidentity.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/opensslsessioncache.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/opensslstreamadapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/opensslutility.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/optionsfile.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/pathutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/physicalsocketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/platform_file.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/platform_thread.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/platform_thread_types.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/protobuf_utils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/proxyinfo.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/proxyserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ptr_util.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/race_checker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/random.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/rate_limiter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/rate_statistics.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ratetracker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/refcount.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/refcountedobject.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/refcounter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/rollingaccumulator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/rtccertificate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/rtccertificategenerator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sanitizer.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/scoped_ref_ptr.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sequenced_task_checker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sequenced_task_checker_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/signalthread.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sigslot.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sigslotrepeater.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sigslottester.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socket.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socket_unittest.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketadapters.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketaddress.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketaddresspair.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketfactory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/socketstream.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/ssladapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sslcertificate.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sslfingerprint.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sslidentity.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sslroots.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/sslstreamadapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/stream.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/string_to_number.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/stringencode.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/stringize_macros.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/strings/audio_format_to_string.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/strings/string_builder.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/stringutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/swap_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/synchronization/rw_lock_posix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/synchronization/rw_lock_win.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/synchronization/rw_lock_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/system/asm_defines.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/system/fallthrough.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/system/file_wrapper.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/system/ignore_warnings.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/system/no_inline.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/task_queue.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/task_queue_for_test.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/task_queue_posix.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/template_util.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/testbase64.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/testcertificateverifier.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/testclient.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/testechoserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/testutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/thread.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/thread_annotations.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/thread_checker.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/thread_checker_impl.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/time/timestamp_extrapolator.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/timestampaligner.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/timeutils.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/trace_event.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/transformadapter.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/type_traits.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/unixfilesystem.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/virtualsocketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/weak_ptr.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win/windows_version.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win32.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win32filesystem.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win32socketinit.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win32socketserver.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/win32window.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/rtc_base/zero_memory.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/assertions.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/autolink.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/config.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/features.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/forwards.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/json.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/reader.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/value.h create mode 100644 Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/third_party/jsoncpp/source/include/json/writer.h create mode 100644 
Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/typedefs.h diff --git a/Engine/Build/Commit.gitdeps.xml b/Engine/Build/Commit.gitdeps.xml index 78e9b0daa18f..b80e0196b190 100644 --- a/Engine/Build/Commit.gitdeps.xml +++ b/Engine/Build/Commit.gitdeps.xml @@ -6821,7 +6821,7 @@ - + @@ -7399,7 +7399,7 @@ - + @@ -15757,76 +15757,76 @@ - - - + + + - - - - + + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - + + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + @@ -15865,40 +15865,40 @@ - - - + + + - - - + + + - - - - + + + + - - - + + + - - - + + + - - - + + + - - - + + + @@ -15937,71 +15937,71 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - + + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - + + + + @@ -16600,6 +16600,8 @@ + + @@ -26039,57 +26041,81 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -26106,33 +26132,38 @@ - + - - - - - - - - - - - - + + + + + + + + + + + + - - - - - - - + + + + + + + + + + + + - - + + @@ -27404,6 +27435,8 @@ + + @@ -27618,14 +27651,14 @@ - + - + @@ -28075,8 +28108,8 @@ - - + + @@ -28105,6 +28138,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -29274,6 +29336,18 @@ + + + + + + + + + + + + @@ -38282,6 +38356,18 @@ + + + + + + + + + + + + @@ -39573,6 +39659,7 @@ + @@ -39620,6 +39707,7 @@ + @@ -39634,7 +39722,6 @@ - @@ -39737,7 +39824,7 @@ - + @@ -39870,6 +39957,7 @@ + @@ -39887,6 +39975,7 @@ + @@ -39899,7 +39988,6 @@ - @@ -39944,7 +40032,7 @@ - + @@ -39968,7 +40056,6 @@ - @@ -40035,6 +40122,7 @@ + @@ -40057,7 +40145,6 @@ - @@ -40106,6 +40193,7 @@ + @@ -40164,6 +40252,7 @@ + @@ -40188,11 +40277,10 @@ - - + @@ -40280,7 +40368,6 @@ - @@ -40303,6 +40390,7 @@ + @@ -40402,7 +40490,6 @@ - @@ 
-40504,7 +40591,6 @@ - @@ -40550,7 +40636,6 @@ - @@ -40584,9 +40669,8 @@ - + - @@ -40665,15 +40749,14 @@ - + - @@ -40684,7 +40767,6 @@ - @@ -40704,7 +40786,7 @@ - + @@ -40802,7 +40884,7 @@ - + @@ -40823,7 +40905,6 @@ - @@ -40832,7 +40913,6 @@ - @@ -40843,6 +40923,7 @@ + @@ -40979,7 +41060,6 @@ - @@ -41027,7 +41107,7 @@ - + @@ -41036,7 +41116,6 @@ - @@ -41230,7 +41309,6 @@ - @@ -41298,8 +41376,9 @@ - + + @@ -41323,8 +41402,10 @@ + + @@ -41444,7 +41525,7 @@ - + @@ -41560,6 +41641,7 @@ + @@ -41568,7 +41650,7 @@ - + @@ -41606,6 +41688,7 @@ + @@ -41644,7 +41727,7 @@ - + @@ -41652,6 +41735,7 @@ + @@ -41675,7 +41759,6 @@ - @@ -41697,7 +41780,6 @@ - @@ -41825,7 +41907,6 @@ - @@ -41885,6 +41966,7 @@ + @@ -41910,7 +41992,6 @@ - @@ -41939,6 +42020,7 @@ + @@ -41946,7 +42028,6 @@ - @@ -41960,6 +42041,7 @@ + @@ -41969,7 +42051,7 @@ - + @@ -42048,7 +42130,7 @@ - + @@ -42134,6 +42216,7 @@ + @@ -42212,6 +42295,7 @@ + @@ -42234,6 +42318,7 @@ + @@ -42279,7 +42364,7 @@ - + @@ -42296,6 +42381,7 @@ + @@ -42386,6 +42472,7 @@ + @@ -42467,8 +42554,6 @@ - - @@ -42486,11 +42571,11 @@ + - @@ -42519,7 +42604,7 @@ - + @@ -42545,7 +42630,6 @@ - @@ -42559,7 +42643,7 @@ - + @@ -42576,9 +42660,9 @@ + - @@ -42617,7 +42701,7 @@ - + @@ -42633,6 +42717,7 @@ + @@ -42708,6 +42793,7 @@ + @@ -42751,7 +42837,6 @@ - @@ -42772,7 +42857,6 @@ - @@ -42831,12 +42915,12 @@ - + - + @@ -42906,7 +42990,7 @@ - + @@ -43031,7 +43115,7 @@ - + @@ -43048,6 +43132,7 @@ + @@ -43154,7 +43239,7 @@ - + @@ -43164,6 +43249,7 @@ + @@ -43195,9 +43281,9 @@ - + - + @@ -43205,7 +43291,6 @@ - @@ -43249,7 +43334,6 @@ - @@ -43273,6 +43357,7 @@ + @@ -43300,8 +43385,8 @@ - + @@ -43330,7 +43415,6 @@ - @@ -43351,6 +43435,7 @@ + @@ -43402,6 +43487,7 @@ + @@ -43421,7 +43507,6 @@ - @@ -43437,6 +43522,7 @@ + @@ -43507,7 +43593,6 @@ - @@ -43521,12 +43606,12 @@ - + @@ -43558,11 +43643,10 @@ - - + @@ -43585,7 +43669,7 @@ - + @@ -43608,7 +43692,7 @@ - + @@ -43651,7 +43735,7 @@ - + @@ -43708,6 +43792,7 @@ + @@ -43717,6 +43802,7 @@ + @@ -44043,6 
+44129,7 @@ + @@ -44057,6 +44144,7 @@ + @@ -44064,8 +44152,10 @@ + + @@ -44103,7 +44193,7 @@ - + @@ -44148,7 +44238,7 @@ - + @@ -44167,9 +44257,9 @@ + - @@ -44185,7 +44275,6 @@ - @@ -44198,7 +44287,6 @@ - @@ -44232,6 +44320,7 @@ + @@ -44386,6 +44475,7 @@ + @@ -44405,7 +44495,6 @@ - @@ -44471,7 +44560,7 @@ - + @@ -44492,14 +44581,12 @@ - - @@ -44534,7 +44621,6 @@ - @@ -44547,6 +44633,7 @@ + @@ -44712,14 +44799,12 @@ - - @@ -44843,6 +44928,7 @@ + @@ -44885,6 +44971,7 @@ + @@ -44934,6 +45021,7 @@ + @@ -44969,7 +45057,6 @@ - @@ -45107,7 +45194,7 @@ - + @@ -45133,6 +45220,7 @@ + @@ -45142,11 +45230,12 @@ - + + @@ -45181,10 +45270,11 @@ + - + @@ -45290,7 +45380,6 @@ - @@ -45360,6 +45449,7 @@ + @@ -45367,7 +45457,6 @@ - @@ -45455,7 +45544,6 @@ - @@ -45540,7 +45628,6 @@ - @@ -45554,7 +45641,7 @@ - + @@ -45603,6 +45690,7 @@ + @@ -45629,6 +45717,7 @@ + @@ -45661,6 +45750,7 @@ + @@ -45689,6 +45779,7 @@ + @@ -45723,7 +45814,7 @@ - + @@ -45761,6 +45852,7 @@ + @@ -45773,7 +45865,7 @@ - + @@ -45870,6 +45962,7 @@ + @@ -45881,6 +45974,7 @@ + @@ -45932,6 +46026,7 @@ + @@ -45957,6 +46052,7 @@ + @@ -45976,6 +46072,7 @@ + @@ -45995,6 +46092,7 @@ + @@ -46017,6 +46115,7 @@ + @@ -46045,6 +46144,7 @@ + @@ -46084,6 +46184,7 @@ + @@ -46142,7 +46243,7 @@ - + @@ -46212,7 +46313,6 @@ - @@ -46256,10 +46356,8 @@ - - @@ -46287,7 +46385,7 @@ - + @@ -46306,17 +46404,19 @@ + - + + @@ -46333,7 +46433,6 @@ - @@ -46410,7 +46509,6 @@ - @@ -46447,7 +46545,6 @@ - @@ -46492,7 +46589,6 @@ - @@ -46612,7 +46708,6 @@ - @@ -46650,6 +46745,7 @@ + @@ -46726,6 +46822,7 @@ + @@ -46768,6 +46865,7 @@ + @@ -46786,6 +46884,7 @@ + @@ -46834,6 +46933,7 @@ + @@ -46927,7 +47027,7 @@ - + @@ -47146,6 +47246,7 @@ + @@ -47176,6 +47277,7 @@ + @@ -47183,7 +47285,6 @@ - @@ -47340,7 +47441,6 @@ - @@ -47354,7 +47454,7 @@ - + @@ -47379,10 +47479,12 @@ + + @@ -47403,7 +47505,7 @@ - + @@ -47415,7 +47517,6 @@ - @@ -47485,6 +47586,7 @@ + @@ -47538,7 +47640,6 @@ - @@ -47571,6 +47672,7 @@ + @@ -47628,7 +47730,7 @@ - + @@ -47746,7 +47848,7 
@@ - + @@ -47783,7 +47885,7 @@ - + @@ -47801,7 +47903,6 @@ - @@ -47891,7 +47992,7 @@ - + @@ -47915,7 +48016,6 @@ - @@ -48013,6 +48113,7 @@ + @@ -48083,7 +48184,7 @@ - + @@ -48115,7 +48216,6 @@ - @@ -48134,6 +48234,7 @@ + @@ -48161,6 +48262,7 @@ + @@ -48241,7 +48343,7 @@ - + @@ -48285,6 +48387,7 @@ + @@ -48312,6 +48415,7 @@ + @@ -48394,7 +48498,6 @@ - @@ -48523,6 +48626,7 @@ + @@ -48722,7 +48826,6 @@ - @@ -48744,16 +48847,16 @@ + - - + @@ -48794,6 +48897,7 @@ + @@ -48857,6 +48961,7 @@ + @@ -48926,6 +49031,7 @@ + @@ -48983,6 +49089,7 @@ + @@ -49021,7 +49128,6 @@ - @@ -49087,6 +49193,7 @@ + @@ -49095,7 +49202,6 @@ - @@ -49399,6 +49505,7 @@ + @@ -49432,7 +49539,6 @@ - @@ -49454,7 +49560,7 @@ - + @@ -49507,7 +49613,6 @@ - @@ -49591,7 +49696,6 @@ - @@ -49693,6 +49797,7 @@ + @@ -49718,6 +49823,7 @@ + @@ -49776,10 +49882,10 @@ + - @@ -49818,7 +49924,6 @@ - @@ -49838,6 +49943,7 @@ + @@ -49879,6 +49985,7 @@ + @@ -49999,10 +50106,9 @@ - - + @@ -50022,6 +50128,7 @@ + @@ -50043,7 +50150,6 @@ - @@ -50064,7 +50170,9 @@ + + @@ -50078,7 +50186,6 @@ - @@ -50130,6 +50237,7 @@ + @@ -50147,6 +50255,7 @@ + @@ -50222,8 +50331,7 @@ - - + @@ -50273,7 +50381,6 @@ - @@ -50289,6 +50396,7 @@ + @@ -50343,7 +50451,7 @@ - + @@ -50388,7 +50496,7 @@ - + @@ -50438,8 +50546,8 @@ - + @@ -50489,6 +50597,7 @@ + @@ -50512,7 +50621,7 @@ - + @@ -50557,7 +50666,6 @@ - @@ -50589,6 +50697,7 @@ + @@ -50599,7 +50708,6 @@ - @@ -50609,6 +50717,7 @@ + @@ -50631,7 +50740,7 @@ - + @@ -50671,9 +50780,8 @@ - + - @@ -50694,6 +50802,7 @@ + @@ -50748,7 +50857,6 @@ - @@ -50758,6 +50866,7 @@ + @@ -50825,7 +50934,6 @@ - @@ -50835,7 +50943,6 @@ - @@ -50897,6 +51004,7 @@ + @@ -50958,7 +51066,7 @@ - + @@ -50990,6 +51098,7 @@ + @@ -50998,7 +51107,7 @@ - + @@ -51021,6 +51130,7 @@ + @@ -51098,7 +51208,6 @@ - @@ -51133,6 +51242,7 @@ + @@ -51208,6 +51318,7 @@ + @@ -51232,6 +51343,7 @@ + @@ -51296,7 +51408,6 @@ - @@ -51312,7 +51423,6 @@ - @@ -51330,11 +51440,9 @@ - - @@ -51464,7 +51572,7 @@ - + @@ -51493,6 +51601,7 @@ + @@ -51543,7 
+51652,6 @@ - @@ -51570,7 +51678,7 @@ - + @@ -51610,6 +51718,7 @@ + @@ -51644,7 +51753,7 @@ - + @@ -51659,7 +51768,6 @@ - @@ -51670,7 +51778,6 @@ - @@ -51709,7 +51816,7 @@ - + @@ -51725,7 +51832,7 @@ - + @@ -51778,7 +51885,7 @@ - + @@ -51960,6 +52067,7 @@ + @@ -51982,6 +52090,7 @@ + @@ -52014,7 +52123,7 @@ - + @@ -52231,6 +52340,7 @@ + @@ -52291,12 +52401,13 @@ - + + @@ -52307,7 +52418,7 @@ - + @@ -52388,6 +52499,7 @@ + @@ -52407,6 +52519,7 @@ + @@ -52450,7 +52563,7 @@ - + @@ -52471,7 +52584,6 @@ - @@ -52483,7 +52595,6 @@ - @@ -52656,7 +52767,7 @@ - + @@ -52743,7 +52854,7 @@ - + @@ -52773,7 +52884,6 @@ - @@ -52791,7 +52901,7 @@ - + @@ -52806,6 +52916,8 @@ + + @@ -52903,7 +53015,6 @@ - @@ -53106,7 +53217,6 @@ - @@ -53149,7 +53259,7 @@ - + @@ -53194,6 +53304,7 @@ + @@ -53229,6 +53340,7 @@ + @@ -53254,7 +53366,7 @@ - + @@ -53413,6 +53525,7 @@ + @@ -53439,6 +53552,7 @@ + @@ -53560,7 +53674,7 @@ - + @@ -53643,6 +53757,7 @@ + @@ -53671,6 +53786,7 @@ + @@ -53683,6 +53799,7 @@ + @@ -53701,7 +53818,7 @@ - + @@ -53773,7 +53890,6 @@ - @@ -53843,7 +53959,6 @@ - @@ -53915,7 +54030,6 @@ - @@ -53978,6 +54092,7 @@ + @@ -54005,6 +54120,7 @@ + @@ -54048,7 +54164,7 @@ - + @@ -54062,7 +54178,6 @@ - @@ -54158,7 +54273,6 @@ - @@ -54182,7 +54296,7 @@ - + @@ -54287,7 +54401,6 @@ - @@ -54300,6 +54413,7 @@ + @@ -54309,6 +54423,7 @@ + @@ -54325,7 +54440,6 @@ - @@ -54379,6 +54493,7 @@ + @@ -54450,9 +54565,9 @@ + - @@ -54479,6 +54594,7 @@ + @@ -54518,7 +54634,7 @@ - + @@ -54543,6 +54659,7 @@ + @@ -54562,6 +54679,7 @@ + @@ -54648,7 +54766,6 @@ - @@ -54662,7 +54779,6 @@ - @@ -54720,6 +54836,7 @@ + @@ -54735,7 +54852,7 @@ - + @@ -54759,6 +54876,7 @@ + @@ -54853,7 +54971,7 @@ - + @@ -54916,6 +55034,7 @@ + @@ -54998,6 +55117,7 @@ + @@ -55039,7 +55159,7 @@ - + @@ -55059,6 +55179,7 @@ + @@ -55068,6 +55189,7 @@ + @@ -55093,7 +55215,6 @@ - @@ -55131,8 +55252,7 @@ - - + @@ -55145,6 +55265,7 @@ + @@ -55195,6 +55316,7 @@ + @@ -55222,6 +55344,7 @@ + @@ -55252,7 +55375,7 @@ - + @@ -55304,6 +55427,7 @@ + @@ 
-55315,7 +55439,7 @@ - + @@ -55358,7 +55482,7 @@ - + @@ -55367,7 +55491,6 @@ - @@ -55389,6 +55512,7 @@ + @@ -55404,7 +55528,6 @@ - @@ -55441,7 +55564,7 @@ - + @@ -55477,9 +55600,8 @@ - + - @@ -55507,6 +55629,7 @@ + @@ -55563,7 +55686,6 @@ - @@ -55604,7 +55726,9 @@ + + @@ -55653,6 +55777,7 @@ + @@ -55660,6 +55785,7 @@ + @@ -55717,6 +55843,7 @@ + @@ -55808,6 +55935,7 @@ + @@ -55824,7 +55952,6 @@ - @@ -55853,7 +55980,7 @@ - + @@ -55862,6 +55989,7 @@ + @@ -55918,7 +56046,7 @@ - + @@ -56010,7 +56138,6 @@ - @@ -56048,7 +56175,7 @@ - + @@ -56057,6 +56184,7 @@ + @@ -56122,7 +56250,7 @@ - + @@ -56201,6 +56329,7 @@ + @@ -56245,7 +56374,6 @@ - @@ -56321,7 +56449,7 @@ - + @@ -56367,7 +56495,7 @@ - + @@ -56385,6 +56513,7 @@ + @@ -56412,6 +56541,7 @@ + @@ -56569,6 +56699,7 @@ + @@ -56710,6 +56841,7 @@ + @@ -56723,7 +56855,7 @@ - + @@ -56747,7 +56879,7 @@ - + @@ -56801,9 +56933,10 @@ - + + @@ -56817,11 +56950,13 @@ + + @@ -56848,6 +56983,7 @@ + @@ -56988,7 +57124,7 @@ - + @@ -56996,7 +57132,6 @@ - @@ -57009,10 +57144,9 @@ - + - @@ -57054,6 +57188,7 @@ + @@ -57113,7 +57248,7 @@ - + @@ -57175,6 +57310,8 @@ + + @@ -57215,16 +57352,15 @@ - + - @@ -57263,12 +57399,14 @@ + + @@ -57289,6 +57427,7 @@ + @@ -57376,7 +57515,7 @@ - + @@ -57459,7 +57598,6 @@ - @@ -57552,6 +57690,7 @@ + @@ -57647,12 +57786,15 @@ + + + @@ -57746,7 +57888,7 @@ - + @@ -57757,7 +57899,6 @@ - @@ -57919,7 +58060,7 @@ - + @@ -57946,8 +58087,7 @@ - - + @@ -57964,6 +58104,7 @@ + @@ -58033,6 +58174,7 @@ + @@ -58071,7 +58213,6 @@ - @@ -58090,7 +58231,6 @@ - @@ -58139,6 +58279,7 @@ + @@ -58146,8 +58287,8 @@ - + @@ -58267,6 +58408,7 @@ + @@ -58368,9 +58510,11 @@ + + @@ -58378,7 +58522,6 @@ - @@ -58422,7 +58565,6 @@ - @@ -58500,12 +58642,10 @@ - - @@ -58555,6 +58695,7 @@ + @@ -58563,7 +58704,7 @@ - + @@ -58588,7 +58729,7 @@ - + @@ -58642,6 +58783,7 @@ + @@ -58671,6 +58813,7 @@ + @@ -58693,7 +58836,6 @@ - @@ -58756,7 +58898,6 @@ - @@ -58775,6 +58916,7 @@ + @@ -58792,7 +58934,6 @@ - @@ -58879,6 +59020,7 @@ + @@ -58894,6 
+59036,7 @@ + @@ -58908,7 +59051,8 @@ - + + @@ -58937,7 +59081,7 @@ - + @@ -59085,6 +59229,7 @@ + @@ -59140,7 +59285,6 @@ - @@ -59248,6 +59392,7 @@ + @@ -59269,7 +59414,6 @@ - @@ -59329,7 +59473,7 @@ - + @@ -59364,7 +59508,7 @@ - + @@ -59374,7 +59518,7 @@ - + @@ -59457,6 +59601,7 @@ + @@ -59515,6 +59660,7 @@ + @@ -59542,7 +59688,7 @@ - + @@ -59601,9 +59747,9 @@ - + @@ -59694,12 +59840,12 @@ - + - + @@ -59849,7 +59995,7 @@ - + @@ -59873,6 +60019,7 @@ + @@ -59974,7 +60121,7 @@ - + @@ -60041,7 +60188,6 @@ - @@ -60056,6 +60202,7 @@ + @@ -60128,6 +60275,7 @@ + @@ -60221,7 +60369,6 @@ - @@ -60253,7 +60400,7 @@ - + @@ -60267,13 +60414,13 @@ - + @@ -60314,7 +60461,6 @@ - @@ -60333,7 +60479,7 @@ - + @@ -60346,7 +60492,7 @@ - + @@ -60407,7 +60553,6 @@ - @@ -60516,10 +60661,10 @@ - + - + @@ -60537,7 +60682,6 @@ - @@ -60652,6 +60796,7 @@ + @@ -60731,7 +60876,7 @@ - + @@ -60780,6 +60925,7 @@ + @@ -60801,7 +60947,6 @@ - @@ -60847,7 +60992,7 @@ - + @@ -60954,6 +61099,7 @@ + @@ -61072,7 +61218,7 @@ - + @@ -61122,6 +61268,7 @@ + @@ -61172,7 +61319,7 @@ - + @@ -61207,6 +61354,7 @@ + @@ -61215,7 +61363,6 @@ - @@ -61223,7 +61370,6 @@ - @@ -61329,6 +61475,7 @@ + @@ -61396,6 +61543,7 @@ + @@ -61440,6 +61588,7 @@ + @@ -61522,7 +61671,7 @@ - + @@ -61540,11 +61689,12 @@ + - + @@ -61565,6 +61715,7 @@ + @@ -61592,12 +61743,12 @@ - + @@ -61653,7 +61804,7 @@ - + @@ -61670,7 +61821,6 @@ - @@ -61685,6 +61835,7 @@ + @@ -61717,7 +61868,7 @@ - + @@ -61786,6 +61937,7 @@ + @@ -61812,7 +61964,7 @@ - + @@ -61852,6 +62004,7 @@ + @@ -61895,6 +62048,7 @@ + @@ -61962,7 +62116,6 @@ - @@ -62024,6 +62177,7 @@ + @@ -62061,7 +62215,6 @@ - @@ -62162,7 +62315,7 @@ - + @@ -62224,7 +62377,6 @@ - @@ -62255,6 +62407,7 @@ + @@ -62270,12 +62423,12 @@ + - @@ -62345,7 +62498,6 @@ - @@ -62428,6 +62580,7 @@ + @@ -62453,7 +62606,7 @@ - + @@ -62468,7 +62621,6 @@ - @@ -62493,7 +62645,7 @@ - + @@ -62670,7 +62822,7 @@ - + @@ -62679,7 +62831,7 @@ - + @@ -62690,12 +62842,12 @@ + - @@ -62707,6 +62859,7 @@ + @@ -62969,7 +63122,6 @@ 
- @@ -63017,6 +63169,7 @@ + @@ -63026,6 +63179,7 @@ + @@ -63090,7 +63244,6 @@ - @@ -63099,7 +63252,7 @@ - + @@ -63165,7 +63318,7 @@ - + @@ -63319,6 +63472,7 @@ + @@ -63467,6 +63621,7 @@ + @@ -63490,7 +63645,7 @@ - + @@ -63526,7 +63681,6 @@ - @@ -63534,20 +63688,20 @@ - + - + @@ -63577,7 +63731,6 @@ - @@ -63595,6 +63748,7 @@ + @@ -63748,10 +63902,12 @@ + + @@ -63771,7 +63927,6 @@ - @@ -63800,8 +63955,9 @@ - + + @@ -63831,7 +63987,7 @@ - + @@ -63867,6 +64023,7 @@ + @@ -63987,8 +64144,10 @@ + + @@ -64076,6 +64235,7 @@ + @@ -64135,7 +64295,6 @@ - @@ -64179,6 +64338,7 @@ + @@ -64244,7 +64404,7 @@ - + @@ -64300,6 +64460,7 @@ + @@ -64345,6 +64506,7 @@ + @@ -64354,6 +64516,7 @@ + @@ -64511,6 +64674,7 @@ + @@ -64558,7 +64722,7 @@ - + @@ -64614,7 +64778,6 @@ - @@ -64629,13 +64792,14 @@ - + + @@ -64643,6 +64807,7 @@ + @@ -64710,6 +64875,7 @@ + @@ -64738,7 +64904,6 @@ - @@ -64760,6 +64925,7 @@ + @@ -64796,7 +64962,6 @@ - @@ -64836,6 +65001,7 @@ + @@ -64852,7 +65018,6 @@ - @@ -64912,6 +65077,7 @@ + @@ -64923,7 +65089,7 @@ - + @@ -65056,7 +65222,8 @@ - + + @@ -65075,6 +65242,7 @@ + @@ -65112,7 +65280,6 @@ - @@ -65157,7 +65324,6 @@ - @@ -65201,6 +65367,7 @@ + @@ -65210,9 +65377,9 @@ - + @@ -65226,6 +65393,7 @@ + @@ -65240,7 +65408,7 @@ - + @@ -65450,7 +65618,6 @@ - @@ -65462,7 +65629,6 @@ - @@ -65514,10 +65680,11 @@ + - + @@ -65575,7 +65742,6 @@ - @@ -65617,6 +65783,7 @@ + @@ -65885,7 +66052,7 @@ - + @@ -65936,7 +66103,7 @@ - + @@ -66033,7 +66200,6 @@ - @@ -66073,6 +66239,7 @@ + @@ -66216,7 +66383,6 @@ - @@ -66292,7 +66458,7 @@ - + @@ -66304,7 +66470,6 @@ - @@ -66324,10 +66489,12 @@ + + @@ -66366,7 +66533,6 @@ - @@ -66480,6 +66646,7 @@ + @@ -66563,6 +66730,7 @@ + @@ -66641,7 +66809,7 @@ - + @@ -66724,6 +66892,7 @@ + @@ -66763,6 +66932,7 @@ + @@ -66781,6 +66951,7 @@ + @@ -66796,7 +66967,6 @@ - @@ -66878,9 +67048,10 @@ - + + @@ -66964,6 +67135,7 @@ + @@ -66979,7 +67151,7 @@ - + @@ -67034,13 +67206,13 @@ - + @@ -67060,7 +67232,6 @@ - @@ -67091,6 +67262,7 @@ + @@ -67117,7 +67289,6 @@ 
- @@ -67125,6 +67296,7 @@ + @@ -67138,12 +67310,11 @@ - - + @@ -67155,14 +67326,15 @@ + - + @@ -67178,18 +67350,15 @@ - + - - - @@ -67221,6 +67390,7 @@ + @@ -67236,9 +67406,10 @@ - + + @@ -67302,7 +67473,6 @@ - @@ -67325,11 +67495,11 @@ - + @@ -67356,9 +67526,9 @@ + - @@ -67374,13 +67544,10 @@ - - - @@ -67394,7 +67561,6 @@ - @@ -67410,8 +67576,8 @@ + - @@ -67434,7 +67600,6 @@ - @@ -67442,15 +67607,15 @@ + - - + @@ -67483,7 +67648,6 @@ - @@ -67514,7 +67678,6 @@ - @@ -67541,7 +67704,6 @@ - @@ -67553,12 +67715,10 @@ - - @@ -67648,6 +67808,7 @@ + @@ -67658,7 +67819,6 @@ - @@ -67669,6 +67829,7 @@ + @@ -67677,7 +67838,6 @@ - @@ -67702,7 +67862,6 @@ - @@ -67721,10 +67880,9 @@ + - - @@ -67790,10 +67948,9 @@ - + - @@ -67811,6 +67968,7 @@ + @@ -67827,6 +67985,7 @@ + @@ -67935,6 +68094,7 @@ + @@ -67943,6 +68103,7 @@ + @@ -67955,10 +68116,10 @@ + - @@ -67967,12 +68128,14 @@ - + + + @@ -67987,10 +68150,10 @@ - + @@ -68003,11 +68166,11 @@ + - @@ -68078,11 +68241,13 @@ + + - + @@ -68094,10 +68259,11 @@ + + - @@ -68123,16 +68289,17 @@ + + - + - @@ -68204,7 +68371,6 @@ - @@ -68247,6 +68413,7 @@ + @@ -68265,7 +68432,6 @@ - @@ -68280,6 +68446,7 @@ + @@ -68290,6 +68457,7 @@ + @@ -68335,9 +68503,9 @@ - - + + @@ -68347,7 +68515,6 @@ - @@ -68355,9 +68522,11 @@ + + @@ -68371,7 +68540,6 @@ - @@ -68380,11 +68548,11 @@ - + @@ -68397,6 +68565,7 @@ + @@ -68424,6 +68593,8 @@ + + @@ -68451,6 +68622,7 @@ + @@ -68475,7 +68647,6 @@ - @@ -68487,9 +68658,7 @@ - - @@ -68507,17 +68676,17 @@ + - + - @@ -68541,6 +68710,7 @@ + @@ -68549,6 +68719,7 @@ + @@ -68556,10 +68727,11 @@ + + - @@ -68567,10 +68739,11 @@ - + + @@ -68601,7 +68774,6 @@ - @@ -68618,7 +68790,6 @@ - @@ -68635,7 +68806,6 @@ - @@ -68644,7 +68814,6 @@ - @@ -68652,6 +68821,7 @@ + @@ -68662,8 +68832,10 @@ + + @@ -68693,6 +68865,7 @@ + @@ -68712,6 +68885,7 @@ + @@ -68731,6 +68905,7 @@ + @@ -68738,7 +68913,6 @@ - @@ -68748,16 +68922,18 @@ + + + - @@ -68815,6 +68991,7 @@ + @@ -68833,7 +69010,6 @@ - @@ -68847,13 +69023,15 @@ - + + + @@ -68861,6 
+69039,7 @@ + @@ -68874,15 +69053,14 @@ - - + @@ -68895,6 +69073,7 @@ + @@ -68912,6 +69091,7 @@ + @@ -68921,11 +69101,11 @@ + - @@ -68961,7 +69141,6 @@ - @@ -68972,16 +69151,17 @@ + - + @@ -68989,6 +69169,7 @@ + @@ -69023,7 +69204,6 @@ - @@ -69034,7 +69214,6 @@ - @@ -69078,12 +69257,12 @@ - + @@ -69103,6 +69282,7 @@ + @@ -69125,6 +69305,7 @@ + @@ -69137,9 +69318,9 @@ - + @@ -69160,8 +69341,10 @@ + + @@ -69189,9 +69372,9 @@ + - @@ -69227,7 +69410,6 @@ - @@ -69239,7 +69421,6 @@ - @@ -69257,17 +69438,17 @@ - - + + @@ -69282,6 +69463,7 @@ + @@ -69301,6 +69483,7 @@ + @@ -69327,7 +69510,6 @@ - @@ -69348,7 +69530,6 @@ - @@ -69357,18 +69538,17 @@ + - - @@ -69397,7 +69577,7 @@ - + @@ -69431,6 +69611,7 @@ + @@ -69465,12 +69646,12 @@ + - @@ -69483,9 +69664,9 @@ - + @@ -69508,6 +69689,7 @@ + @@ -69526,7 +69708,6 @@ - @@ -69558,11 +69739,13 @@ + + @@ -69571,6 +69754,7 @@ + @@ -69578,21 +69762,21 @@ + - - + + - @@ -69618,6 +69802,8 @@ + + @@ -69634,7 +69820,6 @@ - @@ -69644,8 +69829,8 @@ - + @@ -69660,15 +69845,12 @@ - - - @@ -69680,6 +69862,7 @@ + @@ -69695,13 +69878,11 @@ - - @@ -69729,18 +69910,16 @@ + - - - @@ -69749,9 +69928,9 @@ - + @@ -69765,6 +69944,7 @@ + @@ -69780,8 +69960,8 @@ + - @@ -69790,6 +69970,7 @@ + @@ -69813,16 +69994,15 @@ - + - @@ -69835,12 +70015,14 @@ + + @@ -69856,6 +70038,7 @@ + @@ -69891,6 +70074,7 @@ + @@ -69900,7 +70084,6 @@ - @@ -69911,11 +70094,13 @@ + + @@ -69923,7 +70108,6 @@ - @@ -69960,7 +70144,6 @@ - @@ -70001,14 +70184,13 @@ - + - @@ -70037,7 +70219,6 @@ - @@ -70045,6 +70226,7 @@ + @@ -70067,6 +70249,7 @@ + @@ -70091,6 +70274,7 @@ + @@ -70113,7 +70297,6 @@ - @@ -70130,7 +70313,6 @@ - @@ -70145,7 +70327,6 @@ - @@ -70223,7 +70404,6 @@ - diff --git a/Engine/Config/BaseEngine.ini b/Engine/Config/BaseEngine.ini index f1d0e9574d02..053e2e7fcecc 100644 --- a/Engine/Config/BaseEngine.ini +++ b/Engine/Config/BaseEngine.ini @@ -106,6 +106,8 @@ EditorBrushMaterialName=/Engine/EngineMaterials/EditorBrushMaterial.EditorBrushM 
DefaultPhysMaterialName=/Engine/EngineMaterials/DefaultPhysicalMaterial.DefaultPhysicalMaterial DefaultDeferredDecalMaterialName=/Engine/EngineMaterials/DefaultDeferredDecalMaterial.DefaultDeferredDecalMaterial DefaultPostProcessMaterialName=/Engine/EngineMaterials/DefaultPostProcessMaterial.DefaultPostProcessMaterial +TimecodeProviderClassName=None +DefaultTimecodeProviderClassName=/Script/Engine.SystemTimeTimecodeProvider TextureStreamingBoundsMaterialName=/Engine/EditorMaterials/Utilities/TextureStreamingBounds_MATInst.TextureStreamingBounds_MATInst ArrowMaterialName=/Engine/EditorMaterials/GizmoMaterial.GizmoMaterial bAllowHostMigration=false diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkClient.cpp b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkClient.cpp index 8554fd43e838..69cbc40a8131 100644 --- a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkClient.cpp +++ b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkClient.cpp @@ -5,7 +5,10 @@ #include "UObject/UObjectHash.h" #include "LiveLinkSourceFactory.h" #include "Misc/Guid.h" +#include "Misc/App.h" #include "UObject/Package.h" +#include "UObject/Class.h" +#include "TimeSynchronizationSource.h" DEFINE_LOG_CATEGORY(LogLiveLink); @@ -19,7 +22,7 @@ FLiveLinkCurveIntegrationData FLiveLinkCurveKey::UpdateCurveKey(const TArray 0) + { + for (FLiveLinkFrame& Frame : Frames) + { + Frame.ExtendCurveData(IntegrationData.NumNewCurves); + } + } +} + +int32 FLiveLinkSubject::AddFrame_Default(const FLiveLinkWorldTime& WorldTime, bool bSaveFrame) +{ + if (!bSaveFrame && WorldTime.Time < LastReadTime) { //Gone back in time Frames.Reset(); LastReadTime = 0; - SubjectTimeOffset = FrameData.WorldTime.Offset; + SubjectTimeOffset = WorldTime.Offset; } + int32 FrameIndex = 0; if (Frames.Num() == 0) { - Frames.AddDefaulted(); - NewFrame = &Frames[0]; LastReadFrame = 0; } else @@ -92,36 +135,78 @@ void FLiveLinkSubject::AddFrame(const 
FLiveLinkFrameData& FrameData, FGuid Frame LastReadFrame = 0; } - int32 FrameIndex = Frames.Num() - 1; - - for (; FrameIndex >= 0; --FrameIndex) + for (FrameIndex = Frames.Num() - 1; FrameIndex >= 0; --FrameIndex) { - if (Frames[FrameIndex].WorldTime.Time <= FrameData.WorldTime.Time) + if (Frames[FrameIndex].WorldTime.Time <= WorldTime.Time) { break; } } - int32 NewFrameIndex = Frames.Insert(FLiveLinkFrame(), FrameIndex + 1); - NewFrame = &Frames[NewFrameIndex]; + FrameIndex += 1; } - FLiveLinkCurveIntegrationData IntegrationData = CurveKeyData.UpdateCurveKey(FrameData.CurveElements); + return FrameIndex; +} - check(NewFrame); - NewFrame->Transforms = FrameData.Transforms; - NewFrame->Curves = MoveTemp(IntegrationData.CurveValues); - NewFrame->MetaData = FrameData.MetaData; - NewFrame->WorldTime = FrameData.WorldTime; +int32 FLiveLinkSubject::AddFrame_Interpolated(const FLiveLinkWorldTime& WorldTime, bool bSaveFrame) +{ + return AddFrame_Default(WorldTime, bSaveFrame); +} - // update existing curves - if (IntegrationData.NumNewCurves > 0) +int32 FLiveLinkSubject::AddFrame_TimeSynchronized(const FFrameTime& FrameTime, bool bSaveFrame) +{ + int32 FrameIndex = 0; + + const FLiveLinkTimeSynchronizationData& TimeSyncDataLocal = TimeSyncData.GetValue(); + + // If we're not actively synchronizing, we don't need to do anything special. 
+ if (Frames.Num() == 0) { - for (FLiveLinkFrame& Frame : Frames) - { - Frame.ExtendCurveData(IntegrationData.NumNewCurves); - } + LastReadTime = 0; + LastReadFrame = 0; } + else if (TimeSyncData->RolloverModulus.IsSet()) + { + const FFrameTime UseFrameTime = UTimeSynchronizationSource::AddOffsetWithRolloverModulus(FrameTime, TimeSyncDataLocal.Offset, TimeSyncDataLocal.RolloverModulus.GetValue()); + FrameIndex = AddFrame_TimeSynchronized(UseFrameTime, (!TimeSyncDataLocal.bHasEstablishedSync) || bSaveFrame); + } + else + { + FrameIndex = AddFrame_TimeSynchronized(FrameTime + TimeSyncDataLocal.Offset, (!TimeSyncDataLocal.bHasEstablishedSync) || bSaveFrame); + } + + return FrameIndex; +} + +template +int32 FLiveLinkSubject::AddFrame_TimeSynchronized(const FFrameTime& FrameTime, bool bSaveFrame) +{ + if (!bSaveFrame && (LastReadFrame > MIN_FRAMES_TO_REMOVE)) + { + check(Frames.Num() > LastReadFrame); + + if (bWithRollover) + { + int32& RolloverFrame = TimeSyncData->RolloverFrame; + + // If we had previously detected that a roll over had occurred in the range of frames we have, + // then we need to adjust that as well. 
+ if (RolloverFrame > 0) + { + RolloverFrame = RolloverFrame - LastReadFrame; + if (RolloverFrame <= 0) + { + RolloverFrame = INDEX_NONE; + } + } + } + + Frames.RemoveAt(0, LastReadFrame, false); + LastReadFrame = 0; + } + + return FindFrameIndex_TimeSynchronized(FrameTime); } void FLiveLinkSubject::CopyFrameData(const FLiveLinkFrame& InFrame, FLiveLinkSubjectFrame& OutFrame) @@ -137,8 +222,7 @@ void FLiveLinkSubject::CopyFrameDataBlended(const FLiveLinkFrame& PreFrame, cons Blend(PreFrame.Curves, PostFrame.Curves, OutFrame.Curves, BlendWeight); } - -void FLiveLinkSubject::GetFrameAtWorldTime(const double InSeconds, FLiveLinkSubjectFrame& OutFrame) +void FLiveLinkSubject::ResetFrame(FLiveLinkSubjectFrame& OutFrame) const { OutFrame.RefSkeleton = RefSkeleton; OutFrame.RefSkeletonGuid = RefSkeletonGuid; @@ -147,56 +231,249 @@ void FLiveLinkSubject::GetFrameAtWorldTime(const double InSeconds, FLiveLinkSubj OutFrame.Transforms.Reset(); OutFrame.Curves.Reset(); OutFrame.MetaData.StringMetaData.Reset(); +} - if (!CachedInterpolationSettings.bUseInterpolation) +void FLiveLinkSubject::GetFrameAtWorldTime(const double InSeconds, FLiveLinkSubjectFrame& OutFrame) +{ + ResetFrame(OutFrame); + + switch (CachedSettings.SourceMode) { - CopyFrameData(Frames.Last(), OutFrame); - LastReadTime = Frames.Last().WorldTime.Time; - LastReadFrame = Frames.Num()-1; + case ELiveLinkSourceMode::TimeSynchronized: + ensureMsgf(false, TEXT("Attempting to use WorldTime for a TimeSynchronized source! 
Source = %s"), *Name.ToString()); + GetFrameAtWorldTime_Default(InSeconds, OutFrame); + break; + + case ELiveLinkSourceMode::Interpolated: + GetFrameAtWorldTime_Interpolated(InSeconds, OutFrame); + break; + + default: + GetFrameAtWorldTime_Default(InSeconds, OutFrame); + break; } - else +} + +void FLiveLinkSubject::GetFrameAtSceneTime(const FQualifiedFrameTime& InSceneTime, FLiveLinkSubjectFrame& OutFrame) +{ + ResetFrame(OutFrame); + + switch (CachedSettings.SourceMode) { - LastReadTime = (InSeconds - SubjectTimeOffset) - CachedInterpolationSettings.InterpolationOffset; + case ELiveLinkSourceMode::TimeSynchronized: - bool bBuiltFrame = false; - - for (int32 FrameIndex = Frames.Num() - 1; FrameIndex >= 0; --FrameIndex) + if (TimeSyncData.IsSet()) { - if (Frames[FrameIndex].WorldTime.Time < LastReadTime) + const FFrameTime FrameTime = InSceneTime.ConvertTo(CachedSettings.TimeSynchronizationSettings->FrameRate); + if (TimeSyncData->RolloverModulus.IsSet()) { - //Found Start frame + GetFrameAtSceneTime_TimeSynchronized(FrameTime, OutFrame); + } + else + { + GetFrameAtSceneTime_TimeSynchronized(FrameTime, OutFrame); + } + } + else + { + GetFrameAtWorldTime_Default(InSceneTime.AsSeconds(), OutFrame); + } + break; - if (FrameIndex == Frames.Num() - 1) + default: + ensureMsgf(false, TEXT("Attempting to use SceneTime for a non TimeSynchronized source! 
Source = %s Mode = %d"), *Name.ToString(), static_cast(CachedSettings.SourceMode)); + GetFrameAtWorldTime_Default(InSceneTime.AsSeconds(), OutFrame); + break; + } +} + +void FLiveLinkSubject::GetFrameAtWorldTime_Default(const double InSeconds, FLiveLinkSubjectFrame& OutFrame) +{ + CopyFrameData(Frames.Last(), OutFrame); + LastReadTime = Frames.Last().WorldTime.Time; + LastReadFrame = Frames.Num() - 1; +} + +void FLiveLinkSubject::GetFrameAtWorldTime_Interpolated(const double InSeconds, FLiveLinkSubjectFrame& OutFrame) +{ + LastReadTime = (InSeconds - SubjectTimeOffset) - CachedSettings.InterpolationSettings->InterpolationOffset; + + bool bBuiltFrame = false; + + for (int32 FrameIndex = Frames.Num() - 1; FrameIndex >= 0; --FrameIndex) + { + if (Frames[FrameIndex].WorldTime.Time < LastReadTime) + { + //Found Start frame + + if (FrameIndex == Frames.Num() - 1) + { + LastReadFrame = FrameIndex; + CopyFrameData(Frames[FrameIndex], OutFrame); + bBuiltFrame = true; + break; + } + else + { + LastReadFrame = FrameIndex; + const FLiveLinkFrame& PreFrame = Frames[FrameIndex]; + const FLiveLinkFrame& PostFrame = Frames[FrameIndex + 1]; + + // Calc blend weight (Amount through frame gap / frame gap) + const float BlendWeight = (LastReadTime - PreFrame.WorldTime.Time) / (PostFrame.WorldTime.Time - PreFrame.WorldTime.Time); + + CopyFrameDataBlended(PreFrame, PostFrame, BlendWeight, OutFrame); + + bBuiltFrame = true; + break; + } + } + } + + if (!bBuiltFrame) + { + LastReadFrame = 0; + // Failed to find an interp point so just take earliest frame + CopyFrameData(Frames[0], OutFrame); + } +} + +template +void FLiveLinkSubject::GetFrameAtSceneTime_TimeSynchronized(const FFrameTime& InTime, FLiveLinkSubjectFrame& OutFrame) +{ + const int32 UseFrame = FindFrameIndex_TimeSynchronized(InTime); + CopyFrameData(Frames[UseFrame], OutFrame); + LastReadTime = Frames[UseFrame].WorldTime.Time; + LastReadFrame = UseFrame; +} + +template +int32 
FLiveLinkSubject::FindFrameIndex_TimeSynchronized(const FFrameTime& FrameTime) +{ + if (Frames.Num() == 0) + { + return 0; + } + + FLiveLinkTimeSynchronizationData& TimeSyncDataLocal = TimeSyncData.GetValue(); + + // Preroll / Synchronization should handle the case where there are any time skips by simply clearing out the buffered data. + // Therefore, there are only 2 cases where time would go backwards: + // 1. We've received frames out of order. In this case, we want to push it backwards. + // 2. We've rolled over. In that case, value have wrapped around zero (and appear "smaller") but should be treated as newer. + + // Further, when we're not inserting a value, we're guaranteed that the frame time should always go up + // (or stay the same). So, in that case we only need to search between our LastReadFrameTime and the Newest Frame. + // That assumption will break if external code tries to grab anything other than the frame of data we build internally. + + // Finally, we only update the RolloverFrame value when inserting values. This is because we may query for a rollover frame + // before we receive a rollover frame (in the case of missing or unordered frames). + // We generally don't want to modify state if we're just reading data. + + int32 HighFrame = Frames.Num() - 1; + int32 LowFrame = bForInsert ? 0 : LastReadFrame; + int32 FrameIndex = HighFrame; + + if (bWithRollover) + { + bool bDidRollover = false; + int32& RolloverFrame = TimeSyncDataLocal.RolloverFrame; + const FFrameTime& CompareFrameTime = ((RolloverFrame == INDEX_NONE) ? 
Frames.Last() : Frames[RolloverFrame - 1]).MetaData.SceneTime.Time; + UTimeSynchronizationSource::FindDistanceBetweenFramesWithRolloverModulus(CompareFrameTime, FrameTime, TimeSyncDataLocal.RolloverModulus.GetValue(), bDidRollover); + + if (RolloverFrame == INDEX_NONE) + { + if (bDidRollover) + { + if (bForInsert) { - LastReadFrame = FrameIndex; - CopyFrameData(Frames[FrameIndex], OutFrame); - bBuiltFrame = true; - break; + RolloverFrame = HighFrame; + FrameIndex = Frames.Num(); } else { - LastReadFrame = FrameIndex; - const FLiveLinkFrame& PreFrame = Frames[FrameIndex]; - const FLiveLinkFrame& PostFrame = Frames[FrameIndex + 1]; + FrameIndex = HighFrame; + } - // Calc blend weight (Amount through frame gap / frame gap) - const float BlendWeight = (LastReadTime - PreFrame.WorldTime.Time) / (PostFrame.WorldTime.Time - PreFrame.WorldTime.Time); - - CopyFrameDataBlended(PreFrame, PostFrame, BlendWeight, OutFrame); - - bBuiltFrame = true; - break; + return FrameIndex; + } + } + else + { + if (bDidRollover) + { + LowFrame = RolloverFrame; + } + else + { + HighFrame = RolloverFrame - 1; + if (bForInsert) + { + ++RolloverFrame; } } } + } - if (!bBuiltFrame) + if (bForInsert) + { + for (; LowFrame <= FrameIndex && Frames[FrameIndex].MetaData.SceneTime.Time > FrameTime; --FrameIndex); + FrameIndex += 1; + } + else + { + for (; LowFrame < FrameIndex && Frames[FrameIndex].MetaData.SceneTime.Time > FrameTime; --FrameIndex); + } + + return FrameIndex; +} + +void FLiveLinkSubject::ClearFrames() +{ + LastReadFrame = INDEX_NONE; + LastReadTime = 0; + Frames.Reset(); +} + +void FLiveLinkSubject::CacheSourceSettings(const ULiveLinkSourceSettings* Settings) +{ + check(IsInGameThread()); + + const bool bSourceModeChanged = Settings->Mode != CachedSettings.SourceMode; + if (bSourceModeChanged) + { + ClearFrames(); + CachedSettings.TimeSynchronizationSettings.Reset(); + CachedSettings.InterpolationSettings.Reset(); + + switch (CachedSettings.SourceMode) { - LastReadFrame = 0; - // 
Failed to find an interp point so just take earliest frame - CopyFrameData(Frames[0], OutFrame); + case ELiveLinkSourceMode::TimeSynchronized: + TimeSyncData.Reset(); + break; + + default: + break; } } + + CachedSettings.SourceMode = Settings->Mode; + + // Even if the mode didn't change, settings may have updated. + // Handle those changes now. + switch (CachedSettings.SourceMode) + { + case ELiveLinkSourceMode::TimeSynchronized: + CachedSettings.TimeSynchronizationSettings = Settings->TimeSynchronizationSettings; + break; + + case ELiveLinkSourceMode::Interpolated: + CachedSettings.InterpolationSettings = Settings->InterpolationSettings; + break; + + default: + break; + } } FLiveLinkClient::~FLiveLinkClient() @@ -218,7 +495,7 @@ FLiveLinkClient::~FLiveLinkClient() for (int32 Idx = ToRemove.Num() - 1; Idx >= 0; --Idx) { - Sources.RemoveAtSwap(ToRemove[Idx],1,false); + Sources.RemoveAtSwap(ToRemove[Idx], 1, false); } } } @@ -254,7 +531,7 @@ void FLiveLinkClient::ValidateSources() } } - for (int32 SourceIdx = SourcesToRemove.Num()-1; SourceIdx >= 0; --SourceIdx) + for (int32 SourceIdx = SourcesToRemove.Num() - 1; SourceIdx >= 0; --SourceIdx) { if (SourcesToRemove[SourceIdx]->RequestSourceShutdown()) { @@ -276,10 +553,14 @@ void FLiveLinkClient::BuildThisTicksSubjectSnapshot() TArray OldSubjectSnapshotNames; ActiveSubjectSnapshots.GenerateKeyArray(OldSubjectSnapshotNames); - + const double CurrentInterpTime = FPlatformTime::Seconds(); // Set this up once, every subject // uses the same time + const FFrameRate FrameRate = FApp::GetTimecodeFrameRate(); + const FTimecode Timecode = FApp::GetTimecode(); + const FQualifiedFrameTime CurrentSyncTime(Timecode.ToFrameNumber(FrameRate), FrameRate); + { FScopeLock Lock(&SubjectDataAccessCriticalSection); @@ -290,10 +571,9 @@ void FLiveLinkClient::BuildThisTicksSubjectSnapshot() FLiveLinkSubject& SourceSubject = SubjectPair.Value; - FLiveLinkInterpolationSettings* SubjectInterpolationSettings = 
GetInterpolationSettingsForEntry(SourceSubject.LastModifier); - if (SubjectInterpolationSettings) + if (const ULiveLinkSourceSettings* Settings = GetSourceSettingsForEntry(SourceSubject.LastModifier)) { - SourceSubject.CachedInterpolationSettings = *SubjectInterpolationSettings; + SourceSubject.CacheSourceSettings(Settings); } if (SourceSubject.Frames.Num() > 0) @@ -305,7 +585,14 @@ void FLiveLinkClient::BuildThisTicksSubjectSnapshot() SnapshotSubject = &ActiveSubjectSnapshots.Add(SubjectName); } - SourceSubject.GetFrameAtWorldTime(CurrentInterpTime, *SnapshotSubject); + if (SourceSubject.GetMode() == ELiveLinkSourceMode::TimeSynchronized) + { + SourceSubject.GetFrameAtSceneTime(CurrentSyncTime, *SnapshotSubject); + } + else + { + SourceSubject.GetFrameAtWorldTime(CurrentInterpTime, *SnapshotSubject); + } } } } @@ -313,7 +600,7 @@ void FLiveLinkClient::BuildThisTicksSubjectSnapshot() //Now that ActiveSubjectSnapshots is up to date we now need to build the virtual subject data for (TPair& SubjectPair : VirtualSubjects) { - if(SubjectPair.Value.GetSubjects().Num() > 0) + if (SubjectPair.Value.GetSubjects().Num() > 0) { const FName SubjectName = SubjectPair.Key; OldSubjectSnapshotNames.RemoveSingleSwap(SubjectName, false); @@ -381,7 +668,7 @@ void FLiveLinkClient::AddSource(TSharedPtr InSource) ULiveLinkSourceSettings* NewSettings = NewObject(GetTransientPackage(), SettingsClass); SourceSettings.Add(NewSettings); - + InSource->ReceiveClient(this, SourceGuids.Last()); InSource->InitializeSettings(NewSettings); @@ -443,7 +730,7 @@ void FLiveLinkClient::RemoveAllSources() void FLiveLinkClient::PushSubjectSkeleton(FGuid SourceGuid, FName SubjectName, const FLiveLinkRefSkeleton& RefSkeleton) { FScopeLock Lock(&SubjectDataAccessCriticalSection); - + if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) { Subject->Frames.Reset(); @@ -452,7 +739,7 @@ void FLiveLinkClient::PushSubjectSkeleton(FGuid SourceGuid, FName SubjectName, c } else { - 
LiveSubjectData.Emplace(SubjectName, FLiveLinkSubject(RefSkeleton)).LastModifier = SourceGuid; + LiveSubjectData.Emplace(SubjectName, FLiveLinkSubject(RefSkeleton, SubjectName)).LastModifier = SourceGuid; } } @@ -468,19 +755,16 @@ void FLiveLinkClient::ClearSubjectsFrames(FName SubjectName) FScopeLock Lock(&SubjectDataAccessCriticalSection); if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) { - Subject->Frames.Reset(); - Subject->LastReadTime = 0; + Subject->ClearFrames(); } - } void FLiveLinkClient::ClearAllSubjectsFrames() { FScopeLock Lock(&SubjectDataAccessCriticalSection); - for (TPair& LiveSubject : LiveSubjectData) + for (TPair& Subject : LiveSubjectData) { - LiveSubject.Value.Frames.Reset(); - LiveSubject.Value.LastReadTime = 0; + Subject.Value.ClearFrames(); } } @@ -511,7 +795,7 @@ const FLiveLinkSubjectFrame* FLiveLinkClient::GetSubjectDataAtWorldTime(FName Su FScopeLock Lock(&SubjectDataAccessCriticalSection); Subject = LiveSubjectData.Find(SubjectName); - + if (Subject != nullptr) { OutFrame = new FLiveLinkSubjectFrame(); @@ -526,6 +810,32 @@ const FLiveLinkSubjectFrame* FLiveLinkClient::GetSubjectDataAtWorldTime(FName Su return OutFrame; } +const FLiveLinkSubjectFrame* FLiveLinkClient::GetSubjectDataAtSceneTime(FName SubjectName, const FTimecode& Timecode) +{ + FLiveLinkSubjectFrame* OutFrame = nullptr; + + FLiveLinkSubject* Subject; + FScopeLock Lock(&SubjectDataAccessCriticalSection); + + Subject = LiveSubjectData.Find(SubjectName); + + if (Subject != nullptr) + { + const FFrameRate FrameRate = FApp::GetTimecodeFrameRate(); + const FQualifiedFrameTime UseTime(Timecode.ToFrameNumber(FrameRate), FrameRate); + + OutFrame = new FLiveLinkSubjectFrame(); + Subject->GetFrameAtSceneTime(UseTime, *OutFrame); + } + else + { + // Try Virtual Subjects + // TODO: Currently only works on real subjects + } + + return OutFrame; +} + const TArray* FLiveLinkClient::GetSubjectRawFrames(FName SubjectName) { FLiveLinkSubject* Subject; @@ -545,7 +855,7 @@ 
TArray FLiveLinkClient::GetSubjects() TArray SubjectEntries; { FScopeLock Lock(&SubjectDataAccessCriticalSection); - + SubjectEntries.Reserve(LiveSubjectData.Num() + VirtualSubjects.Num()); for (const TPair& LiveSubject : LiveSubjectData) @@ -562,6 +872,35 @@ TArray FLiveLinkClient::GetSubjects() return SubjectEntries; } +FLiveLinkSubjectTimeSyncData FLiveLinkClient::GetTimeSyncData(FName SubjectName) +{ + FScopeLock Lock(&SubjectDataAccessCriticalSection); + + FLiveLinkSubjectTimeSyncData SyncData; + if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) + { + SyncData = Subject->GetTimeSyncData(); + } + + return SyncData; +} + +FLiveLinkSubjectTimeSyncData FLiveLinkSubject::GetTimeSyncData() +{ + FLiveLinkSubjectTimeSyncData SyncData; + SyncData.bIsValid = Frames.Num() > 0; + SyncData.Settings = CachedSettings.TimeSynchronizationSettings.Get(FLiveLinkTimeSynchronizationSettings()); + + if (SyncData.bIsValid) + { + SyncData.NewestSampleTime = Frames.Last().MetaData.SceneTime.Time; + SyncData.OldestSampleTime = Frames[0].MetaData.SceneTime.Time; + SyncData.SkeletonGuid = RefSkeletonGuid; + } + + return SyncData; +} + void FLiveLinkClient::GetSubjectNames(TArray& SubjectNames) { SubjectNames.Reset(); @@ -621,7 +960,7 @@ FText FLiveLinkClient::GetSourceTypeForEntry(FGuid InEntryGuid) const { return Source->GetSourceType(); } - return FText(NSLOCTEXT("TempLocTextLiveLink","InvalidSourceType", "Invalid Source Type")); + return FText(NSLOCTEXT("TempLocTextLiveLink", "InvalidSourceType", "Invalid Source Type")); } FText FLiveLinkClient::GetMachineNameForEntry(FGuid InEntryGuid) const @@ -631,7 +970,7 @@ FText FLiveLinkClient::GetMachineNameForEntry(FGuid InEntryGuid) const { return Source->GetSourceMachineName(); } - return FText(NSLOCTEXT("TempLocTextLiveLink","InvalidSourceMachineName", "Invalid Source Machine Name")); + return FText(NSLOCTEXT("TempLocTextLiveLink", "InvalidSourceMachineName", "Invalid Source Machine Name")); } bool 
FLiveLinkClient::ShowSourceInUI(FGuid InEntryGuid) const @@ -656,7 +995,7 @@ FText FLiveLinkClient::GetEntryStatusForEntry(FGuid InEntryGuid) const { return Source->GetSourceStatus(); } - return FText(NSLOCTEXT("TempLocTextLiveLink","InvalidSourceStatus", "Invalid Source Status")); + return FText(NSLOCTEXT("TempLocTextLiveLink", "InvalidSourceStatus", "Invalid Source Status")); } FLiveLinkInterpolationSettings* FLiveLinkClient::GetInterpolationSettingsForEntry(FGuid InEntryGuid) @@ -684,7 +1023,7 @@ void FLiveLinkClient::UpdateVirtualSubjectProperties(const FLiveLinkSubjectKey& FLiveLinkVirtualSubject FLiveLinkClient::GetVirtualSubjectProperties(const FLiveLinkSubjectKey& SubjectKey) const { check(SubjectKey.Source == VirtualSubjectGuid); - + return VirtualSubjects.FindChecked(SubjectKey.SubjectName); } @@ -722,3 +1061,74 @@ FText FLiveLinkVirtualSubjectSource::GetSourceType() const return NSLOCTEXT("TempLocTextLiveLink", "LiveLinkVirtualSubjectName", "Virtual Subjects"); } +void FLiveLinkClient::OnStartSynchronization(FName SubjectName, const struct FTimeSynchronizationOpenData& OpenData, const int32 FrameOffset) +{ + FScopeLock Lock(&SubjectDataAccessCriticalSection); + if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) + { + Subject->OnStartSynchronization(OpenData, FrameOffset); + } +} + +void FLiveLinkClient::OnSynchronizationEstablished(FName SubjectName, const struct FTimeSynchronizationStartData& StartData) +{ + FScopeLock Lock(&SubjectDataAccessCriticalSection); + if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) + { + Subject->OnSynchronizationEstablished(StartData); + } +} + +void FLiveLinkClient::OnStopSynchronization(FName SubjectName) +{ + FScopeLock Lock(&SubjectDataAccessCriticalSection); + if (FLiveLinkSubject* Subject = LiveSubjectData.Find(SubjectName)) + { + Subject->OnStopSynchronization(); + } +} + +void FLiveLinkSubject::OnStartSynchronization(const FTimeSynchronizationOpenData& OpenData, const int32 
FrameOffset) +{ + if (ensure(CachedSettings.SourceMode == ELiveLinkSourceMode::TimeSynchronized)) + { + ensure(!TimeSyncData.IsSet()); + TimeSyncData = FLiveLinkTimeSynchronizationData(); + TimeSyncData->RolloverModulus = OpenData.RolloverFrame; + TimeSyncData->SyncFrameRate = OpenData.SynchronizationFrameRate; + TimeSyncData->Offset = FrameOffset; + + // Still need to check this, because OpenData.RolloverFrame is a TOptional which may be unset. + if (TimeSyncData->RolloverModulus.IsSet()) + { + TimeSyncData->RolloverModulus = FFrameRate::TransformTime(TimeSyncData->RolloverModulus.GetValue(), OpenData.SynchronizationFrameRate, CachedSettings.TimeSynchronizationSettings->FrameRate); + } + + ClearFrames(); + } + else + { + TimeSyncData.Reset(); + } +} + +void FLiveLinkSubject::OnSynchronizationEstablished(const struct FTimeSynchronizationStartData& StartData) +{ + if (ensure(CachedSettings.SourceMode == ELiveLinkSourceMode::TimeSynchronized)) + { + TimeSyncData->SyncStartTime = StartData.StartFrame; + TimeSyncData->bHasEstablishedSync = true; + + // Prevent buffers from being deleted if new data is pushed before we build snapshots. 
+ LastReadTime = 0.f; + LastReadFrame = 0.f; + } +} + +void FLiveLinkSubject::OnStopSynchronization() +{ + if (ensure(CachedSettings.SourceMode == ELiveLinkSourceMode::TimeSynchronized)) + { + TimeSyncData.Reset(); + } +} \ No newline at end of file diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkComponent.cpp b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkComponent.cpp index 5b73711f15b6..e2c4c44ad67f 100644 --- a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkComponent.cpp +++ b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/LiveLinkComponent.cpp @@ -95,3 +95,16 @@ void ULiveLinkComponent::GetSubjectDataAtWorldTime(const FName SubjectName, cons } } } + +void ULiveLinkComponent::GetSubjectDataAtSceneTime(const FName SubjectName, const FTimecode& SceneTime, bool& bSuccess, FSubjectFrameHandle& SubjectFrameHandle) +{ + bSuccess = false; + if (HasLiveLinkClient()) + { + if (const FLiveLinkSubjectFrame* SubjectFrame = LiveLinkClient->GetSubjectDataAtSceneTime(SubjectName, SceneTime)) + { + SubjectFrameHandle.SetCachedFrame(MakeShared(SubjectFrame)); + bSuccess = true; + } + } +} diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.cpp b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.cpp new file mode 100644 index 000000000000..02d3125ae764 --- /dev/null +++ b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.cpp @@ -0,0 +1,117 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "LiveLinkTimeSynchronizationSource.h" +#include "LiveLinkClient.h" +#include "Features/IModularFeatures.h" +#include "Math/NumericLimits.h" + +ULiveLinkTimeSynchronizationSource::ULiveLinkTimeSynchronizationSource() +{ + if (!HasAnyFlags(RF_ArchetypeObject | RF_ClassDefaultObject)) + { + IModularFeatures& ModularFeatures = IModularFeatures::Get(); + ModularFeatures.OnModularFeatureRegistered().AddUObject(this, &ThisClass::OnModularFeatureRegistered); + ModularFeatures.OnModularFeatureUnregistered().AddUObject(this, &ThisClass::OnModularFeatureUnregistered); + + if (ModularFeatures.IsModularFeatureAvailable(ILiveLinkClient::ModularFeatureName)) + { + LiveLinkClient = &ModularFeatures.GetModularFeature(ILiveLinkClient::ModularFeatureName); + } + } +} + +FFrameTime ULiveLinkTimeSynchronizationSource::GetNewestSampleTime() const +{ + UpdateCachedState(); + return CachedData.NewestSampleTime + FrameOffset; +} + +FFrameTime ULiveLinkTimeSynchronizationSource::GetOldestSampleTime() const +{ + UpdateCachedState(); + return CachedData.OldestSampleTime + FrameOffset; +} + +FFrameRate ULiveLinkTimeSynchronizationSource::GetFrameRate() const +{ + UpdateCachedState(); + return CachedData.Settings.FrameRate; +} + +bool ULiveLinkTimeSynchronizationSource::IsReady() const +{ + UpdateCachedState(); + return LiveLinkClient && CachedData.bIsValid && (ESyncState::NotSynced == State || LastUpdateGuid == CachedData.SkeletonGuid); +} + +bool ULiveLinkTimeSynchronizationSource::Open(const FTimeSynchronizationOpenData& OpenData) +{ + UE_LOG(LogLiveLink, Log, TEXT("ULiveLinkTimeSynchronizationSource::Open %s"), *SubjectName.ToString()); + if (ensure(LiveLinkClient != nullptr) && IsReady()) + { + State = ESyncState::Opened; + LastUpdateGuid = CachedData.SkeletonGuid; + LiveLinkClient->OnStartSynchronization(SubjectName, OpenData, FrameOffset); + return true; + } + else + { + State = ESyncState::NotSynced; + return false; + } +} + +void 
ULiveLinkTimeSynchronizationSource::Start(const FTimeSynchronizationStartData& StartData) +{ + UE_LOG(LogLiveLink, Log, TEXT("ULiveLinkTimeSynchronizationSource::Start %s"), *SubjectName.ToString()); + if (ensure(LiveLinkClient != nullptr)) + { + State = ESyncState::Synced; + LiveLinkClient->OnSynchronizationEstablished(SubjectName, StartData); + } + else + { + State = ESyncState::NotSynced; + } +} + +void ULiveLinkTimeSynchronizationSource::Close() +{ + UE_LOG(LogLiveLink, Log, TEXT("ULiveLinkTimeSynchronizationSource::Close %s"), *SubjectName.ToString()); + if (ensure(LiveLinkClient != nullptr)) + { + LiveLinkClient->OnStopSynchronization(SubjectName); + } + + State = ESyncState::NotSynced; +} + +FString ULiveLinkTimeSynchronizationSource::GetDisplayName() const +{ + return SubjectName.ToString(); +} + +void ULiveLinkTimeSynchronizationSource::OnModularFeatureRegistered(const FName& FeatureName, class IModularFeature* Feature) +{ + if (FeatureName == ILiveLinkClient::ModularFeatureName) + { + LiveLinkClient = static_cast(Feature); + } +} + +void ULiveLinkTimeSynchronizationSource::OnModularFeatureUnregistered(const FName& FeatureName, class IModularFeature* Feature) +{ + if (FeatureName == ILiveLinkClient::ModularFeatureName && (LiveLinkClient != nullptr) && ensure(Feature == LiveLinkClient)) + { + LiveLinkClient = nullptr; + } +} + +void ULiveLinkTimeSynchronizationSource::UpdateCachedState() const +{ + if (LastUpdateFrame != GFrameCounter && LiveLinkClient != nullptr) + { + LastUpdateFrame = GFrameCounter; + CachedData = LiveLinkClient->GetTimeSyncData(SubjectName); + } +} \ No newline at end of file diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.h b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.h new file mode 100644 index 000000000000..3a361b60fcc7 --- /dev/null +++ 
b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Private/TimeSync/LiveLinkTimeSynchronizationSource.h @@ -0,0 +1,55 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "TimeSynchronizationSource.h" +#include "LiveLinkClient.h" +#include "Misc/Guid.h" +#include "LiveLinkTimeSynchronizationSource.generated.h" + +UCLASS(EditInlineNew) +class ULiveLinkTimeSynchronizationSource : public UTimeSynchronizationSource +{ + GENERATED_BODY() + +private: + + UPROPERTY(EditAnywhere, Category="LiveLink") + FName SubjectName; + + FLiveLinkClient* LiveLinkClient; + + enum class ESyncState + { + NotSynced, + Opened, + Synced + }; + + mutable ESyncState State = ESyncState::NotSynced; + mutable int64 LastUpdateFrame; + mutable FLiveLinkSubjectTimeSyncData CachedData; + mutable FGuid LastUpdateGuid; + +public: + + ULiveLinkTimeSynchronizationSource(); + + //~ Begin TimeSynchronizationSource API + virtual FFrameTime GetNewestSampleTime() const override; + virtual FFrameTime GetOldestSampleTime() const override; + virtual FFrameRate GetFrameRate() const override; + virtual bool IsReady() const override; + virtual bool Open(const FTimeSynchronizationOpenData& OpenData) override; + virtual void Start(const FTimeSynchronizationStartData& StartData) override; + virtual void Close() override; + virtual FString GetDisplayName() const override; + //~ End TimeSynchronizationSource API + +private: + + void OnModularFeatureRegistered(const FName& FeatureName, class IModularFeature* Feature); + void OnModularFeatureUnregistered(const FName& FeatureName, class IModularFeature* Feature); + void UpdateCachedState() const; +}; \ No newline at end of file diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkClient.h b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkClient.h index 013b9ac95470..f6cffdf069a1 100644 --- a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkClient.h 
+++ b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkClient.h @@ -17,15 +17,22 @@ // Live Link Log Category DECLARE_LOG_CATEGORY_EXTERN(LogLiveLink, Log, All); - +struct FLiveLinkSubjectTimeSyncData +{ + bool bIsValid = false; + FGuid SkeletonGuid; + FFrameTime OldestSampleTime; + FFrameTime NewestSampleTime; + FLiveLinkTimeSynchronizationSettings Settings; +}; struct FLiveLinkSubject { // Key for storing curve data (Names) - FLiveLinkCurveKey CurveKeyData; + FLiveLinkCurveKey CurveKeyData; // Subject data frames that we have received (transforms and curve values) - TArray Frames; + TArray Frames; // Time difference between current system time and TimeCode times double SubjectTimeOffset; @@ -40,42 +47,110 @@ struct FLiveLinkSubject // Guid to track the last live link source that modified us FGuid LastModifier; - // Connection settings specified by user - FLiveLinkInterpolationSettings CachedInterpolationSettings; - - FLiveLinkSubject(const FLiveLinkRefSkeleton& InRefSkeleton) - : RefSkeleton(InRefSkeleton) + FLiveLinkSubject(const FLiveLinkRefSkeleton& InRefSkeleton, FName InName) + : Name(InName) + , RefSkeleton(InRefSkeleton) , RefSkeletonGuid(FGuid::NewGuid()) {} - FLiveLinkSubject() - {} - // Add a frame of data from a FLiveLinkFrameData void AddFrame(const FLiveLinkFrameData& FrameData, FGuid FrameSource, bool bSaveFrame); // Populate OutFrame with a frame based off of the supplied time and our own offsets void GetFrameAtWorldTime(const double InSeconds, FLiveLinkSubjectFrame& OutFrame); + // Populate OutFrame with a frame based off of the supplied scene time. 
+ void GetFrameAtSceneTime(const FQualifiedFrameTime& InSceneTime, FLiveLinkSubjectFrame& OutFrame); + // Get this subjects ref skeleton const FLiveLinkRefSkeleton& GetRefSkeleton() const { return RefSkeleton; } // Handling setting a new ref skeleton void SetRefSkeleton(const FLiveLinkRefSkeleton& InRefSkeleton) { RefSkeleton = InRefSkeleton; RefSkeletonGuid = FGuid::NewGuid(); } + // Free all subject data frames. + void ClearFrames(); + + void CacheSourceSettings(const ULiveLinkSourceSettings* DataToCache); + + FName GetName() const { return Name; } + + ELiveLinkSourceMode GetMode() const { return CachedSettings.SourceMode; } + + FLiveLinkSubjectTimeSyncData GetTimeSyncData(); + + void OnStartSynchronization(const struct FTimeSynchronizationOpenData& OpenData, const int32 FrameOffset); + void OnSynchronizationEstablished(const struct FTimeSynchronizationStartData& StartData); + void OnStopSynchronization(); + private: + // Copy a frame from the buffer to a FLiveLinkSubjectFrame + static void CopyFrameData(const FLiveLinkFrame& InFrame, FLiveLinkSubjectFrame& OutFrame); + + // Blend two frames from the buffer and copy the result to a FLiveLinkSubjectFrame + static void CopyFrameDataBlended(const FLiveLinkFrame& PreFrame, const FLiveLinkFrame& PostFrame, float BlendWeight, FLiveLinkSubjectFrame& OutFrame); + + void ResetFrame(FLiveLinkSubjectFrame& OutFrame) const;; + + int32 AddFrame_Default(const FLiveLinkWorldTime& FrameTime, bool bSaveFrame); + int32 AddFrame_Interpolated(const FLiveLinkWorldTime& FrameTime, bool bSaveFrame); + int32 AddFrame_TimeSynchronized(const FFrameTime& FrameTime, bool bSaveFrame); + + template + int32 AddFrame_TimeSynchronized(const FFrameTime& FrameTime, bool bSaveFrame); + + void GetFrameAtWorldTime_Default(const double InSeconds, FLiveLinkSubjectFrame& OutFrame); + void GetFrameAtWorldTime_Interpolated(const double InSeconds, FLiveLinkSubjectFrame& OutFrame); + + template + void GetFrameAtSceneTime_TimeSynchronized(const FFrameTime& 
FrameTime, FLiveLinkSubjectFrame& OutFrame); + + template + int32 FindFrameIndex_TimeSynchronized(const FFrameTime& FrameTime); + + FName Name; + + struct FLiveLinkCachedSettings + { + ELiveLinkSourceMode SourceMode = ELiveLinkSourceMode::Default; + TOptional InterpolationSettings; + TOptional TimeSynchronizationSettings; + }; + + // Connection settings specified by user + // May only store settings relevant to the current mode (ELiveLinkSourceMode). + FLiveLinkCachedSettings CachedSettings; + // Ref Skeleton for transforms FLiveLinkRefSkeleton RefSkeleton; // Allow us to track changes to the ref skeleton FGuid RefSkeletonGuid; - // Copy a frame from the buffer to a FLiveLinkSubjectFrame - void CopyFrameData(const FLiveLinkFrame& InFrame, FLiveLinkSubjectFrame& OutFrame); + struct FLiveLinkTimeSynchronizationData + { + // Whether or not synchronization has been established. + bool bHasEstablishedSync = false; - // Blend two frames from the buffer and copy the result to a FLiveLinkSubjectFrame - void CopyFrameDataBlended(const FLiveLinkFrame& PreFrame, const FLiveLinkFrame& PostFrame, float BlendWeight, FLiveLinkSubjectFrame& OutFrame); + // The frame in our buffer where a rollover was detected. Only applicable for time synchronized sources. + int32 RolloverFrame = INDEX_NONE; + + // Frame offset that will be used for this source. + int32 Offset = 0; + + // Frame Time value modulus. When this value is not set, we assume no rollover occurs. + TOptional RolloverModulus; + + // Frame rate used as the base for synchronization. + FFrameRate SyncFrameRate; + + // Frame time that synchronization was established (relative to SynchronizationFrameRate). 
+ FFrameTime SyncStartTime; + }; + + TOptional TimeSyncData; + }; // Structure that identifies an individual subject @@ -149,6 +224,7 @@ public: virtual const FLiveLinkSubjectFrame* GetSubjectData(FName SubjectName) override; const FLiveLinkSubjectFrame* GetSubjectDataAtWorldTime(FName SubjectName, double WorldTime) override; + const FLiveLinkSubjectFrame* GetSubjectDataAtSceneTime(FName SubjectName, const FTimecode& SceneTime) override; const TArray& GetSourceEntries() const { return SourceGuids; } const TArray* GetSubjectRawFrames(FName SubjectName) override; @@ -161,6 +237,10 @@ public: // Get a list of currently active subjects TArray GetSubjects(); + FLiveLinkSubjectTimeSyncData GetTimeSyncData(FName SubjectName); + + FGuid GetCurrentSubjectOwner(FName SubjectName) const; + // Populates an array with in-use subject names virtual void GetSubjectNames(TArray& SubjectNames) override; @@ -196,8 +276,19 @@ public: FDelegateHandle RegisterSubjectsChangedHandle(const FSimpleMulticastDelegate::FDelegate& SubjectsChanged); void UnregisterSubjectsChangedHandle(FDelegateHandle Handle); + /** Called when time synchronization is starting for a subject. */ + void OnStartSynchronization(FName SubjectName, const struct FTimeSynchronizationOpenData& OpenData, const int32 FrameOffset); + + /** Called when time synchronization has been established for a subject. */ + void OnSynchronizationEstablished(FName SubjectName, const struct FTimeSynchronizationStartData& StartData); + + /** Called when time synchronization has been stopped for a subject. 
*/ + void OnStopSynchronization(FName SubjectName); + private: + void RemoveSource(int32 SourceIndex); + // Setup the source for virtual subjects void AddVirtualSubjectSource(); diff --git a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkComponent.h b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkComponent.h index 2eb80f035b7a..408efaf67192 100644 --- a/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkComponent.h +++ b/Engine/Plugins/Animation/LiveLink/Source/LiveLink/Public/LiveLinkComponent.h @@ -48,6 +48,10 @@ public: UFUNCTION(BlueprintCallable, Category = "LiveLink") void GetSubjectDataAtWorldTime(const FName SubjectName, const float WorldTime, bool& bSuccess, FSubjectFrameHandle& SubjectFrameHandle); + // Returns a handle to the frame of data in LiveLink for a given subject at the specified time along with a boolean for whether a frame was found. + // Returns a handle to an empty frame if no frame of data is found. + UFUNCTION(BlueprintCallable, Category = "LiveLink") + void GetSubjectDataAtSceneTime(const FName SubjectName, const FTimecode& SceneTime, bool& bSuccess, FSubjectFrameHandle& SubjectFrameHandle); private: bool HasLiveLinkClient(); diff --git a/Engine/Plugins/Compositing/Composure/Source/Composure/Private/MovieScene/MovieSceneComposurePostMoveSettingsTrack.cpp b/Engine/Plugins/Compositing/Composure/Source/Composure/Private/MovieScene/MovieSceneComposurePostMoveSettingsTrack.cpp index adbc89232601..2fad8395f099 100644 --- a/Engine/Plugins/Compositing/Composure/Source/Composure/Private/MovieScene/MovieSceneComposurePostMoveSettingsTrack.cpp +++ b/Engine/Plugins/Compositing/Composure/Source/Composure/Private/MovieScene/MovieSceneComposurePostMoveSettingsTrack.cpp @@ -16,7 +16,7 @@ UMovieSceneComposurePostMoveSettingsTrack::UMovieSceneComposurePostMoveSettingsT UMovieSceneSection* UMovieSceneComposurePostMoveSettingsTrack::CreateNewSection() { - return NewObject(this, 
UMovieSceneComposurePostMoveSettingsSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/EditorScriptingUtilities.uplugin b/Engine/Plugins/Editor/EditorScriptingUtilities/EditorScriptingUtilities.uplugin index 5395448a26f3..2cb698ff0355 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/EditorScriptingUtilities.uplugin +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/EditorScriptingUtilities.uplugin @@ -21,12 +21,5 @@ "Type" : "Editor", "LoadingPhase" : "Default" } - ], - "Plugins" : - [ - { - "Name" : "PythonScriptPlugin", - "Enabled" : true - } ] -} +} \ No newline at end of file diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/EditorScriptingUtilities.Build.cs b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/EditorScriptingUtilities.Build.cs index 8607f8bdfcf2..63e36aae0ed8 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/EditorScriptingUtilities.Build.cs +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/EditorScriptingUtilities.Build.cs @@ -21,18 +21,14 @@ namespace UnrealBuildTool.Rules "AssetTools", "EditorStyle", "MainFrame", + "MeshDescription", + "MeshDescriptionOperations", + "RawMesh", "Slate", "SlateCore", - "MeshDescription", "UnrealEd", } ); - - PrivateIncludePathModuleNames.AddRange( - new string[] { - "PythonScriptPlugin", - } - ); } } } diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorLevelLibrary.cpp b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorLevelLibrary.cpp index 19d6e76c6f6b..68e00591c907 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorLevelLibrary.cpp +++ 
b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorLevelLibrary.cpp @@ -12,6 +12,7 @@ #include "Editor/UnrealEdEngine.h" #include "EngineUtils.h" #include "Engine/Brush.h" +#include "Engine/MapBuildDataRegistry.h" #include "Engine/Selection.h" #include "Engine/StaticMesh.h" #include "Engine/StaticMeshActor.h" @@ -23,7 +24,8 @@ #include "Kismet2/ComponentEditorUtils.h" #include "Layers/ILayers.h" #include "LevelEditorViewport.h" -#include "Engine/MapBuildDataRegistry.h" +#include "Materials/Material.h" +#include "Materials/MaterialInterface.h" #include "MeshMergeModule.h" #include "ScopedTransaction.h" #include "UnrealEdGlobals.h" @@ -914,11 +916,6 @@ namespace InternalEditorLevelLibrary ActorsToTest.RemoveAtSwap(Index); } } - if (ActorsToTest.Num() < 2) - { - OutFailureReason = TEXT("A merge operation requires at least 2 Actors."); - return false; - } // All actors need to come from the same World UWorld* CurrentWorld = ActorsToTest[0]->GetWorld(); @@ -983,12 +980,6 @@ namespace InternalEditorLevelLibrary } } - if (OutValidActor.Num() < 2) - { - OutFailureReason = TEXT("A merge operation requires at least 2 valid Actors."); - return false; - } - OutAverageLocation = PivotLocation / OutValidActor.Num(); return true; @@ -1034,7 +1025,13 @@ AActor* UEditorLevelLibrary::JoinStaticMeshActors(const TArrayGetWorld()->SpawnActor(PivotLocation, FRotator::ZeroRotator, Params); if (!NewActor) { - UE_LOG(LogEditorScripting, Error, TEXT("JoinStaticMeshSctors failed. Internal error while creating the join actor.")); + UE_LOG(LogEditorScripting, Error, TEXT("JoinStaticMeshActors failed. 
Internal error while creating the join actor.")); return nullptr; } @@ -1129,7 +1126,7 @@ bool UEditorLevelLibrary::MergeStaticMeshActors(const TArray& FVector MergedActorLocation; TArray CreatedAssets; const float ScreenAreaSize = TNumericLimits::Max(); - MeshUtilities.MergeComponentsToStaticMesh(AllComponents, AllActors[0]->GetWorld(), MergeOptions.MeshMergingSettings, nullptr, nullptr, MergeOptions.BasePackageName, CreatedAssets, MergedActorLocation, ScreenAreaSize, true); + MeshUtilities.MergeComponentsToStaticMesh(AllComponents, AllActors[0]->GetWorld(), MergeOptions.MeshMergingSettings, nullptr, nullptr, PackageName, CreatedAssets, MergedActorLocation, ScreenAreaSize, true); UStaticMesh* MergedMesh = nullptr; if (!CreatedAssets.FindItemByClass(&MergedMesh)) @@ -1184,4 +1181,103 @@ bool UEditorLevelLibrary::MergeStaticMeshActors(const TArray& return true; } +bool UEditorLevelLibrary::CreateProxyMeshActor(const TArray& ActorsToMerge, const FEditorScriptingCreateProxyMeshActorOptions& MergeOptions, class AStaticMeshActor*& OutMergedActor) +{ + // See FMeshProxyTool::RunMerge (Engine\Source\Editor\MergeActors\Private\MeshProxyTool\MeshProxyTool.cpp) + TGuardValue UnattendedScriptGuard(GIsRunningUnattendedScript, true); + + OutMergedActor = nullptr; + + if (!EditorScriptingUtils::CheckIfInEditorAndPIE()) + { + return false; + } + + // Cleanup actors + TArray StaticMeshActors; + TArray AllComponents_UNUSED; + FVector PivotLocation; + FString FailureReason; + if (!InternalEditorLevelLibrary::FindValidActorAndComponents(ActorsToMerge, StaticMeshActors, AllComponents_UNUSED, PivotLocation, FailureReason)) + { + UE_LOG(LogEditorScripting, Error, TEXT("MergeStaticMeshActors failed. 
%s"), *FailureReason); + return false; + } + TArray AllActors(StaticMeshActors); + + const IMeshMergeUtilities& MeshUtilities = FModuleManager::Get().LoadModuleChecked("MeshMergeUtilities").GetUtilities(); + + FCreateProxyDelegate ProxyDelegate; + TArray CreatedAssets; + ProxyDelegate.BindLambda([&CreatedAssets](const FGuid Guid, TArray& InAssetsToSync){CreatedAssets.Append(InAssetsToSync);}); + + MeshUtilities.CreateProxyMesh( + AllActors, // List of Actors to merge + MergeOptions.MeshProxySettings, // Merge settings + nullptr, // Base Material used for final proxy material. Note: nullptr for default impl: /Engine/EngineMaterials/BaseFlattenMaterial.BaseFlattenMaterial + nullptr, // Package for generated assets. Note: if nullptr, BasePackageName is used + MergeOptions.BasePackageName, // Will be used for naming generated assets, in case InOuter is not specified ProxyBasePackageName will be used as long package name for creating new packages + FGuid::NewGuid(), // Identify a job, First argument of the ProxyDelegate + ProxyDelegate // Called back on asset creation + ); + + UStaticMesh* MergedMesh = nullptr; + if (!CreatedAssets.FindItemByClass(&MergedMesh)) + { + UE_LOG(LogEditorScripting, Error, TEXT("CreateProxyMeshActor failed. 
No mesh created.")); + return false; + } + + // Update the asset registry that a new static mesh and material has been created + FAssetRegistryModule& AssetRegistry = FModuleManager::Get().LoadModuleChecked("AssetRegistry"); + for (UObject* Asset : CreatedAssets) + { + AssetRegistry.AssetCreated(Asset); + GEditor->BroadcastObjectReimported(Asset); + } + + // Also notify the content browser that the new assets exists + FContentBrowserModule& ContentBrowserModule = FModuleManager::Get().LoadModuleChecked("ContentBrowser"); + ContentBrowserModule.Get().SyncBrowserToAssets(CreatedAssets, true); + + // Place new mesh in the world + UWorld* ActorWorld = AllActors[0]->GetWorld(); + ULevel* ActorLevel = AllActors[0]->GetLevel(); + if (MergeOptions.bSpawnMergedActor) + { + FActorSpawnParameters Params; + Params.OverrideLevel = ActorLevel; + OutMergedActor = ActorWorld->SpawnActor(FVector::ZeroVector, FRotator::ZeroRotator, Params); + if (!OutMergedActor) + { + UE_LOG(LogEditorScripting, Error, TEXT("CreateProxyMeshActor failed. Internal error while creating the merged actor.")); + return false; + } + + OutMergedActor->GetStaticMeshComponent()->SetStaticMesh(MergedMesh); + OutMergedActor->SetActorLabel(MergeOptions.NewActorLabel); + ActorWorld->UpdateCullDistanceVolumes(OutMergedActor, OutMergedActor->GetStaticMeshComponent()); + } + + // Remove source actors + if (MergeOptions.bDestroySourceActors) + { + for (AActor* Actor : AllActors) + { + GEditor->Layers->DisassociateActorFromLayers(Actor); + ActorWorld->EditorDestroyActor(Actor, true); + } + } + + //Select newly created actor + if (OutMergedActor) + { + GEditor->SelectNone(false, true, false); + GEditor->SelectActor(OutMergedActor, true, false); // don't notify but manually call NoteSelectionChange ? 
+ GEditor->NoteSelectionChange(); + } + + return true; +} + #undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorPythonExecuter.cpp b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorPythonExecuter.cpp index f03663beb4ee..f031b5feda90 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorPythonExecuter.cpp +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorPythonExecuter.cpp @@ -1,7 +1,6 @@ // Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. #include "EditorPythonExecuter.h" -#include "IPythonScriptPlugin.h" #include "AssetRegistryModule.h" #include "Editor.h" @@ -164,7 +163,12 @@ namespace InternalEditorPythonRunner if (!AssetRegistryModule.Get().IsLoadingAssets()) { bIsRunning = true; - IPythonScriptPlugin::Get()->ExecPythonCommand(*FileName); + + // Try and run the command + if (!GEngine->Exec(GWorld, *FString::Printf(TEXT("PY \"%s\""), *FileName), *GLog)) + { + UE_LOG(LogEditorPythonExecuter, Error, TEXT("-ExecutePythonScript cannot be used without a valid Python Script Plugin. Ensure the plugin is enabled and wasn't compiled with Python support stubbed out.")); + } } } else @@ -212,16 +216,8 @@ void FEditorPythonExecuter::OnStartupModule() } else { - IPythonScriptPlugin* PythonScriptPlugin = IPythonScriptPlugin::Get(); - if (PythonScriptPlugin && PythonScriptPlugin->IsPythonAvailable()) - { - InternalEditorPythonRunner::Executer = new InternalEditorPythonRunner::FExecuterTickable(MoveTemp(FileValue)); - InternalEditorPythonRunner::SExecutingDialog::OpenDialog(); - } - else - { - UE_LOG(LogEditorPythonExecuter, Error, TEXT("-ExecutePythonScript cannot be used without a valid Python Script Plugin. 
Ensure the plugin is enabled and wasn't compiled with Python support stubbed out.")); - } + InternalEditorPythonRunner::Executer = new InternalEditorPythonRunner::FExecuterTickable(MoveTemp(FileValue)); + InternalEditorPythonRunner::SExecutingDialog::OpenDialog(); } } } diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.cpp b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.cpp index dc1db839153d..9543863b3463 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.cpp +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.cpp @@ -42,14 +42,15 @@ namespace EditorScriptingUtils } // Test for invalid characters - bool IsAValidPath(const FString& Path, FString& OutFailureReason) + bool IsAValidPath(const FString& Path, const TCHAR* InvalidChar, FString& OutFailureReason) { - // Like !FName::IsValidGroupName(Path)), but with INVALID_OBJECTPATH_CHARACTERS and no conversion to from FName - const int32 StrLen = FCString::Strlen(INVALID_OBJECTPATH_CHARACTERS); + // Like !FName::IsValidGroupName(Path)), but with another list and no conversion to from FName + // InvalidChar may be INVALID_OBJECTPATH_CHARACTERS or INVALID_LONGPACKAGE_CHARACTERS or ... 
+ const int32 StrLen = FCString::Strlen(InvalidChar); for (int32 Index = 0; Index < StrLen; ++Index) { int32 FoundIndex = 0; - if (Path.FindChar(INVALID_OBJECTPATH_CHARACTERS[Index], FoundIndex)) + if (Path.FindChar(InvalidChar[Index], FoundIndex)) { OutFailureReason = FString::Printf(TEXT("Can't convert the path %s because it contains invalid characters."), *Path); return false; @@ -185,34 +186,35 @@ namespace EditorScriptingUtils TextPath.ReplaceInline(TEXT("\\"), TEXT("/"), ESearchCase::CaseSensitive); FPaths::RemoveDuplicateSlashes(TextPath); - // Test for invalid characters - if (!IsAValidPath(TextPath, OutFailureReason)) - { - return FString(); - } - // Get asset full name, i.e."PackageName.ObjectName:InnerAssetName.2ndInnerAssetName" from "/Game/Folder/PackageName.ObjectName:InnerAssetName.2ndInnerAssetName" - FString AssetFullName = FPackageName::GetShortName(TextPath); - - // Remove possible ':' character from asset full name + FString AssetFullName; { - int32 IndexOfSemiColumn; - if (AssetFullName.FindChar(TEXT(':'), IndexOfSemiColumn)) + // Get everything after the last slash + int32 IndexOfLastSlash = INDEX_NONE; + TextPath.FindLastChar('/', IndexOfLastSlash); + + FString Folders = TextPath.Left(IndexOfLastSlash); + // Test for invalid characters + if (!IsAValidPath(Folders, INVALID_LONGPACKAGE_CHARACTERS, OutFailureReason)) { - AssetFullName = AssetFullName.Left(IndexOfSemiColumn); + return FString(); } + + AssetFullName = TextPath.Mid(IndexOfLastSlash + 1); } // Get the object name - FString ObjectName = FPackageName::ObjectPathToObjectName(AssetFullName); + FString ObjectName = FPackageName::ObjectPathToPackageName(AssetFullName); if (ObjectName.IsEmpty()) { - ObjectName = FPackageName::ObjectPathToPackageName(AssetFullName); - if (ObjectName.IsEmpty()) - { - OutFailureReason = FString::Printf(TEXT("Can't convert the path '%s' because it doesn't contain an asset name."), *AnyAssetPath); - return FString(); - } + OutFailureReason = 
FString::Printf(TEXT("Can't convert the path '%s' because it doesn't contain an asset name."), *AnyAssetPath); + return FString(); + } + + // Test for invalid characters + if (!IsAValidPath(ObjectName, INVALID_OBJECTNAME_CHARACTERS, OutFailureReason)) + { + return FString(); } // Confirm that we have a valid Root Package and get the valid PackagePath /Game/MyFolder/MyAsset @@ -234,7 +236,7 @@ namespace EditorScriptingUtils return FString(); } - FString ObjectPath = FString::Printf(TEXT("%s.%s"), *PackagePath, *ObjectName); + FString ObjectPath = FString::Printf(TEXT("%s.%s"), *PackagePath, *ObjectName); // #todo-ueent should be asset name, not object name (as ObjectName == PackageName) if (FPackageName::IsScriptPackage(ObjectPath)) { @@ -281,13 +283,6 @@ namespace EditorScriptingUtils TextPath.ReplaceInline(TEXT("\\"), TEXT("/"), ESearchCase::CaseSensitive); FPaths::RemoveDuplicateSlashes(TextPath); - // Test for invalid characters - if (!IsAValidPath(TextPath, OutFailureReason)) - { - return FString(); - } - - //ObjectName = FPackageName::ObjectPathToObjectName(ObjectName); { // Remove . 
int32 ObjectDelimiterIdx; @@ -303,6 +298,12 @@ namespace EditorScriptingUtils } } + // Test for invalid characters + if (!IsAValidPath(TextPath, INVALID_LONGPACKAGE_CHARACTERS, OutFailureReason)) + { + return FString(); + } + // Confirm that we have a valid Root Package and get the valid PackagePath /Game/MyFolder FString PackagePath; if (!FPackageName::TryConvertFilenameToLongPackageName(TextPath, PackagePath, &OutFailureReason)) diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.h b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.h index 09a84f742178..e2fc6a672fb3 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.h +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorScriptingUtils.h @@ -26,7 +26,7 @@ namespace EditorScriptingUtils /* * Check if the Path is a valid ContentBrowser Path */ - bool IsAValidPath(const FString& Path, FString& OutFailureReason); + bool IsAValidPath(const FString& Path, const TCHAR* InvalidChar, FString& OutFailureReason); /* * Check if the AssetPath can be used to create a new asset diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorStaticMeshLibrary.cpp b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorStaticMeshLibrary.cpp index e4d9897855f3..5d453c49234d 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorStaticMeshLibrary.cpp +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Private/EditorStaticMeshLibrary.cpp @@ -24,16 +24,18 @@ #include "Layers/ILayers.h" #include "LevelEditorViewport.h" #include "Engine/MapBuildDataRegistry.h" +#include "MeshAttributes.h" +#include "MeshAttributeArray.h" +#include 
"MeshDescription.h" +#include "MeshDescriptionOperations.h" #include "MeshMergeModule.h" #include "PhysicsEngine/BodySetup.h" +#include "RawMesh.h" #include "ScopedTransaction.h" #include "Toolkits/AssetEditorManager.h" #include "UnrealEdGlobals.h" #include "UnrealEd/Private/GeomFitUtils.h" #include "UnrealEd/Private/ConvexDecompTool.h" -#include "MeshDescription.h" -#include "MeshAttributes.h" -#include "MeshAttributeArray.h" #define LOCTEXT_NAMESPACE "EditorStaticMeshLibrary" @@ -114,6 +116,37 @@ namespace InternalEditorMeshLibrary return true; } + + bool IsUVChannelValid(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex) + { + if (StaticMesh == nullptr) + { + UE_LOG(LogEditorScripting, Error, TEXT("The StaticMesh is null.")); + return false; + } + + if (LODIndex >= StaticMesh->GetNumLODs() || LODIndex < 0) + { + UE_LOG(LogEditorScripting, Error, TEXT("The StaticMesh doesn't have LOD %d."), LODIndex); + return false; + } + + FMeshDescription* MeshDescription = StaticMesh->GetOriginalMeshDescription(LODIndex); + if (!MeshDescription) + { + UE_LOG(LogEditorScripting, Error, TEXT("No mesh description for LOD %d."), LODIndex); + return false; + } + + int32 NumUVChannels = StaticMesh->GetNumUVChannels(LODIndex); + if (UVChannelIndex < 0 || UVChannelIndex >= NumUVChannels) + { + UE_LOG(LogEditorScripting, Error, TEXT("The given UV channel index %d is out of bounds."), UVChannelIndex); + return false; + } + + return true; + } } int32 UEditorStaticMeshLibrary::SetLods(UStaticMesh* StaticMesh, const FEditorScriptingMeshReductionOptions& ReductionOptions) @@ -196,6 +229,68 @@ int32 UEditorStaticMeshLibrary::SetLods(UStaticMesh* StaticMesh, const FEditorSc return LODIndex; } +int32 UEditorStaticMeshLibrary::SetLodFromStaticMesh(UStaticMesh* DestinationStaticMesh, int32 DestinationLodIndex, UStaticMesh* SourceStaticMesh, int32 SourceLodIndex) +{ + TGuardValue UnattendedScriptGuard( GIsRunningUnattendedScript, true ); + + if ( 
!EditorScriptingUtils::CheckIfInEditorAndPIE( )) + { + return -1; + } + + if ( DestinationStaticMesh == nullptr ) + { + UE_LOG(LogEditorScripting, Error, TEXT("SetLodFromStaticMesh: The DestinationStaticMesh is null.")); + return -1; + } + + if ( SourceStaticMesh == nullptr ) + { + UE_LOG(LogEditorScripting, Error, TEXT("SetLodFromStaticMesh: The SourceStaticMesh is null.")); + return -1; + } + + if ( !SourceStaticMesh->SourceModels.IsValidIndex( SourceLodIndex ) ) + { + UE_LOG(LogEditorScripting, Error, TEXT("SetLodFromStaticMesh: SourceLodIndex is invalid.")); + return -1; + } + + // Close the mesh editor to prevent crashing. Reopen it after the mesh has been built. + FAssetEditorManager& AssetEditorManager = FAssetEditorManager::Get(); + bool bStaticMeshIsEdited = false; + if ( AssetEditorManager.FindEditorForAsset( DestinationStaticMesh, false ) ) + { + AssetEditorManager.CloseAllEditorsForAsset( DestinationStaticMesh ); + bStaticMeshIsEdited = true; + } + + DestinationStaticMesh->Modify(); + + if ( DestinationStaticMesh->SourceModels.Num() < DestinationLodIndex + 1 ) + { + // Add one LOD + DestinationStaticMesh->AddSourceModel(); + + DestinationLodIndex = DestinationStaticMesh->SourceModels.Num() - 1; + } + + FRawMesh SourceRawMesh; + SourceStaticMesh->SourceModels[ SourceLodIndex ].LoadRawMesh( SourceRawMesh ); + + DestinationStaticMesh->SourceModels[ DestinationLodIndex ].SaveRawMesh( SourceRawMesh ); + + DestinationStaticMesh->PostEditChange(); + + // Reopen MeshEditor on this mesh if the MeshEditor was previously opened in it + if ( bStaticMeshIsEdited ) + { + AssetEditorManager.OpenEditorForAsset( DestinationStaticMesh ); + } + + return DestinationLodIndex; +} + int32 UEditorStaticMeshLibrary::GetLodCount(UStaticMesh* StaticMesh) { TGuardValue UnattendedScriptGuard(GIsRunningUnattendedScript, true); @@ -935,5 +1030,70 @@ bool UEditorStaticMeshLibrary::RemoveUVChannel(UStaticMesh* StaticMesh, int32 LO return StaticMesh->RemoveUVChannel(LODIndex, 
UVChannelIndex); } -#undef LOCTEXT_NAMESPACE +bool UEditorStaticMeshLibrary::GeneratePlanarUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings) +{ + TGuardValue UnattendedScriptGuard(GIsRunningUnattendedScript, true); + if (!EditorScriptingUtils::CheckIfInEditorAndPIE()) + { + return false; + } + + if (!InternalEditorMeshLibrary::IsUVChannelValid(StaticMesh, LODIndex, UVChannelIndex)) + { + return false; + } + + FMeshDescription* MeshDescription = StaticMesh->GetOriginalMeshDescription(LODIndex); + + TArray TexCoords; + FMeshDescriptionOperations::GeneratePlanarUV(*MeshDescription, UVSettings, TexCoords); + + return StaticMesh->SetUVChannel(LODIndex, UVChannelIndex, TexCoords); +} + +bool UEditorStaticMeshLibrary::GenerateCylindricalUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings) +{ + TGuardValue UnattendedScriptGuard(GIsRunningUnattendedScript, true); + + if (!EditorScriptingUtils::CheckIfInEditorAndPIE()) + { + return false; + } + + if (!InternalEditorMeshLibrary::IsUVChannelValid(StaticMesh, LODIndex, UVChannelIndex)) + { + return false; + } + + FMeshDescription* MeshDescription = StaticMesh->GetOriginalMeshDescription(LODIndex); + + TArray TexCoords; + FMeshDescriptionOperations::GenerateCylindricalUV(*MeshDescription, UVSettings, TexCoords); + + return StaticMesh->SetUVChannel(LODIndex, UVChannelIndex, TexCoords); +} + +bool UEditorStaticMeshLibrary::GenerateBoxUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings) +{ + TGuardValue UnattendedScriptGuard(GIsRunningUnattendedScript, true); + + if (!EditorScriptingUtils::CheckIfInEditorAndPIE()) + { + return false; + } + + if (!InternalEditorMeshLibrary::IsUVChannelValid(StaticMesh, LODIndex, UVChannelIndex)) + { + return false; + } + + FMeshDescription* MeshDescription = StaticMesh->GetOriginalMeshDescription(LODIndex); + + TArray TexCoords; + 
FMeshDescriptionOperations::GenerateBoxUV(*MeshDescription, UVSettings, TexCoords); + + return StaticMesh->SetUVChannel(LODIndex, UVChannelIndex, TexCoords); +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorLevelLibrary.h b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorLevelLibrary.h index 6f8015505273..c606e62b67c3 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorLevelLibrary.h +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorLevelLibrary.h @@ -12,7 +12,7 @@ USTRUCT(BlueprintType) struct FEditorScriptingJoinStaticMeshActorsOptions { - GENERATED_USTRUCT_BODY() + GENERATED_BODY() FEditorScriptingJoinStaticMeshActorsOptions() : bDestroySourceActors(true) @@ -35,7 +35,7 @@ struct FEditorScriptingJoinStaticMeshActorsOptions USTRUCT(BlueprintType) struct FEditorScriptingMergeStaticMeshActorsOptions : public FEditorScriptingJoinStaticMeshActorsOptions { - GENERATED_USTRUCT_BODY() + GENERATED_BODY() FEditorScriptingMergeStaticMeshActorsOptions() : bSpawnMergedActor(true) @@ -53,6 +53,27 @@ struct FEditorScriptingMergeStaticMeshActorsOptions : public FEditorScriptingJoi FMeshMergingSettings MeshMergingSettings; }; +USTRUCT(BlueprintType) +struct FEditorScriptingCreateProxyMeshActorOptions : public FEditorScriptingJoinStaticMeshActorsOptions +{ + GENERATED_BODY() + + FEditorScriptingCreateProxyMeshActorOptions() + : bSpawnMergedActor(true) + { } + + // Spawn the new merged actors + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Options) + bool bSpawnMergedActor; + + // The package path you want to save to. 
ie: /Game/MyFolder + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Options) + FString BasePackageName; + + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Options) + FMeshProxySettings MeshProxySettings; +}; + /** * Utility class to do most of the common functionalities in the World Editor. * The editor should not be in play in editor mode. @@ -111,7 +132,7 @@ public: */ UFUNCTION(BlueprintCallable, Category = "Editor Scripting | Level Utility", meta = (DeterminesOutputType = "ActorClass")) static AActor* SpawnActorFromClass(TSubclassOf ActorClass, FVector Location, FRotator Rotation = FRotator::ZeroRotator); - + /** * Destroy the actor from the world editor. Notify the Editor that the actor got destroyed. * @param ToDestroyActor Actor to destroy. @@ -243,12 +264,22 @@ public: * Merge the meshes into a unique mesh with the provided StaticMeshActors. There are multiple options on how to merge the meshes and their materials. * The ActorsToMerge need to be in the same Level. * This may have a high impact on performance depending of the MeshMergingSettings options. - * @param ActorsToMerge List of Actors to join. - * @param MergeOptions Options on how to join the actors. + * @param ActorsToMerge List of Actors to merge. + * @param MergeOptions Options on how to merge the actors. * @param OutMergedActor The new created actor, if requested. * @return if the operation is successful. */ UFUNCTION(BlueprintCallable, Category = "Editor Scripting | DataPrep") static bool MergeStaticMeshActors(const TArray& ActorsToMerge, const FEditorScriptingMergeStaticMeshActorsOptions& MergeOptions, class AStaticMeshActor*& OutMergedActor); + + /** + * Build a proxy mesh actor that can replace a set of mesh actors. + * @param ActorsToMerge List of actors to build a proxy for. 
+ * @param MergeOptions + * @param OutMergedActor generated actor if requested + * @return Success of the proxy creation + */ + UFUNCTION(BlueprintCallable, Category = "Editor Scripting | DataPrep") + static bool CreateProxyMeshActor(const TArray& ActorsToMerge, const FEditorScriptingCreateProxyMeshActorOptions& MergeOptions, class AStaticMeshActor*& OutMergedActor); }; diff --git a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorStaticMeshLibrary.h b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorStaticMeshLibrary.h index 673580a25802..b26d6306c139 100644 --- a/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorStaticMeshLibrary.h +++ b/Engine/Plugins/Editor/EditorScriptingUtilities/Source/EditorScriptingUtilities/Public/EditorStaticMeshLibrary.h @@ -9,6 +9,7 @@ #include "Engine/MeshMerging.h" #include "GameFramework/Actor.h" #include "PhysicsEngine/BodySetupEnums.h" +#include "UVMapSettings.h" #include "EditorStaticMeshLibrary.generated.h" @@ -90,6 +91,18 @@ public: UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") static int32 SetLods(UStaticMesh* StaticMesh, const FEditorScriptingMeshReductionOptions& ReductionOptions); + /** + * Adds or create a LOD at DestinationLodIndex using the geometry from SourceStaticMesh SourceLodIndex + * @param DestinationStaticMesh The static mesh to set the LOD in. + * @param DestinationLodIndex The index of the LOD to set. + * @param SourceStaticMesh The static mesh to get the LOD from. + * @param SourceLodIndex The index of the LOD to get. + * @return The index of the LOD that was set. It can be different than DestinationLodIndex if it wasn't a valid index. + * A negative value indicates that the LOD was not set. See log for explanation. 
+ */ + UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") + static int32 SetLodFromStaticMesh(UStaticMesh* DestinationStaticMesh, int32 DestinationLodIndex, UStaticMesh* SourceStaticMesh, int32 SourceLodIndex); + /** * Get number of LODs present on a static mesh. * @param StaticMesh Mesh to process. @@ -266,5 +279,37 @@ public: */ UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") static bool RemoveUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex); -}; + /** + * Generates planar UV mapping in the specified UV channel on the given LOD of a StaticMesh. + * @param StaticMesh Static mesh on which to generate the UV mapping. + * @param LODIndex Index of the StaticMesh LOD. + * @param UVChannelIndex Channel where to save the UV mapping. + * @param UVSettings The settings to use to generate the UV mapping. + * @return true if the UV mapping was generated. + */ + UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") + static bool GeneratePlanarUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings); + + /** + * Generates cylindrical UV mapping in the specified UV channel on the given LOD of a StaticMesh. + * @param StaticMesh Static mesh on which to generate the UV mapping. + * @param LODIndex Index of the StaticMesh LOD. + * @param UVChannelIndex Channel where to save the UV mapping. + * @param UVSettings The settings to use to generate the UV mapping. + * @return true if the UV mapping was generated. + */ + UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") + static bool GenerateCylindricalUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings); + + /** + * Generates box UV mapping in the specified UV channel on the given LOD of a StaticMesh. + * @param StaticMesh Static mesh on which to generate the UV mapping. + * @param LODIndex Index of the StaticMesh LOD. 
+ * @param UVChannelIndex Channel where to save the UV mapping. + * @param UVSettings The settings to use to generate the UV mapping. + * @return true if the UV mapping was generated. + */ + UFUNCTION(BlueprintCallable, Category = "Editor Scripting | StaticMesh") + static bool GenerateBoxUVChannel(UStaticMesh* StaticMesh, int32 LODIndex, int32 UVChannelIndex, const FUVMapSettings& UVSettings); +}; diff --git a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/DatasmithContent.Build.cs b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/DatasmithContent.Build.cs index 4d22afe125f9..6fcc5623c8ff 100644 --- a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/DatasmithContent.Build.cs +++ b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/DatasmithContent.Build.cs @@ -16,6 +16,13 @@ namespace UnrealBuildTool.Rules "Engine" } ); + + PrivateDependencyModuleNames.AddRange( + new string[] + { + "Landscape" + } + ); } } } \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Private/ObjectTemplates/DatasmithLandscapeTemplate.cpp b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Private/ObjectTemplates/DatasmithLandscapeTemplate.cpp new file mode 100644 index 000000000000..990d708b7727 --- /dev/null +++ b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Private/ObjectTemplates/DatasmithLandscapeTemplate.cpp @@ -0,0 +1,54 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "ObjectTemplates/DatasmithLandscapeTemplate.h" + +#include "Landscape.h" + +void UDatasmithLandscapeTemplate::Apply( UObject* Destination, bool bForce ) +{ +#if WITH_EDITORONLY_DATA + ALandscape* Landscape = Cast< ALandscape >( Destination ); + + if( !Landscape ) + { + return; + } + + UDatasmithLandscapeTemplate* PreviousTemplate = !bForce ? 
FDatasmithObjectTemplateUtils::GetObjectTemplate< UDatasmithLandscapeTemplate >( Destination ) : nullptr; + + DATASMITHOBJECTTEMPLATE_CONDITIONALSET(LandscapeMaterial, Landscape, PreviousTemplate); + DATASMITHOBJECTTEMPLATE_CONDITIONALSET(StaticLightingLOD, Landscape, PreviousTemplate); + + FDatasmithObjectTemplateUtils::SetObjectTemplate( Landscape->GetRootComponent(), this ); +#endif // #if WITH_EDITORONLY_DATA +} + +void UDatasmithLandscapeTemplate::Load( const UObject* Source ) +{ +#if WITH_EDITORONLY_DATA + const ALandscape* Landscape = Cast< ALandscape >( Source ); + + if( !Landscape ) + { + return; + } + + LandscapeMaterial = Landscape->LandscapeMaterial; + StaticLightingLOD = Landscape->StaticLightingLOD; +#endif // #if WITH_EDITORONLY_DATA +} + +bool UDatasmithLandscapeTemplate::Equals( const UDatasmithObjectTemplate* Other ) const +{ + const UDatasmithLandscapeTemplate* TypedOther = Cast< UDatasmithLandscapeTemplate >( Other ); + + if ( !TypedOther ) + { + return false; + } + + bool bEquals = ( LandscapeMaterial == TypedOther->LandscapeMaterial ); + bEquals = bEquals && ( StaticLightingLOD == TypedOther->StaticLightingLOD ); + + return bEquals; +} diff --git a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/DatasmithAssetImportData.h b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/DatasmithAssetImportData.h index af78faad35ea..fbf8465316f0 100644 --- a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/DatasmithAssetImportData.h +++ b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/DatasmithAssetImportData.h @@ -105,4 +105,56 @@ public: UPROPERTY(EditAnywhere, Category = "Tessellation", meta = (ShowOnlyInnerProperties)) FDatasmithTessellationOptions TessellationOptions; #endif // WITH_EDITORONLY_DATA -}; \ No newline at end of file +}; + +UCLASS() +class DATASMITHCONTENT_API UDatasmithMDLSceneImportData : public UDatasmithSceneImportData +{ + 
GENERATED_BODY() +}; + +UCLASS(EditInlineNew) +class DATASMITHCONTENT_API UDatasmithDeltaGenAssetImportData : public UDatasmithAssetImportData +{ + GENERATED_BODY() +}; + +UCLASS(EditInlineNew) +class DATASMITHCONTENT_API UDatasmithDeltaGenSceneImportData : public UDatasmithSceneImportData +{ + GENERATED_BODY() +}; + +UENUM(BlueprintType) +enum class EVREDDataTableType : uint8 +{ + NotDatatable, + Variants, + AnimClips, + AnimNodes +}; + +UCLASS(EditInlineNew) +class DATASMITHCONTENT_API UDatasmithVREDAssetImportData : public UDatasmithAssetImportData +{ + GENERATED_BODY() + +public: + EVREDDataTableType DataTableType; +}; + +UCLASS(EditInlineNew) +class DATASMITHCONTENT_API UDatasmithVREDSceneImportData : public UDatasmithSceneImportData +{ + GENERATED_BODY() + +// TODO +/*#if WITH_EDITORONLY_DATA +public: + UPROPERTY(EditAnywhere, Category="VREDOptions", meta=(ShowOnlyInnerProperties)) + class UDatasmithVREDImportOptions* VREDOptions; + +#endif // WITH_EDITORONLY_DATA*/ +}; + + diff --git a/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/ObjectTemplates/DatasmithLandscapeTemplate.h b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/ObjectTemplates/DatasmithLandscapeTemplate.h new file mode 100644 index 000000000000..1aa26f73ea7b --- /dev/null +++ b/Engine/Plugins/Enterprise/DatasmithContent/Source/DatasmithContent/Public/ObjectTemplates/DatasmithLandscapeTemplate.h @@ -0,0 +1,26 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "ObjectTemplates/DatasmithObjectTemplate.h" + +#include "DatasmithLandscapeTemplate.generated.h" + +class UMaterialInterface; + +UCLASS() +class DATASMITHCONTENT_API UDatasmithLandscapeTemplate : public UDatasmithObjectTemplate +{ + GENERATED_BODY() + +public: + UPROPERTY() + UMaterialInterface* LandscapeMaterial; + + UPROPERTY() + int32 StaticLightingLOD; + + virtual void Apply(UObject* Destination, bool bForce = false) override; + virtual void Load(const UObject* Source) override; + virtual bool Equals(const UDatasmithObjectTemplate* Other) const override; +}; \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSets.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSets.cpp new file mode 100644 index 000000000000..2a48e1f69c49 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSets.cpp @@ -0,0 +1,40 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "LevelVariantSets.h" + +#include "VariantSet.h" +#include "Engine/EngineTypes.h" +#include "Components/ActorComponent.h" + + +ULevelVariantSets::ULevelVariantSets(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) +{ +} + +void ULevelVariantSets::AddVariantSet(UVariantSet* NewVariantSet) +{ + // Take ownership of NewVariantSet + NewVariantSet->Rename(*NewVariantSet->GetName(), this); + + VariantSets.Add(NewVariantSet); +} + +void ULevelVariantSets::RemoveVariantSet(UVariantSet* ThisVariantSet) +{ + VariantSets.RemoveSingle(ThisVariantSet); +} + +void ULevelVariantSets::SaveExpansionState(UVariantSet* VarSetOfNode, bool bExpanded) +{ + DisplayNodeExpansionStates.Add(VarSetOfNode, bExpanded); +} + +bool ULevelVariantSets::GetExpansionState(UVariantSet* VarSetOfNode) +{ + if (DisplayNodeExpansionStates.Contains(VarSetOfNode)) + { + return DisplayNodeExpansionStates[VarSetOfNode]; + } + return false; +} \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSetsActor.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSetsActor.cpp new file mode 100644 index 000000000000..d19f7c772a0c --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/LevelVariantSetsActor.cpp @@ -0,0 +1,50 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "LevelVariantSetsActor.h" + +#include "LevelVariantSets.h" + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" +#include "UObject/Object.h" +#include "UObject/SoftObjectPath.h" + + +ALevelVariantSetsActor::ALevelVariantSetsActor(const FObjectInitializer& Init) + : Super(Init) +{ + USceneComponent* SceneComponent = CreateDefaultSubobject(TEXT("SceneComp")); + RootComponent = SceneComponent; +} + +ULevelVariantSets* ALevelVariantSetsActor::GetLevelVariantSets(bool bLoad) const +{ + if (LevelVariantSets.IsValid()) + { + ULevelVariantSets* VarSets = Cast(LevelVariantSets.ResolveObject()); + if (VarSets) + { + return VarSets; + } + + if (bLoad) + { + if (IsAsyncLoading()) + { + LoadPackageAsync(LevelVariantSets.GetLongPackageName()); + return nullptr; + } + else + { + return Cast(LevelVariantSets.TryLoad()); + } + } + } + + return nullptr; +} + +void ALevelVariantSetsActor::SetLevelVariantSets(ULevelVariantSets* InVariantSets) +{ + LevelVariantSets = InVariantSets; +} \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/Variant.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/Variant.cpp new file mode 100644 index 000000000000..73d686cf5b89 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/Variant.cpp @@ -0,0 +1,123 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Variant.h" +#include "VariantSet.h" +#include "VariantObjectBinding.h" + +#include "CoreMinimal.h" +#include "GameFramework/Actor.h" + +#define LOCTEXT_NAMESPACE "VariantManagerVariant" + + +UVariant::UVariant(const FObjectInitializer& Init) +{ +} + +UVariant* UVariant::Clone(UObject* ClonesOuter) +{ + if (ClonesOuter == INVALID_OBJECT) + { + ClonesOuter = GetOuter(); + } + + UVariant* NewVariant = DuplicateObject(this, ClonesOuter); + + for (UVariantObjectBinding* OurBinding : GetBindings()) + { + NewVariant->AddBinding(OurBinding->Clone()); + } + NewVariant->SetSortingOrder(GetSortingOrder() + 1); + + return NewVariant; +} + +UVariantSet* UVariant::GetParent() +{ + return Cast(GetOuter()); +} + +FText UVariant::GetDisplayName() const +{ + if (DisplayName.IsEmpty()) + { + return GetDefaultDisplayName(); + } + + return DisplayName; +} + +void UVariant::SetDisplayName(const FText& NewDisplayName) +{ + if (NewDisplayName.EqualTo(DisplayName)) + { + return; + } + + SetFlags(RF_Transactional); + Modify(); + + DisplayName = NewDisplayName; +} + +FText UVariant::GetDefaultDisplayName() const +{ + return LOCTEXT("UnnamedVariantName", "Variant"); +} + +void UVariant::AddBinding(UVariantObjectBinding* NewBinding) +{ + NewBinding->Rename(*NewBinding->GetName(), this); + + ObjectBindings.Add(NewBinding); +} + +void UVariant::AddActors(TWeakObjectPtr InActor) +{ + UVariantObjectBinding* NewBinding = NewObject(this); + NewBinding->Init(InActor.Get()); + + FGuid NewGuid = NewBinding->GetObjectGuid(); + for (UVariantObjectBinding* Binding : ObjectBindings) + { + if (Binding->GetObjectGuid() == NewGuid) + { + return; + } + } + + AddBinding(NewBinding); +} + +void UVariant::AddActors(const TArray>& InActors) +{ + for (TWeakObjectPtr InActor : InActors) + { + UVariantObjectBinding* NewBinding = NewObject(this); + NewBinding->Init(InActor.Get()); + + FGuid NewGuid = NewBinding->GetObjectGuid(); + + bool bDuplicate = false; + for (UVariantObjectBinding* Binding : 
ObjectBindings) + { + if (Binding->GetObjectGuid() == NewGuid) + { + bDuplicate = true; + break; + } + } + + if (!bDuplicate) + { + AddBinding(NewBinding); + } + } +} + +void UVariant::RemoveBinding(UVariantObjectBinding* ThisBinding) +{ + ObjectBindings.RemoveSingle(ThisBinding); +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantManagerContentModule.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantManagerContentModule.cpp new file mode 100644 index 000000000000..97acb7930efa --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantManagerContentModule.cpp @@ -0,0 +1,24 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "VariantManagerContentModule.h" + +#include "Modules/ModuleManager.h" + +#define LOCTEXT_NAMESPACE "VariantManagerContentModule" + + +class FVariantManagerContentModule : public IVariantManagerContentModule +{ +public: + virtual void StartupModule() override + { + } + + virtual void ShutdownModule() override + { + } +}; + +IMPLEMENT_MODULE(FVariantManagerContentModule, VariantManagerContent); + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantObjectBinding.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantObjectBinding.cpp new file mode 100644 index 000000000000..1c598f7d9eb0 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantObjectBinding.cpp @@ -0,0 +1,30 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "VariantObjectBinding.h" + +#include "Variant.h" +#include "MovieSceneTrack.h" + + +UVariantObjectBinding::UVariantObjectBinding(const FObjectInitializer& Init) +{ + +} + +UVariantObjectBinding* UVariantObjectBinding::Clone(UObject* ClonesOuter) +{ + if (ClonesOuter == INVALID_OBJECT) + { + ClonesOuter = GetOuter(); + } + + UVariantObjectBinding* NewBinding = DuplicateObject(this, ClonesOuter); + NewBinding->Init(GetObject(), GetSortingOrder() + 1); + + return NewBinding; +} + +UVariant* UVariantObjectBinding::GetParent() +{ + return Cast(GetOuter()); +} diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantSet.cpp b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantSet.cpp new file mode 100644 index 000000000000..6c4b18139cdd --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Private/VariantSet.cpp @@ -0,0 +1,79 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "VariantSet.h" +#include "LevelVariantSets.h" +#include "Variant.h" + +#include "CoreMinimal.h" + +#define LOCTEXT_NAMESPACE "VariantManagerVariantSet" + + +UVariantSet::UVariantSet(const FObjectInitializer& Init) +{ +} + +UVariantSet* UVariantSet::Clone(UObject* ClonesOuter) +{ + if (ClonesOuter == INVALID_OBJECT) + { + ClonesOuter = GetOuter(); + } + + UVariantSet* NewVariantSet = DuplicateObject(this, GetOuter()); + + for (UVariant* OurVariant : GetVariants()) + { + NewVariantSet->AddVariant(OurVariant->Clone()); + } + NewVariantSet->SetSortingOrder(GetSortingOrder() + 1); + + return NewVariantSet; +} + +ULevelVariantSets* UVariantSet::GetParent() +{ + return Cast(GetOuter()); +} + +FText UVariantSet::GetDisplayName() const +{ + if (DisplayName.IsEmpty()) + { + return GetDefaultDisplayName(); + } + + return DisplayName; +} + +void UVariantSet::SetDisplayName(const FText& NewDisplayName) +{ + if (NewDisplayName.EqualTo(DisplayName)) + { + return; + } + + SetFlags(RF_Transactional); + Modify(); + + DisplayName = NewDisplayName; +} + +FText UVariantSet::GetDefaultDisplayName() const +{ + return LOCTEXT("UnnamedVariantSetName", "VariantSet"); +} + +void UVariantSet::AddVariant(UVariant* NewVariant) +{ + NewVariant->Rename(*NewVariant->GetName(), this); + + Variants.Add(NewVariant); +} + +void UVariantSet::RemoveVariant(UVariant* ThisVariant) +{ + Variants.RemoveSingle(ThisVariant); +} + +#undef LOCTEXT_NAMESPACE \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSets.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSets.h new file mode 100644 index 000000000000..0e8bbb5bfe38 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSets.h @@ -0,0 +1,41 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" + +#include "LevelVariantSets.generated.h" + + +class UVariantSet; + + +UCLASS(DefaultToInstanced) +class VARIANTMANAGERCONTENT_API ULevelVariantSets : public UObject +{ + GENERATED_UCLASS_BODY() + +public: + + /**~ UObject implementation */ + //virtual void Serialize(FArchive& Ar) override; + + void AddVariantSet(UVariantSet* NewVariantSet); + const TArray& GetVariantSets() const + { + return VariantSets; + } + void RemoveVariantSet(UVariantSet* ThisVariantSet); + + void SaveExpansionState(UVariantSet* VarSetOfNode, bool bExpanded); + bool GetExpansionState(UVariantSet* VarSetOfNode); + +private: + + UPROPERTY() + TArray VariantSets; + + UPROPERTY() + TMap DisplayNodeExpansionStates; +}; diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSetsActor.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSetsActor.h new file mode 100644 index 000000000000..022dcfbf9a73 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/LevelVariantSetsActor.h @@ -0,0 +1,35 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" +#include "UObject/Object.h" +#include "UObject/SoftObjectPath.h" +#include "GameFramework/Actor.h" + +#include "LevelVariantSetsActor.generated.h" + + +UCLASS(hideCategories=(Rendering, Physics, LOD, Activation, Input)) +class VARIANTMANAGERCONTENT_API ALevelVariantSetsActor : public AActor +{ +public: + + GENERATED_BODY() + + ALevelVariantSetsActor(const FObjectInitializer& Init); + +public: + + UPROPERTY(EditAnywhere, BlueprintReadOnly, Category="Variants", meta=(AllowedClasses="LevelVariantSets")) + FSoftObjectPath LevelVariantSets; + +public: + + UFUNCTION(BlueprintCallable, Category="Variants") + ULevelVariantSets* GetLevelVariantSets(bool bLoad = false) const; + + UFUNCTION(BlueprintCallable, Category="Variants") + void SetLevelVariantSets(ULevelVariantSets* InVariantSets); +}; diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/Variant.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/Variant.h new file mode 100644 index 000000000000..0ab7ace3fc27 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/Variant.h @@ -0,0 +1,66 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" + +#include "Variant.generated.h" + + +class UVariantObjectBinding; + + +UCLASS(DefaultToInstanced) +class VARIANTMANAGERCONTENT_API UVariant : public UObject +{ + GENERATED_UCLASS_BODY() + +public: + + /**~ UObject implementation */ + //virtual void Serialize(FArchive& Ar) override; + + // We need this because UVariantObjectBindings have non-UPROPERTY FLazyObjectPtrs + // that need to be manually copied + UVariant* Clone(UObject* ClonesOuter = INVALID_OBJECT); + + class UVariantSet* GetParent(); + + void SetDisplayName(const FText& NewDisplayName); + FText GetDisplayName() const; + FText GetDefaultDisplayName() const; + + void AddActors(TWeakObjectPtr InActor); + void AddActors(const TArray>& InActors); + + void AddBinding(UVariantObjectBinding* NewBinding); + + const TArray& GetBindings() + { + return ObjectBindings; + } + + void RemoveBinding(UVariantObjectBinding* ThisBinding); + + int32 GetSortingOrder() const + { + return SortingOrder; + } + + void SetSortingOrder(const int32 InSortingOrder) + { + SortingOrder = InSortingOrder; + } + +private: + UPROPERTY() + FText DisplayName; + + UPROPERTY() + int32 SortingOrder; + + // We manually duplicate these within our Clone function + UPROPERTY(DuplicateTransient) + TArray ObjectBindings; +}; diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantManagerContentModule.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantManagerContentModule.h new file mode 100644 index 000000000000..7851c57b1a6a --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantManagerContentModule.h @@ -0,0 +1,24 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "Modules/ModuleInterface.h" +#include "Modules/ModuleManager.h" // For inline LoadModuleChecked() + +#define VARIANTMANAGERCONTENTMODULE_MODULE_NAME TEXT("VariantManagerContentModule") + + +class IVariantManagerContentModule : public IModuleInterface +{ +public: + static inline IVariantManagerContentModule& Get() + { + return FModuleManager::LoadModuleChecked(VARIANTMANAGERCONTENTMODULE_MODULE_NAME); + } + + static inline bool IsAvailable() + { + return FModuleManager::Get().IsModuleLoaded(VARIANTMANAGERCONTENTMODULE_MODULE_NAME); + } +}; + diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantObjectBinding.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantObjectBinding.h new file mode 100644 index 000000000000..78de3d03295f --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantObjectBinding.h @@ -0,0 +1,56 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" +#include "UObject/LazyObjectPtr.h" +#include "Misc/Guid.h" + +#include "VariantObjectBinding.generated.h" + + +UCLASS(BlueprintType) +class VARIANTMANAGERCONTENT_API UVariantObjectBinding : public UObject +{ + GENERATED_UCLASS_BODY() + +public: + void Init(const UObject* InObject, int32 InSortingOrder = -1) + { + ObjectPtr = FLazyObjectPtr(InObject); + SetSortingOrder(InSortingOrder); + } + + // We need this because UVariantObjectBindings have non-UPROPERTY FLazyObjectPtrs + // that need to be manually copied + UVariantObjectBinding* Clone(UObject* ClonesOuter = INVALID_OBJECT); + + class UVariant* GetParent(); + + const FGuid& GetObjectGuid() const + { + return ObjectPtr.GetUniqueID().GetGuid(); + } + + UObject* GetObject() const + { + return ObjectPtr.Get(); + } + + int32 GetSortingOrder() const + { + return SortingOrder; + } + + void SetSortingOrder(const int32 InSortingOrder) + { + SortingOrder = InSortingOrder; + } + +private: + FLazyObjectPtr ObjectPtr; + + UPROPERTY() + int32 SortingOrder; +}; diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantSet.h b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantSet.h new file mode 100644 index 000000000000..8d8a789bee20 --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/Public/VariantSet.h @@ -0,0 +1,61 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" + +#include "VariantSet.generated.h" + +class UVariant; + + +UCLASS(BlueprintType) +class VARIANTMANAGERCONTENT_API UVariantSet : public UObject +{ + GENERATED_UCLASS_BODY() + +public: + + /**~ UObject implementation */ + //virtual void Serialize(FArchive& Ar) override; + + // We need this because UVariantObjectBindings have non-UPROPERTY FLazyObjectPtrs + // that need to be manually copied + UVariantSet* Clone(UObject* ClonesOuter = INVALID_OBJECT); + + class ULevelVariantSets* GetParent(); + + void SetDisplayName(const FText& NewDisplayName); + FText GetDisplayName() const; + FText GetDefaultDisplayName() const; + + void AddVariant(UVariant* NewVariant); + const TArray& GetVariants() const + { + return Variants; + } + + void RemoveVariant(UVariant* ThisVariant); + + int32 GetSortingOrder() const + { + return SortingOrder; + } + + void SetSortingOrder(const int32 InSortingOrder) + { + SortingOrder = InSortingOrder; + } + +private: + UPROPERTY() + FText DisplayName; + + UPROPERTY() + int32 SortingOrder; + + // We manually duplicate these within our Clone function + UPROPERTY(DuplicateTransient) + TArray Variants; +}; diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/VariantManagerContent.Build.cs b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/VariantManagerContent.Build.cs new file mode 100644 index 000000000000..1ffe003788ff --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/Source/VariantManagerContent/VariantManagerContent.Build.cs @@ -0,0 +1,27 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +namespace UnrealBuildTool.Rules +{ + public class VariantManagerContent : ModuleRules + { + public VariantManagerContent(ReadOnlyTargetRules Target) + : base(Target) + { + PublicDependencyModuleNames.AddRange( + new string[] + { + "Core", + "CoreUObject", + "Engine", + "MovieScene" + } + ); + + PrivateDependencyModuleNames.AddRange( + new string[] + { + } + ); + } + } +} \ No newline at end of file diff --git a/Engine/Plugins/Enterprise/VariantManagerContent/VariantManagerContent.uplugin b/Engine/Plugins/Enterprise/VariantManagerContent/VariantManagerContent.uplugin new file mode 100644 index 000000000000..6021a2a0ce1b --- /dev/null +++ b/Engine/Plugins/Enterprise/VariantManagerContent/VariantManagerContent.uplugin @@ -0,0 +1,25 @@ +{ + "FileVersion": 3, + "Version": 1, + "VersionName": "1.0", + "FriendlyName": "Variant Manager Content", + "Description": "Data classes and assets for the Variant Manager enterprise plugin", + "Category": "Unreal Studio", + "CreatedBy": "Epic Games, Inc.", + "CreatedByURL": "http://epicgames.com", + "DocsURL": "https://docs.unrealengine.com/en-US/Studio/Datasmith", + "MarketplaceURL": "", + "SupportURL": "", + "EnabledByDefault" : true, + "CanContainContent": true, + "IsBetaVersion": false, + "Installed": false, + "Modules" : + [ + { + "Name" : "VariantManagerContent", + "Type" : "Runtime", + "LoadingPhase" : "Default" + } + ] +} \ No newline at end of file diff --git a/Engine/Plugins/Experimental/ControlRig/Source/ControlRig/Private/Sequencer/MovieSceneControlRigTrack.cpp b/Engine/Plugins/Experimental/ControlRig/Source/ControlRig/Private/Sequencer/MovieSceneControlRigTrack.cpp index 048b9e9dad89..517058464359 100644 --- a/Engine/Plugins/Experimental/ControlRig/Source/ControlRig/Private/Sequencer/MovieSceneControlRigTrack.cpp +++ b/Engine/Plugins/Experimental/ControlRig/Source/ControlRig/Private/Sequencer/MovieSceneControlRigTrack.cpp @@ -40,7 +40,7 @@ void UMovieSceneControlRigTrack::AddNewControlRig(FFrameNumber KeyTime, UControl 
UMovieSceneSection* UMovieSceneControlRigTrack::CreateNewSection() { - return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } #if WITH_EDITORONLY_DATA diff --git a/Engine/Plugins/Experimental/PixelStreaming/Config/BasePixelStreaming.ini b/Engine/Plugins/Experimental/PixelStreaming/Config/BasePixelStreaming.ini new file mode 100644 index 000000000000..2a83026db736 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Config/BasePixelStreaming.ini @@ -0,0 +1,2 @@ +[/Script/PixelStreaming.PixelStreamingSettings] +PixelStreamingDefaultCursorClassName=/PixelStreaming/DefaultCursor.DefaultCursor_C \ No newline at end of file diff --git a/Engine/Plugins/Experimental/PixelStreaming/PixelStreaming.uplugin b/Engine/Plugins/Experimental/PixelStreaming/PixelStreaming.uplugin new file mode 100644 index 000000000000..3a50939dbfe9 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/PixelStreaming.uplugin @@ -0,0 +1,26 @@ +{ + "FileVersion" : 3, + "Version" : 0.1, + "VersionName" : "0.1", + "FriendlyName" : "Pixel Streaming", + "Description" : "Support for pixel streaming from UE4", + "Category" : "Graphics", + "CreatedBy" : "Epic Games, Inc.", + "CreatedByURL" : "http://epicgames.com", + "DocsURL" : "", + "MarketplaceURL" : "", + "SupportURL" : "", + "EnabledByDefault" : false, + "CanContainContent" : true, + "IsBetaVersion" : true, + "Installed" : false, + "Modules" : + [ + { + "Name" : "PixelStreaming", + "Type" : "Runtime", + "LoadingPhase" : "PostEngineInit", + "WhitelistPlatforms" : [ "Win64" ] + } + ] +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/PixelStreaming.Build.cs b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/PixelStreaming.Build.cs new file mode 100644 index 000000000000..0771ae984d5c --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/PixelStreaming.Build.cs @@ -0,0 +1,155 @@ +// Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved. + +using System.IO; +using System.Collections.Generic; +using Tools.DotNETCommon; + +namespace UnrealBuildTool.Rules +{ + public class PixelStreaming : ModuleRules + { + private void AddWebRTCProxy() + { + string PixelStreamingProgramsDirectory = "./Programs/PixelStreaming"; + string WebRTCProxyDir = PixelStreamingProgramsDirectory + "/WebRTCProxy/bin"; + + if (!Directory.Exists(WebRTCProxyDir)) + { + Log.TraceInformation(string.Format("WebRTC Proxy path '{0}' does not exist", WebRTCProxyDir)); + return; + } + + List DependenciesToAdd = new List(); + DependenciesToAdd.AddRange(Directory.GetFiles(WebRTCProxyDir, "*Development.exe")); + DependenciesToAdd.AddRange(Directory.GetFiles(WebRTCProxyDir, "*Development.pdb")); + DependenciesToAdd.AddRange(Directory.GetFiles(WebRTCProxyDir, "*.bat")); + DependenciesToAdd.AddRange(Directory.GetFiles(WebRTCProxyDir, "*.ps1")); + + foreach(string Dependency in DependenciesToAdd) + { + RuntimeDependencies.Add(Dependency, StagedFileType.NonUFS); + } + } + + private void AddSignallingServer() + { + string PixelStreamingProgramsDirectory = "./Programs/PixelStreaming"; + string SignallingServerDir = new DirectoryInfo(PixelStreamingProgramsDirectory + "/WebServers/SignallingWebServer").FullName; + + if (!Directory.Exists(SignallingServerDir)) + { + Log.TraceInformation(string.Format("Signalling Server path '{0}' does not exist", SignallingServerDir)); + return; + } + + List DependenciesToAdd = new List(); + DependenciesToAdd.AddRange(Directory.GetFiles(SignallingServerDir, "*.*", SearchOption.AllDirectories)); + + string NodeModulesDirPath = new DirectoryInfo(SignallingServerDir + "/node_modules").FullName; + string LogsDirPath = new DirectoryInfo(SignallingServerDir + "/logs").FullName; + foreach (string Dependency in DependenciesToAdd) + { + if (!Dependency.StartsWith(NodeModulesDirPath) && + !Dependency.StartsWith(LogsDirPath)) + { + RuntimeDependencies.Add(Dependency, StagedFileType.NonUFS); + } + } + } + + 
private void AddMatchmakingServer() + { + string PixelStreamingProgramsDirectory = "./Programs/PixelStreaming"; + string MatchmakingServerDir = new DirectoryInfo(PixelStreamingProgramsDirectory + "/WebServers/Matchmaker").FullName; + + if (!Directory.Exists(MatchmakingServerDir)) + { + Log.TraceInformation(string.Format("Matchmaking Server path '{0}' does not exist", MatchmakingServerDir)); + return; + } + + List DependenciesToAdd = new List(); + DependenciesToAdd.AddRange(Directory.GetFiles(MatchmakingServerDir, "*.*", SearchOption.AllDirectories)); + + string NodeModulesDirPath = new DirectoryInfo(MatchmakingServerDir + "/node_modules").FullName; + string LogsDirPath = new DirectoryInfo(MatchmakingServerDir + "/logs").FullName; + foreach (string Dependency in DependenciesToAdd) + { + if (!Dependency.StartsWith(NodeModulesDirPath) && + !Dependency.StartsWith(LogsDirPath)) + { + RuntimeDependencies.Add(Dependency, StagedFileType.NonUFS); + } + } + } + + private void AddWebRTCServers() + { + string webRTCRevision = "23789"; + string webRTCRevisionDirectory = "./ThirdParty/WebRTC/rev." 
+ webRTCRevision; + string webRTCProgramsDirectory = System.IO.Path.Combine(webRTCRevisionDirectory, "programs/Win64/VS2017/release"); + + List DependenciesToAdd = new List(); + DependenciesToAdd.AddRange(Directory.GetFiles(webRTCProgramsDirectory, "*.exe")); + DependenciesToAdd.AddRange(Directory.GetFiles(webRTCProgramsDirectory, "*.pdb")); + + foreach (string Dependency in DependenciesToAdd) + { + RuntimeDependencies.Add(Dependency, StagedFileType.NonUFS); + } + } + + public PixelStreaming(ReadOnlyTargetRules Target) : base(Target) + { + PublicIncludePaths.Add(ModuleDirectory); + PrivateIncludePaths.Add(ModuleDirectory); + PrivateIncludePaths.Add(System.IO.Path.Combine(ModuleDirectory, "../ThirdParty")); + + // NOTE: General rule is not to access the private folder of another module, + // but to use the ISubmixBufferListener interface, we need to include some private headers + PrivateIncludePaths.Add(System.IO.Path.Combine(Directory.GetCurrentDirectory(), "./Runtime/AudioMixer/Private")); + + PublicDependencyModuleNames.AddRange( + new string[] + { + "ApplicationCore", + "Core", + "CoreUObject", + "Engine", + "InputCore", + "InputDevice", + "Json", + "RenderCore", + "ShaderCore", + "AnimGraphRuntime", + "RHI", + "Slate", + "SlateCore", + "Sockets", + "Networking" + } + ); + + PrivateDependencyModuleNames.AddRange( + new string[] + { + "Slate", + "SlateCore", + "AudioMixer", + "Json" + } + ); + + if (Target.Platform == UnrealTargetPlatform.Win32 || Target.Platform == UnrealTargetPlatform.Win64) + { + PrivateDependencyModuleNames.AddRange(new string[] { "D3D11RHI" }); + } + + AddWebRTCProxy(); + AddSignallingServer(); + AddMatchmakingServer(); + AddWebRTCServers(); + + } + } +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.cpp new file mode 100644 index 000000000000..0243ad16dc2b --- /dev/null +++ 
b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.cpp @@ -0,0 +1,89 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "AudioEncoder.h" +#include "Engine/Engine.h" +#include "Misc/CommandLine.h" +#include "Engine/GameEngine.h" +#include "Streamer.h" +#include "PixelStreamingCommon.h" + +FAudioEncoder::FAudioEncoder(FStreamer& Outer) + : Outer(Outer) + , bInitialized(false) + , bFormatChecked(false) +{ +} + +FAudioEncoder::~FAudioEncoder() +{ + if (bInitialized) + { + if (UGameEngine* GameEngine = Cast(GEngine)) + { + FAudioDevice* AudioDevice = GameEngine->GetMainAudioDevice(); + if (AudioDevice) + { + AudioDevice->UnregisterSubmixBufferListener(this); + } + } + } +} + +void FAudioEncoder::Init() +{ + if (!FParse::Param(FCommandLine::Get(), TEXT("AudioMixer"))) + { + UE_LOG(PixelStreaming, Warning, TEXT("No audio supported. Needs -audiomixer parameter")); + return; + } + + if (UGameEngine* GameEngine = Cast(GEngine)) + { + FAudioDevice* AudioDevice = GameEngine->GetMainAudioDevice(); + if (AudioDevice) + { + AudioDevice->RegisterSubmixBufferListener(this); + bInitialized = true; + } + } +} + +void FAudioEncoder::OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock) +{ + if (!bInitialized) + { + return; + } + + // Only 48000hz supported for now + if (SampleRate != 48000) + { + // Only report the problem once + if (!bFormatChecked) + { + bFormatChecked = true; + UE_LOG(PixelStreaming, Warning, TEXT("Audio samplerate needs to be 48000hz")); + } + return; + } + + Audio::TSampleBuffer Buffer(AudioData, NumSamples, NumChannels, SampleRate); + // Mix to stereo if required, since PixelStreaming only accept stereo at the moment + if (Buffer.GetNumChannels() != 2) + { + Buffer.MixBufferToChannels(2); + } + + // Convert to signed PCM 16-bits + PCM16.Reset(Buffer.GetNumSamples()); + 
PCM16.AddZeroed(Buffer.GetNumSamples()); + const float* Ptr = Buffer.GetData(); + for (int16& S : PCM16) + { + int32 N = *Ptr >= 0 ? *Ptr * int32(MAX_int16) : *Ptr * (int32(MAX_int16)+1); + S = static_cast(FMath::Clamp(N, int32(MIN_int16), int32(MAX_int16))); + Ptr++; + } + + Outer.OnAudioPCMPacketReady(reinterpret_cast(PCM16.GetData()), PCM16.Num() * sizeof(int16)); +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.h new file mode 100644 index 000000000000..2593c175b01e --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/AudioEncoder.h @@ -0,0 +1,30 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +#pragma once + +#include "CoreMinimal.h" +#include "AudioMixerDevice.h" + +// Forward declaration +class FStreamer; + +class FAudioEncoder final : public ISubmixBufferListener +{ +public: + FAudioEncoder(FStreamer& Outer); + ~FAudioEncoder(); + FAudioEncoder(FAudioEncoder& Other) = delete; + FAudioEncoder& operator=(FAudioEncoder& Other) = delete; + void Init(); +private: + + // ISubmixBufferListener interface + virtual void OnNewSubmixBuffer(const USoundSubmix* OwningSubmix, float* AudioData, int32 NumSamples, int32 NumChannels, const int32 SampleRate, double AudioClock) override; + + FStreamer& Outer; + bool bInitialized; + bool bFormatChecked; + + // Used as scratchpad to convert the floats to pcm-16bits, to avoid + // reallocating memory + TArray PCM16; +}; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/JavaScriptKeyCodes.inl b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/JavaScriptKeyCodes.inl new file mode 100644 index 000000000000..984317c72834 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/JavaScriptKeyCodes.inl @@ -0,0 +1,270 @@ +// Copyright 1998-2018 
Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "InputCoreTypes.h" + +/** + * https://www.cambiaresearch.com/articles/15/javascript-char-codes-key-codes + * http://keycode.info/ + */ + +static const FKey* AgnosticKeys[256] = +{ + /* 0 */ &EKeys::Invalid, + /* 1 */ &EKeys::Invalid, + /* 2 */ &EKeys::Invalid, + /* 3 */ &EKeys::Invalid, + /* 4 */ &EKeys::Invalid, + /* 5 */ &EKeys::Invalid, + /* 6 */ &EKeys::Invalid, + /* 7 */ &EKeys::Invalid, + /* 8 */ &EKeys::BackSpace, + /* 9 */ &EKeys::Tab, + /* 10 */ &EKeys::Invalid, + /* 11 */ &EKeys::Invalid, + /* 12 */ /*&EKeys::Clear*/ &EKeys::Invalid, + /* 13 */ &EKeys::Enter, + /* 14 */ &EKeys::Invalid, + /* 15 */ &EKeys::Invalid, + /* 16 */ &EKeys::LeftShift, + /* 17 */ &EKeys::LeftControl, + /* 18 */ &EKeys::LeftAlt, + /* 19 */ &EKeys::Pause, + /* 20 */ &EKeys::CapsLock, + /* 21 */ &EKeys::Invalid, + /* 22 */ &EKeys::Invalid, + /* 23 */ &EKeys::Invalid, + /* 24 */ &EKeys::Invalid, + /* 25 */ &EKeys::Invalid, + /* 26 */ &EKeys::Invalid, + /* 27 */ &EKeys::Escape, + /* 28 */ &EKeys::Invalid, + /* 29 */ &EKeys::Invalid, + /* 30 */ &EKeys::Invalid, + /* 31 */ &EKeys::Invalid, + /* 32 */ &EKeys::SpaceBar, + /* 33 */ &EKeys::PageUp, + /* 34 */ &EKeys::PageDown, + /* 35 */ &EKeys::End, + /* 36 */ &EKeys::Home, + /* 37 */ &EKeys::Left, + /* 38 */ &EKeys::Up, + /* 39 */ &EKeys::Right, + /* 40 */ &EKeys::Down, + /* 41 */ &EKeys::Invalid, + /* 42 */ &EKeys::Invalid, + /* 43 */ &EKeys::Invalid, + /* 44 */ /*&EKeys::PrintScreen*/ &EKeys::Invalid, + /* 45 */ &EKeys::Insert, + /* 46 */ &EKeys::Delete, + /* 47 */ &EKeys::Invalid, + /* 48 */ &EKeys::Zero, + /* 49 */ &EKeys::One, + /* 50 */ &EKeys::Two, + /* 51 */ &EKeys::Three, + /* 52 */ &EKeys::Four, + /* 53 */ &EKeys::Five, + /* 54 */ &EKeys::Six, + /* 55 */ &EKeys::Seven, + /* 56 */ &EKeys::Eight, + /* 57 */ &EKeys::Nine, + /* 58 */ &EKeys::Invalid, + /* 59 */ &EKeys::Invalid, + /* 60 */ &EKeys::Invalid, + /* 61 */ &EKeys::Invalid, + /* 62 */ &EKeys::Invalid, + /* 63 
*/ &EKeys::Invalid, + /* 64 */ &EKeys::Invalid, + /* 65 */ &EKeys::A, + /* 66 */ &EKeys::B, + /* 67 */ &EKeys::C, + /* 68 */ &EKeys::D, + /* 69 */ &EKeys::E, + /* 70 */ &EKeys::F, + /* 71 */ &EKeys::G, + /* 72 */ &EKeys::H, + /* 73 */ &EKeys::I, + /* 74 */ &EKeys::J, + /* 75 */ &EKeys::K, + /* 76 */ &EKeys::L, + /* 77 */ &EKeys::M, + /* 78 */ &EKeys::N, + /* 79 */ &EKeys::O, + /* 80 */ &EKeys::P, + /* 81 */ &EKeys::Q, + /* 82 */ &EKeys::R, + /* 83 */ &EKeys::S, + /* 84 */ &EKeys::T, + /* 85 */ &EKeys::U, + /* 86 */ &EKeys::V, + /* 87 */ &EKeys::W, + /* 88 */ &EKeys::X, + /* 89 */ &EKeys::Y, + /* 90 */ &EKeys::Z, + /* 91 */ /*&EKeys::LeftWindowKey*/ &EKeys::Invalid, + /* 92 */ /*&EKeys::RightWindowKey*/ &EKeys::Invalid, + /* 93 */ /*&EKeys::SelectKey*/ &EKeys::Invalid, + /* 94 */ &EKeys::Invalid, + /* 95 */ &EKeys::Invalid, + /* 96 */ &EKeys::NumPadZero, + /* 97 */ &EKeys::NumPadOne, + /* 98 */ &EKeys::NumPadTwo, + /* 99 */ &EKeys::NumPadThree, + /* 100 */ &EKeys::NumPadFour, + /* 101 */ &EKeys::NumPadFive, + /* 102 */ &EKeys::NumPadSix, + /* 103 */ &EKeys::NumPadSeven, + /* 104 */ &EKeys::NumPadEight, + /* 105 */ &EKeys::NumPadNine, + /* 106 */ &EKeys::Multiply, + /* 107 */ &EKeys::Add, + /* 108 */ &EKeys::Invalid, + /* 109 */ &EKeys::Subtract, + /* 110 */ &EKeys::Decimal, + /* 111 */ &EKeys::Divide, + /* 112 */ &EKeys::F1, + /* 113 */ &EKeys::F2, + /* 114 */ &EKeys::F3, + /* 115 */ &EKeys::F4, + /* 116 */ &EKeys::F5, + /* 117 */ &EKeys::F6, + /* 118 */ &EKeys::F7, + /* 119 */ &EKeys::F8, + /* 120 */ &EKeys::F9, + /* 121 */ &EKeys::F10, + /* 122 */ &EKeys::F11, + /* 123 */ &EKeys::F12, + /* 124 */ &EKeys::Invalid, + /* 125 */ &EKeys::Invalid, + /* 126 */ &EKeys::Invalid, + /* 127 */ &EKeys::Invalid, + /* 128 */ &EKeys::Invalid, + /* 129 */ &EKeys::Invalid, + /* 130 */ &EKeys::Invalid, + /* 131 */ &EKeys::Invalid, + /* 132 */ &EKeys::Invalid, + /* 133 */ &EKeys::Invalid, + /* 134 */ &EKeys::Invalid, + /* 135 */ &EKeys::Invalid, + /* 136 */ &EKeys::Invalid, + /* 137 
*/ &EKeys::Invalid, + /* 138 */ &EKeys::Invalid, + /* 139 */ &EKeys::Invalid, + /* 140 */ &EKeys::Invalid, + /* 141 */ &EKeys::Invalid, + /* 142 */ &EKeys::Invalid, + /* 143 */ &EKeys::Invalid, + /* 144 */ &EKeys::NumLock, + /* 145 */ &EKeys::ScrollLock, + /* 146 */ &EKeys::Invalid, + /* 147 */ &EKeys::Invalid, + /* 148 */ &EKeys::Invalid, + /* 149 */ &EKeys::Invalid, + /* 150 */ &EKeys::Invalid, + /* 151 */ &EKeys::Invalid, + /* 152 */ &EKeys::Invalid, + /* 153 */ &EKeys::Invalid, + /* 154 */ &EKeys::Invalid, + /* 155 */ &EKeys::Invalid, + /* 156 */ &EKeys::Invalid, + /* 157 */ &EKeys::Invalid, + /* 158 */ &EKeys::Invalid, + /* 159 */ &EKeys::Invalid, + /* 160 */ &EKeys::Invalid, + /* 161 */ &EKeys::Invalid, + /* 162 */ &EKeys::Invalid, + /* 163 */ &EKeys::Invalid, + /* 164 */ &EKeys::Invalid, + /* 165 */ &EKeys::Invalid, + /* 166 */ &EKeys::Invalid, + /* 167 */ &EKeys::Invalid, + /* 168 */ &EKeys::Invalid, + /* 169 */ &EKeys::Invalid, + /* 170 */ &EKeys::Invalid, + /* 171 */ &EKeys::Invalid, + /* 172 */ &EKeys::Invalid, + /* 173 */ &EKeys::Invalid, + /* 174 */ &EKeys::Invalid, + /* 175 */ &EKeys::Invalid, + /* 176 */ &EKeys::Invalid, + /* 177 */ &EKeys::Invalid, + /* 178 */ &EKeys::Invalid, + /* 179 */ &EKeys::Invalid, + /* 180 */ &EKeys::Invalid, + /* 181 */ &EKeys::Invalid, + /* 182 */ &EKeys::Invalid, + /* 183 */ &EKeys::Invalid, + /* 184 */ &EKeys::Invalid, + /* 185 */ &EKeys::Invalid, + /* 186 */ &EKeys::Semicolon, + /* 187 */ &EKeys::Equals, + /* 188 */ &EKeys::Comma, + /* 189 */ &EKeys::Hyphen, + /* 190 */ &EKeys::Period, + /* 191 */ &EKeys::Slash, + /* 192 */ &EKeys::Tilde, + /* 193 */ &EKeys::Invalid, + /* 194 */ &EKeys::Invalid, + /* 195 */ &EKeys::Invalid, + /* 196 */ &EKeys::Invalid, + /* 197 */ &EKeys::Invalid, + /* 198 */ &EKeys::Invalid, + /* 199 */ &EKeys::Invalid, + /* 200 */ &EKeys::Invalid, + /* 201 */ &EKeys::Invalid, + /* 202 */ &EKeys::Invalid, + /* 203 */ &EKeys::Invalid, + /* 204 */ &EKeys::Invalid, + /* 205 */ &EKeys::Invalid, + /* 206 */ 
&EKeys::Invalid, + /* 207 */ &EKeys::Invalid, + /* 208 */ &EKeys::Invalid, + /* 209 */ &EKeys::Invalid, + /* 210 */ &EKeys::Invalid, + /* 211 */ &EKeys::Invalid, + /* 212 */ &EKeys::Invalid, + /* 213 */ &EKeys::Invalid, + /* 214 */ &EKeys::Invalid, + /* 215 */ &EKeys::Invalid, + /* 216 */ &EKeys::Invalid, + /* 217 */ &EKeys::Invalid, + /* 218 */ &EKeys::Invalid, + /* 219 */ &EKeys::LeftBracket, + /* 220 */ &EKeys::Backslash, + /* 221 */ &EKeys::RightBracket, + /* 222 */ &EKeys::Apostrophe, + /* 223 */ &EKeys::Quote, + /* 224 */ &EKeys::Invalid, + /* 225 */ &EKeys::Invalid, + /* 226 */ &EKeys::Invalid, + /* 227 */ &EKeys::Invalid, + /* 228 */ &EKeys::Invalid, + /* 229 */ &EKeys::Invalid, + /* 230 */ &EKeys::Invalid, + /* 231 */ &EKeys::Invalid, + /* 232 */ &EKeys::Invalid, + /* 233 */ &EKeys::Invalid, + /* 234 */ &EKeys::Invalid, + /* 235 */ &EKeys::Invalid, + /* 236 */ &EKeys::Invalid, + /* 237 */ &EKeys::Invalid, + /* 238 */ &EKeys::Invalid, + /* 239 */ &EKeys::Invalid, + /* 240 */ &EKeys::Invalid, + /* 241 */ &EKeys::Invalid, + /* 242 */ &EKeys::Invalid, + /* 243 */ &EKeys::Invalid, + /* 244 */ &EKeys::Invalid, + /* 245 */ &EKeys::Invalid, + /* 246 */ &EKeys::Invalid, + /* 247 */ &EKeys::Invalid, + /* 248 */ &EKeys::Invalid, + /* 249 */ &EKeys::Invalid, + /* 250 */ &EKeys::Invalid, + /* 251 */ &EKeys::Invalid, + /* 252 */ &EKeys::Invalid, + /* 253 */ &EKeys::Invalid, + /* 254 */ &EKeys::Invalid, + /* 255 */ &EKeys::Invalid +}; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.cpp new file mode 100644 index 000000000000..f787a2ec5524 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.cpp @@ -0,0 +1,828 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "NvVideoEncoder.h" + +#include "Utils.h" +#include "ScreenRendering.h" +#include "PixelStreamingCommon.h" +#include "ShaderCore.h" +#include "RendererInterface.h" +#include "RHIStaticStates.h" +#include "PipelineStateCache.h" +#include "HAL/Runnable.h" +#include "HAL/RunnableThread.h" +#include "HAL/ThreadSafeBool.h" +#include "HAL/ThreadSafeCounter.h" +#include "Misc/CoreDelegates.h" +#include "Modules/ModuleManager.h" + +#if defined PLATFORM_WINDOWS +// Disable macro redefinition warning for compatibility with Windows SDK 8+ +# pragma warning(push) +# pragma warning(disable : 4005) // macro redefinition +# include "Windows/AllowWindowsPlatformTypes.h" +# include "NvEncoder/nvEncodeAPI.h" +# include +# include "D3D11State.h" +# include "D3D11Resources.h" +# include "Windows/HideWindowsPlatformTypes.h" +# pragma warning(pop) +#endif + +DECLARE_CYCLE_STAT(TEXT("CopyBackBuffer"), STAT_NvEnc_CopyBackBuffer, STATGROUP_NvEnc); +DECLARE_CYCLE_STAT(TEXT("SendBackBufferToEncoder"), STAT_NvEnc_SendBackBufferToEncoder, STATGROUP_NvEnc); +DECLARE_CYCLE_STAT(TEXT("WaitForEncodeEvent"), STAT_NvEnc_WaitForEncodeEvent, STATGROUP_NvEnc); +DECLARE_CYCLE_STAT(TEXT("RetrieveEncodedFrame"), STAT_NvEnc_RetrieveEncodedFrame, STATGROUP_NvEnc); +DECLARE_CYCLE_STAT(TEXT("StreamEncodedFrame"), STAT_NvEnc_StreamEncodedFrame, STATGROUP_NvEnc); +DECLARE_DWORD_COUNTER_STAT(TEXT("AsyncMode"), STAT_NvEnc_AsyncMode, STATGROUP_NvEnc); + +#define BITSTREAM_SIZE 1024 * 1024 * 2 +#define NV_RESULT(NvFunction) NvFunction == NV_ENC_SUCCESS + +#if defined PLATFORM_WINDOWS +#define CLOSE_EVENT_HANDLE(EventHandle) CloseHandle(EventHandle); +#else +#define CLOSE_EVENT_HANDLE(EventHandle) fclose((FILE*)EventHandle); +#endif + +class FNvVideoEncoder::FNvVideoEncoderImpl +{ +private: + struct FInputFrame + { + void* RegisteredResource; + NV_ENC_INPUT_PTR MappedResource; + NV_ENC_BUFFER_FORMAT BufferFormat; + }; + + struct FOutputFrame + { + NV_ENC_OUTPUT_PTR BitstreamBuffer; + HANDLE EventHandle; + 
}; + + struct FFrame + { + FTexture2DRHIRef ResolvedBackBuffer; + FInputFrame InputFrame; + FOutputFrame OutputFrame; + TArray EncodedFrame; + bool bIdrFrame = false; + uint64 FrameIdx = 0; + + // timestamps to measure encoding latency + uint64 CaptureTimeStamp = 0; + uint64 EncodeStartTimeStamp = 0; + uint64 EncodeEndTimeStamp = 0; + + FThreadSafeBool bEncoding = false; + }; + + struct FRHITransferRenderTargetToNvEnc final : public FRHICommand + { + FNvVideoEncoder::FNvVideoEncoderImpl* Encoder; + FFrame* Frame; + + FORCEINLINE_DEBUGGABLE FRHITransferRenderTargetToNvEnc(FNvVideoEncoder::FNvVideoEncoderImpl* InEncoder, FFrame* InFrame) + : Encoder(InEncoder), Frame(InFrame) + {} + + void Execute(FRHICommandListBase& CmdList) + { + Encoder->TransferRenderTargetToHWEncoder(*Frame); + } + }; + +public: + FNvVideoEncoderImpl(void* DllHandle, const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, bool bEnableAsyncMode, const FEncodedFrameReadyCallback& InEncodedFrameReadyCallback); + ~FNvVideoEncoderImpl(); + + void UpdateSettings(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer); + void EncodeFrame(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, uint64 CaptureMs); + void TransferRenderTargetToHWEncoder(FFrame& Frame); + + void PostRenderingThreadCreated() { bWaitForRenderThreadToResume = false; } + void PreRenderingThreadDestroyed() { bWaitForRenderThreadToResume = true; } + bool IsSupported() const { return bIsSupported; } + bool IsAsyncEnabled() const { return NvEncInitializeParams.enableEncodeAsync > 0; } + const TArray& GetSpsPpsHeader() const { return SpsPpsHeader; } + void ForceIdrFrame() { bForceIdrFrame = true; } + +private: + void InitFrameInputBuffer(const FTexture2DRHIRef& BackBuffer, FFrame& Frame); + void InitializeResources(const FTexture2DRHIRef& BackBuffer); + void ReleaseFrameInputBuffer(FFrame& Frame); + void ReleaseResources(); + void RegisterAsyncEvent(void** OutEvent); + void 
UnregisterAsyncEvent(void* Event); + void EncoderCheckLoop(); + void ProcessFrame(FFrame& Frame); + void CopyBackBuffer(const FTexture2DRHIRef& BackBuffer, const FTexture2DRHIRef& ResolvedBackBuffer); + void UpdateSpsPpsHeader(); + + TUniquePtr NvEncodeAPI; + void* EncoderInterface; + NV_ENC_INITIALIZE_PARAMS NvEncInitializeParams; + NV_ENC_CONFIG NvEncConfig; + bool bIsSupported; + TArray SpsPpsHeader; + FThreadSafeBool bWaitForRenderThreadToResume; + FThreadSafeBool bForceIdrFrame; + // Used to make sure we don't have a race condition trying to access a deleted "this" captured + // in the render command lambda sent to the render thread from EncoderCheckLoop + static FThreadSafeCounter ImplCounter; + uint64 FrameCount; + static const uint32 NumBufferedFrames = 3; + FFrame BufferedFrames[NumBufferedFrames]; + TUniquePtr EncoderThread; + FThreadSafeBool bExitEncoderThread; + FEncodedFrameReadyCallback EncodedFrameReadyCallback; +}; + +FThreadSafeCounter FNvVideoEncoder::FNvVideoEncoderImpl::ImplCounter(0); + +/** +* Implementation class of NvEnc. +* Note bEnableAsyncMode flag is for debugging purpose, it should be set to true normally unless user wants to test in synchronous mode. +*/ +FNvVideoEncoder::FNvVideoEncoderImpl::FNvVideoEncoderImpl(void* DllHandle, const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, bool bEnableAsyncMode, const FEncodedFrameReadyCallback& InEncodedFrameReadyCallback) + : EncoderInterface(nullptr) + , bIsSupported(false) + , bWaitForRenderThreadToResume(false) + , bForceIdrFrame(false) + , FrameCount(0) + , bExitEncoderThread(false) + , EncodedFrameReadyCallback(InEncodedFrameReadyCallback) +{ + // Bind to the delegates that are triggered when render thread is created or destroyed, so the encoder thread can act accordingly. 
+ FCoreDelegates::PostRenderingThreadCreated.AddRaw(this, &FNvVideoEncoderImpl::PostRenderingThreadCreated); + FCoreDelegates::PreRenderingThreadDestroyed.AddRaw(this, &FNvVideoEncoderImpl::PreRenderingThreadDestroyed); + + uint32 Width = Settings.Width; + uint32 Height = Settings.Height; + + ID3D11Device* Device = static_cast(GDynamicRHI->RHIGetNativeDevice()); + checkf(Device != nullptr, TEXT("Cannot initialize NvEnc with invalid device")); + checkf(Width > 0 && Height > 0, TEXT("Cannot initialize NvEnc with invalid width/height")); + bool Result = true; + bool bWebSocketStreaming = FParse::Param(FCommandLine::Get(), TEXT("WebSocketStreaming")); + + // Load NvEnc dll and create an NvEncode API instance + { + // define a function pointer for creating an instance of nvEncodeAPI + typedef NVENCSTATUS(NVENCAPI *NVENCAPIPROC)(NV_ENCODE_API_FUNCTION_LIST*); + NVENCAPIPROC NvEncodeAPICreateInstanceFunc; + +#if defined PLATFORM_WINDOWS +# pragma warning(push) +# pragma warning(disable: 4191) // https://stackoverflow.com/a/4215425/453271 + NvEncodeAPICreateInstanceFunc = (NVENCAPIPROC)FPlatformProcess::GetDllExport((HMODULE)DllHandle, TEXT("NvEncodeAPICreateInstance")); +# pragma warning(pop) +#else + NvEncodeAPICreateInstanceFunc = (NVENCAPIPROC)dlsym(DllHandle, "NvEncodeAPICreateInstance"); +#endif + checkf(NvEncodeAPICreateInstanceFunc != nullptr, TEXT("NvEncodeAPICreateInstance failed")); + NvEncodeAPI.Reset(new NV_ENCODE_API_FUNCTION_LIST); + FMemory::Memzero(NvEncodeAPI.Get(), sizeof(NV_ENCODE_API_FUNCTION_LIST)); + NvEncodeAPI->version = NV_ENCODE_API_FUNCTION_LIST_VER; + Result = NV_RESULT(NvEncodeAPICreateInstanceFunc(NvEncodeAPI.Get())); + checkf(Result, TEXT("Unable to create NvEnc API function list")); + } + // Open an encoding session + { + NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS OpenEncodeSessionExParams; + FMemory::Memzero(OpenEncodeSessionExParams); + OpenEncodeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER; + 
OpenEncodeSessionExParams.device = Device; + OpenEncodeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX; // Currently only DX11 is supported + OpenEncodeSessionExParams.apiVersion = NVENCAPI_VERSION; + Result = NV_RESULT(NvEncodeAPI->nvEncOpenEncodeSessionEx(&OpenEncodeSessionExParams, &EncoderInterface)); + checkf(Result, TEXT("Unable to open NvEnc encoding session")); + } + // Set initialization parameters + { + FMemory::Memzero(NvEncInitializeParams); + NvEncInitializeParams.version = NV_ENC_INITIALIZE_PARAMS_VER; + NvEncInitializeParams.encodeWidth = Width; + NvEncInitializeParams.encodeHeight = Height; + NvEncInitializeParams.darWidth = Width; + NvEncInitializeParams.darHeight = Height; + NvEncInitializeParams.encodeGUID = NV_ENC_CODEC_H264_GUID; + NvEncInitializeParams.presetGUID = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID; + NvEncInitializeParams.frameRateNum = Settings.FrameRate; + FParse::Value(FCommandLine::Get(), TEXT("NvEncFrameRateNum="), NvEncInitializeParams.frameRateNum); + UE_LOG(PixelStreaming, Log, TEXT("NvEnc configured to %d FPS"), NvEncInitializeParams.frameRateNum); + NvEncInitializeParams.frameRateDen = 1; + NvEncInitializeParams.enablePTD = 1; + NvEncInitializeParams.reportSliceOffsets = 0; + NvEncInitializeParams.enableSubFrameWrite = 0; + NvEncInitializeParams.encodeConfig = &NvEncConfig; + NvEncInitializeParams.maxEncodeWidth = 3840; + NvEncInitializeParams.maxEncodeHeight = 2160; + FParse::Value(FCommandLine::Get(), TEXT("NvEncMaxEncodeWidth="), NvEncInitializeParams.maxEncodeWidth); + FParse::Value(FCommandLine::Get(), TEXT("NvEncMaxEncodeHeight="), NvEncInitializeParams.maxEncodeHeight); + } + // Get preset config and tweak it accordingly + { + NV_ENC_PRESET_CONFIG PresetConfig; + FMemory::Memzero(PresetConfig); + PresetConfig.version = NV_ENC_PRESET_CONFIG_VER; + PresetConfig.presetCfg.version = NV_ENC_CONFIG_VER; + Result = NV_RESULT(NvEncodeAPI->nvEncGetEncodePresetConfig(EncoderInterface, NvEncInitializeParams.encodeGUID, 
NvEncInitializeParams.presetGUID, &PresetConfig)); + checkf(Result, TEXT("Failed to select NVEncoder preset config")); + FMemory::Memcpy(&NvEncConfig, &PresetConfig.presetCfg, sizeof(NV_ENC_CONFIG)); + + NvEncConfig.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID; + //NvEncConfig.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID; + //NvEncConfig.gopLength = NVENC_INFINITE_GOPLENGTH; + NvEncConfig.gopLength = NvEncInitializeParams.frameRateNum; // once a sec + //NvEncConfig.frameIntervalP = 1; + //NvEncConfig.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME; + //NvEncConfig.mvPrecision = NV_ENC_MV_PRECISION_QUARTER_PEL; + //NvEncConfig.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR; + //FString RateControlMode; + //FParse::Value(FCommandLine::Get(), TEXT("NvEncRateControlMode="), RateControlMode); + //if (RateControlMode == TEXT("NV_ENC_PARAMS_RC_VBR_HQ")) + //{ + // NvEncConfig.rcParams.rateControlMode = NV_ENC_PARAMS_RC_VBR_HQ; + //} + NvEncConfig.rcParams.averageBitRate = Settings.AverageBitRate; + FParse::Value(FCommandLine::Get(), TEXT("NvEncAverageBitRate="), NvEncConfig.rcParams.averageBitRate); + //NvEncConfig.encodeCodecConfig.h264Config.chromaFormatIDC = 1; + NvEncConfig.encodeCodecConfig.h264Config.idrPeriod = NvEncConfig.gopLength; + + if (bWebSocketStreaming) + { + NvEncConfig.encodeCodecConfig.h264Config.sliceMode = 0; + NvEncConfig.encodeCodecConfig.h264Config.sliceModeData = 0; + } + else + { + // configure "entire frame as a single slice" + // seems WebRTC implementation doesn't work well with slicing, default mode + // (Mode=3/ModeData=4 - 4 slices per frame) produces (rarely) grey full screen or just top half of it. 
+ // it also can be related with our handling of slices in proxy's FakeVideoEncoder + NvEncConfig.encodeCodecConfig.h264Config.sliceMode = 0; + NvEncConfig.encodeCodecConfig.h264Config.sliceModeData = 0; + + // let encoder slice encoded frame so they can fit into RTP packets + // commented out because at some point it started to produce immediately visible visual artefacts + // on clients + //NvEncConfig.encodeCodecConfig.h264Config.sliceMode = 1; + //NvEncConfig.encodeCodecConfig.h264Config.sliceModeData = 1100; // max bytes per slice + + // repeat SPS/PPS with each key-frame for a case when the first frame (with mandatory SPS/PPS) + // was dropped by WebRTC + NvEncConfig.encodeCodecConfig.h264Config.repeatSPSPPS = 1; + } + + // maybe doesn't have an effect, high level is chosen because we aim at high bitrate + NvEncConfig.encodeCodecConfig.h264Config.level = NV_ENC_LEVEL_H264_51; + FString NvEncH264ConfigLevel; + FParse::Value(FCommandLine::Get(), TEXT("NvEncH264ConfigLevel="), NvEncH264ConfigLevel); + if (NvEncH264ConfigLevel == TEXT("NV_ENC_LEVEL_H264_52")) + { + NvEncConfig.encodeCodecConfig.h264Config.level = NV_ENC_LEVEL_H264_52; + } + } + // Get encoder capability + { + NV_ENC_CAPS_PARAM CapsParam; + FMemory::Memzero(CapsParam); + CapsParam.version = NV_ENC_CAPS_PARAM_VER; + CapsParam.capsToQuery = NV_ENC_CAPS_ASYNC_ENCODE_SUPPORT; + int32 AsyncMode = 0; + Result = NV_RESULT(NvEncodeAPI->nvEncGetEncodeCaps(EncoderInterface, NvEncInitializeParams.encodeGUID, &CapsParam, &AsyncMode)); + checkf(Result, TEXT("Failed to get NVEncoder capability params")); + NvEncInitializeParams.enableEncodeAsync = bEnableAsyncMode ? 
AsyncMode : 0; + } + + Result = NV_RESULT(NvEncodeAPI->nvEncInitializeEncoder(EncoderInterface, &NvEncInitializeParams)); + checkf(Result, TEXT("Failed to initialize NVEncoder")); + + UpdateSpsPpsHeader(); + + InitializeResources(BackBuffer); + + if (NvEncInitializeParams.enableEncodeAsync) + { + EncoderThread.Reset(new FThread(TEXT("PixelStreaming Video Send"), [this]() { EncoderCheckLoop(); })); + } + + bIsSupported = true; +} + +FNvVideoEncoder::FNvVideoEncoderImpl::~FNvVideoEncoderImpl() +{ + FCoreDelegates::PostRenderingThreadCreated.RemoveAll(this); + FCoreDelegates::PreRenderingThreadDestroyed.RemoveAll(this); + + if (EncoderThread) + { + // Reset bWaitForRenderThreadToResume so encoder thread can quit + bWaitForRenderThreadToResume = false; + + bExitEncoderThread = true; + // Trigger all frame events to release encoder thread waiting on them + // (we don't know here which frame it's waiting for) + for (FFrame& Frame : BufferedFrames) + { + SetEvent(Frame.OutputFrame.EventHandle); + } + // Exit encoder runnable thread before shutting down NvEnc interface + EncoderThread->Join(); + // Increment the counter, so that if any pending render commands sent from EncoderCheckLoop + // to the Render Thread still reference "this", they will be ignored because the counter is different + ImplCounter.Increment(); + } + + ReleaseResources(); + + if (EncoderInterface) + { + bool Result = NV_RESULT(NvEncodeAPI->nvEncDestroyEncoder(EncoderInterface)); + checkf(Result, TEXT("Failed to destroy NvEnc interface")); + EncoderInterface = nullptr; + } + + bIsSupported = false; +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::UpdateSpsPpsHeader() +{ + uint8 SpsPpsBuffer[NV_MAX_SEQ_HDR_LEN]; + uint32 PayloadSize = 0; + + NV_ENC_SEQUENCE_PARAM_PAYLOAD SequenceParamPayload; + FMemory::Memzero(SequenceParamPayload); + SequenceParamPayload.version = NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER; + SequenceParamPayload.inBufferSize = NV_MAX_SEQ_HDR_LEN; + SequenceParamPayload.spsppsBuffer = 
&SpsPpsBuffer; + SequenceParamPayload.outSPSPPSPayloadSize = &PayloadSize; + + bool Result = NV_RESULT(NvEncodeAPI->nvEncGetSequenceParams(EncoderInterface, &SequenceParamPayload)); + checkf(Result, TEXT("Unable to get NvEnc sequence params")); + + SpsPpsHeader.SetNum(PayloadSize); + FMemory::Memcpy(SpsPpsHeader.GetData(), SpsPpsBuffer, PayloadSize); +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::UpdateSettings(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer) +{ + bool bSettingsChanged = false; + bool bResolutionChanged = false; + if (NvEncConfig.rcParams.averageBitRate != Settings.AverageBitRate) + { + NvEncConfig.rcParams.averageBitRate = Settings.AverageBitRate; + bSettingsChanged = true; + } + if (NvEncInitializeParams.frameRateNum != Settings.FrameRate) + { + NvEncInitializeParams.frameRateNum = Settings.FrameRate; + bSettingsChanged = true; + UE_LOG(PixelStreaming, Log, TEXT("NvEnc reconfigured to %d FPS"), NvEncInitializeParams.frameRateNum); + } + if (NvEncInitializeParams.encodeWidth != Settings.Width) + { + NvEncInitializeParams.encodeWidth = Settings.Width; + NvEncInitializeParams.darWidth = Settings.Width; + bResolutionChanged = true; + bSettingsChanged = true; + } + if (NvEncInitializeParams.encodeHeight != Settings.Height) + { + NvEncInitializeParams.encodeHeight = Settings.Height; + NvEncInitializeParams.darHeight = Settings.Height; + bResolutionChanged = true; + bSettingsChanged = true; + } + + if (bSettingsChanged) + { + NV_ENC_RECONFIGURE_PARAMS NvEncReconfigureParams; + FMemory::Memzero(NvEncReconfigureParams); + FMemory::Memcpy(&NvEncReconfigureParams.reInitEncodeParams, &NvEncInitializeParams, sizeof(NvEncInitializeParams)); + NvEncReconfigureParams.version = NV_ENC_RECONFIGURE_PARAMS_VER; + NvEncReconfigureParams.forceIDR = bResolutionChanged; + + bool Result = NV_RESULT(NvEncodeAPI->nvEncReconfigureEncoder(EncoderInterface, &NvEncReconfigureParams)); + checkf(Result, TEXT("Failed to reconfigure encoder")); + } + + 
if (bResolutionChanged) + { + UpdateSpsPpsHeader(); + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::CopyBackBuffer(const FTexture2DRHIRef& BackBuffer, const FTexture2DRHIRef& ResolvedBackBuffer) +{ + IRendererModule* RendererModule = &FModuleManager::GetModuleChecked("Renderer"); + FRHICommandListImmediate& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList(); + + if (BackBuffer->GetFormat() == ResolvedBackBuffer->GetFormat() && + BackBuffer->GetSizeXY() == ResolvedBackBuffer->GetSizeXY()) + { + RHICmdList.CopyToResolveTarget(BackBuffer, ResolvedBackBuffer, FResolveParams()); + } + else // Texture format mismatch, use a shader to do the copy. + { + SetRenderTarget(RHICmdList, ResolvedBackBuffer, FTextureRHIRef()); + RHICmdList.SetViewport(0, 0, 0.0f, ResolvedBackBuffer->GetSizeX(), ResolvedBackBuffer->GetSizeY(), 1.0f); + + FGraphicsPipelineStateInitializer GraphicsPSOInit; + RHICmdList.ApplyCachedRenderTargets(GraphicsPSOInit); + GraphicsPSOInit.BlendState = TStaticBlendState<>::GetRHI(); + GraphicsPSOInit.RasterizerState = TStaticRasterizerState<>::GetRHI(); + GraphicsPSOInit.DepthStencilState = TStaticDepthStencilState::GetRHI(); + + TShaderMap* ShaderMap = GetGlobalShaderMap(GMaxRHIFeatureLevel); + TShaderMapRef VertexShader(ShaderMap); + TShaderMapRef PixelShader(ShaderMap); + + GraphicsPSOInit.BoundShaderState.VertexDeclarationRHI = RendererModule->GetFilterVertexDeclaration().VertexDeclarationRHI; + GraphicsPSOInit.BoundShaderState.VertexShaderRHI = GETSAFERHISHADER_VERTEX(*VertexShader); + GraphicsPSOInit.BoundShaderState.PixelShaderRHI = GETSAFERHISHADER_PIXEL(*PixelShader); + GraphicsPSOInit.PrimitiveType = PT_TriangleList; + + SetGraphicsPipelineState(RHICmdList, GraphicsPSOInit); + + if (ResolvedBackBuffer->GetSizeX() != BackBuffer->GetSizeX() || ResolvedBackBuffer->GetSizeY() != BackBuffer->GetSizeY()) + PixelShader->SetParameters(RHICmdList, TStaticSamplerState::GetRHI(), BackBuffer); + else + PixelShader->SetParameters(RHICmdList, 
TStaticSamplerState::GetRHI(), BackBuffer); + + RendererModule->DrawRectangle( + RHICmdList, + 0, 0, // Dest X, Y + ResolvedBackBuffer->GetSizeX(), // Dest Width + ResolvedBackBuffer->GetSizeY(), // Dest Height + 0, 0, // Source U, V + 1, 1, // Source USize, VSize + ResolvedBackBuffer->GetSizeXY(), // Target buffer size + FIntPoint(1, 1), // Source texture size + *VertexShader, + EDRF_Default); + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::EncoderCheckLoop() +{ + int CurrentIndex = 0; + while (!bExitEncoderThread) + { + FFrame& Frame = BufferedFrames[CurrentIndex]; + + { + SCOPE_CYCLE_COUNTER(STAT_NvEnc_WaitForEncodeEvent); + DWORD Result = WaitForSingleObject(Frame.OutputFrame.EventHandle, INFINITE); + checkf(Result == WAIT_OBJECT_0, TEXT("Error waiting for frame event: %d"), Result); + if (bExitEncoderThread) + { + return; + } + } + + Frame.EncodeEndTimeStamp = NowMs(); + + ResetEvent(Frame.OutputFrame.EventHandle); + int32 CurrImplCounter = ImplCounter.GetValue(); + // When resolution changes, render thread is stopped and later restarted from game thread. + // We can't enqueue render commands when render thread is stopped, so pause until render thread is restarted. + while (bWaitForRenderThreadToResume) {} + ENQUEUE_UNIQUE_RENDER_COMMAND_THREEPARAMETER( + NvEncProcessFrame, + FNvVideoEncoderImpl*, this_, this, + FFrame*, Frame, &Frame, + int32, CurrImplCounter, CurrImplCounter, + { + if (CurrImplCounter != ImplCounter.GetValue()) // Check if the "this" we captured is still valid + { + return; + } + + this_->ProcessFrame(*Frame); + } + ); + + CurrentIndex = (CurrentIndex + 1) % NumBufferedFrames; + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::EncodeFrame(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, uint64 CaptureMs) +{ + SET_DWORD_STAT(STAT_NvEnc_AsyncMode, NvEncInitializeParams.enableEncodeAsync ? 
1 : 0); + + UpdateSettings(Settings, BackBuffer); + + uint32 BufferIndexToWrite = FrameCount % NumBufferedFrames; + FFrame& Frame = BufferedFrames[BufferIndexToWrite]; + + // If we don't have any free buffers, then we skip this rendered frame + if (Frame.bEncoding) + { + return; + } + + // When resolution changes, buffers need to be recreated + if (Frame.ResolvedBackBuffer->GetSizeX() != Settings.Width || Frame.ResolvedBackBuffer->GetSizeY() != Settings.Height) + { + ReleaseFrameInputBuffer(Frame); + InitFrameInputBuffer(BackBuffer, Frame); + } + + Frame.bEncoding = true; + Frame.FrameIdx = FrameCount; + Frame.CaptureTimeStamp = CaptureMs; + + // Copy BackBuffer to ResolvedBackBuffer + { + SCOPE_CYCLE_COUNTER(STAT_NvEnc_CopyBackBuffer); + CopyBackBuffer(BackBuffer, Frame.ResolvedBackBuffer); + } + + // Encode frame + { + FRHICommandList& RHICmdList = FRHICommandListExecutor::GetImmediateCommandList(); + if (RHICmdList.Bypass()) + { + FRHITransferRenderTargetToNvEnc Command(this, &Frame); + Command.Execute(RHICmdList); + } + else + { + new (RHICmdList.AllocCommand()) FRHITransferRenderTargetToNvEnc(this, &Frame); + } + } + + FrameCount++; +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::TransferRenderTargetToHWEncoder(FFrame& Frame) +{ + SCOPE_CYCLE_COUNTER(STAT_NvEnc_SendBackBufferToEncoder); + + NV_ENC_PIC_PARAMS PicParams; + FMemory::Memzero(PicParams); + PicParams.version = NV_ENC_PIC_PARAMS_VER; + PicParams.inputBuffer = Frame.InputFrame.MappedResource; + PicParams.bufferFmt = Frame.InputFrame.BufferFormat; + PicParams.inputWidth = NvEncInitializeParams.encodeWidth; + PicParams.inputHeight = NvEncInitializeParams.encodeHeight; + PicParams.outputBitstream = Frame.OutputFrame.BitstreamBuffer; + PicParams.completionEvent = Frame.OutputFrame.EventHandle; + PicParams.inputTimeStamp = Frame.FrameIdx; + PicParams.pictureStruct = NV_ENC_PIC_STRUCT_FRAME; + + if (bForceIdrFrame) + { + PicParams.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEIDR; + } + bForceIdrFrame = false; + 
+ Frame.EncodeStartTimeStamp = NowMs(); + bool Result = NV_RESULT(NvEncodeAPI->nvEncEncodePicture(EncoderInterface, &PicParams)); + checkf(Result, TEXT("Failed to encode frame")); + + if (!NvEncInitializeParams.enableEncodeAsync) + { + // In synchronous mode, simply process the frame immediately. + ProcessFrame(Frame); + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::ProcessFrame(FFrame& Frame) +{ + // If the expected frame hasn't been doing encoding, then nothing to do + checkf(Frame.bEncoding, TEXT("This should not happen")); + if (!Frame.bEncoding) + { + return; + } + + // log encoding latency for every 1000th frame + if (Frame.FrameIdx % 1000 == 0) + { + uint64 ms = NowMs(); + UE_LOG(PixelStreaming, Log, TEXT("#%d %d %d %d"), Frame.FrameIdx, Frame.EncodeStartTimeStamp - Frame.CaptureTimeStamp, Frame.EncodeEndTimeStamp - Frame.EncodeStartTimeStamp, ms - Frame.EncodeEndTimeStamp); + } + + Frame.bEncoding = false; + + // Retrieve encoded frame from output buffer + { + SCOPE_CYCLE_COUNTER(STAT_NvEnc_RetrieveEncodedFrame); + + NV_ENC_LOCK_BITSTREAM LockBitstream; + FMemory::Memzero(LockBitstream); + LockBitstream.version = NV_ENC_LOCK_BITSTREAM_VER; + LockBitstream.outputBitstream = Frame.OutputFrame.BitstreamBuffer; + LockBitstream.doNotWait = NvEncInitializeParams.enableEncodeAsync; + + bool Result = NV_RESULT(NvEncodeAPI->nvEncLockBitstream(EncoderInterface, &LockBitstream)); + checkf(Result, TEXT("Failed to lock bitstream")); + + Frame.EncodedFrame.SetNum(LockBitstream.bitstreamSizeInBytes); + FMemory::Memcpy(Frame.EncodedFrame.GetData(), LockBitstream.bitstreamBufferPtr, LockBitstream.bitstreamSizeInBytes); + + Result = NV_RESULT(NvEncodeAPI->nvEncUnlockBitstream(EncoderInterface, Frame.OutputFrame.BitstreamBuffer)); + checkf(Result, TEXT("Failed to unlock bitstream")); + Frame.bIdrFrame = LockBitstream.pictureType == NV_ENC_PIC_TYPE_IDR; + } + + // Stream the encoded frame + { + SCOPE_CYCLE_COUNTER(STAT_NvEnc_StreamEncodedFrame); + 
EncodedFrameReadyCallback(Frame.CaptureTimeStamp, Frame.bIdrFrame, Frame.EncodedFrame.GetData(), Frame.EncodedFrame.Num()); + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::InitFrameInputBuffer(const FTexture2DRHIRef& BackBuffer, FFrame& Frame) +{ + // Create resolved back buffer texture + { + // Make sure format used here is compatible with NV_ENC_BUFFER_FORMAT specified later in NV_ENC_REGISTER_RESOURCE bufferFormat + FRHIResourceCreateInfo CreateInfo; + Frame.ResolvedBackBuffer = RHICreateTexture2D(NvEncInitializeParams.encodeWidth, NvEncInitializeParams.encodeHeight, EPixelFormat::PF_A2B10G10R10, 1, 1, TexCreate_RenderTargetable, CreateInfo); + } + + FMemory::Memzero(Frame.InputFrame); + // Register input back buffer + { + ID3D11Texture2D* ResolvedBackBufferDX11 = (ID3D11Texture2D*)(GetD3D11TextureFromRHITexture(Frame.ResolvedBackBuffer)->GetResource()); + EPixelFormat PixelFormat = Frame.ResolvedBackBuffer->GetFormat(); + + NV_ENC_REGISTER_RESOURCE RegisterResource; + FMemory::Memzero(RegisterResource); + RegisterResource.version = NV_ENC_REGISTER_RESOURCE_VER; + RegisterResource.resourceType = NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX; + RegisterResource.resourceToRegister = (void*)ResolvedBackBufferDX11; + RegisterResource.width = NvEncInitializeParams.encodeWidth; + RegisterResource.height = NvEncInitializeParams.encodeHeight; + RegisterResource.bufferFormat = NV_ENC_BUFFER_FORMAT_ABGR10; // Make sure ResolvedBackBuffer is created with a compatible format + bool Result = NV_RESULT(NvEncodeAPI->nvEncRegisterResource(EncoderInterface, &RegisterResource)); + checkf(Result, TEXT("Failed to register input back buffer")); + + Frame.InputFrame.RegisteredResource = RegisterResource.registeredResource; + Frame.InputFrame.BufferFormat = RegisterResource.bufferFormat; + } + // Map input buffer resource + { + NV_ENC_MAP_INPUT_RESOURCE MapInputResource; + FMemory::Memzero(MapInputResource); + MapInputResource.version = NV_ENC_MAP_INPUT_RESOURCE_VER; + 
MapInputResource.registeredResource = Frame.InputFrame.RegisteredResource; + bool Result = NV_RESULT(NvEncodeAPI->nvEncMapInputResource(EncoderInterface, &MapInputResource)); + checkf(Result, TEXT("Failed to map NvEnc input resource")); + Frame.InputFrame.MappedResource = MapInputResource.mappedResource; + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::InitializeResources(const FTexture2DRHIRef& BackBuffer) +{ + for (uint32 i = 0; i < NumBufferedFrames; ++i) + { + FFrame& Frame = BufferedFrames[i]; + + InitFrameInputBuffer(BackBuffer, Frame); + + FMemory::Memzero(Frame.OutputFrame); + // Create output bitstream buffer + { + NV_ENC_CREATE_BITSTREAM_BUFFER CreateBitstreamBuffer; + FMemory::Memzero(CreateBitstreamBuffer); + CreateBitstreamBuffer.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER; + CreateBitstreamBuffer.size = BITSTREAM_SIZE; + CreateBitstreamBuffer.memoryHeap = NV_ENC_MEMORY_HEAP_SYSMEM_CACHED; + bool Result = NV_RESULT(NvEncodeAPI->nvEncCreateBitstreamBuffer(EncoderInterface, &CreateBitstreamBuffer)); + checkf(Result, TEXT("Failed to create NvEnc bitstream buffer")); + Frame.OutputFrame.BitstreamBuffer = CreateBitstreamBuffer.bitstreamBuffer; + } + // Register event handles + if (NvEncInitializeParams.enableEncodeAsync) + { + RegisterAsyncEvent(&Frame.OutputFrame.EventHandle); + } + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::ReleaseFrameInputBuffer(FFrame& Frame) +{ + bool Result = NV_RESULT(NvEncodeAPI->nvEncUnmapInputResource(EncoderInterface, Frame.InputFrame.MappedResource)); + checkf(Result, TEXT("Failed to unmap input resource")); + Frame.InputFrame.MappedResource = nullptr; + + Result = NV_RESULT(NvEncodeAPI->nvEncUnregisterResource(EncoderInterface, Frame.InputFrame.RegisteredResource)); + checkf(Result, TEXT("Failed to unregister input buffer resource")); + Frame.InputFrame.RegisteredResource = nullptr; + + Frame.ResolvedBackBuffer.SafeRelease(); +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::ReleaseResources() +{ + for (uint32 i = 
0; i < NumBufferedFrames; ++i) + { + FFrame& Frame = BufferedFrames[i]; + + ReleaseFrameInputBuffer(Frame); + + bool Result = NV_RESULT(NvEncodeAPI->nvEncDestroyBitstreamBuffer(EncoderInterface, Frame.OutputFrame.BitstreamBuffer)); + checkf(Result, TEXT("Failed to destroy output buffer bitstream")); + Frame.OutputFrame.BitstreamBuffer = nullptr; + + if (Frame.OutputFrame.EventHandle) + { + UnregisterAsyncEvent(Frame.OutputFrame.EventHandle); + CLOSE_EVENT_HANDLE(Frame.OutputFrame.EventHandle); + Frame.OutputFrame.EventHandle = nullptr; + } + } +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::RegisterAsyncEvent(void** OutEvent) +{ + NV_ENC_EVENT_PARAMS EventParams; + FMemory::Memzero(EventParams); + EventParams.version = NV_ENC_EVENT_PARAMS_VER; +#if defined PLATFORM_WINDOWS + EventParams.completionEvent = CreateEvent(nullptr, false, false, nullptr); +#endif + bool Result = NV_RESULT(NvEncodeAPI->nvEncRegisterAsyncEvent(EncoderInterface, &EventParams)); + checkf(Result, TEXT("Failed to register async event")); + *OutEvent = EventParams.completionEvent; +} + +void FNvVideoEncoder::FNvVideoEncoderImpl::UnregisterAsyncEvent(void* Event) +{ + if (Event) + { + NV_ENC_EVENT_PARAMS EventParams; + FMemory::Memzero(EventParams); + EventParams.version = NV_ENC_EVENT_PARAMS_VER; + EventParams.completionEvent = Event; + bool Result = NV_RESULT(NvEncodeAPI->nvEncUnregisterAsyncEvent(EncoderInterface, &EventParams)); + checkf(Result, TEXT("Failed to unregister async event")); + } +} + + +FNvVideoEncoder::FNvVideoEncoder(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, const FEncodedFrameReadyCallback& InEncodedFrameReadyCallback) + : NvVideoEncoderImpl(nullptr), DllHandle(nullptr) +{ +#if defined PLATFORM_WINDOWS +#if defined _WIN64 + DllHandle = FPlatformProcess::GetDllHandle(TEXT("nvEncodeAPI64.dll")); +#else + DllHandle = FPlatformProcess::GetDllHandle(TEXT("nvEncodeAPI.dll")); +#endif +#else + DllHandle = 
FPlatformProcess::GetDllHandle(TEXT("libnvidia-encode.so.1")); +#endif + checkf(DllHandle != nullptr, TEXT("Failed to load NvEncode dll")); + + if (DllHandle) + { + NvVideoEncoderImpl = new FNvVideoEncoderImpl(DllHandle, Settings, BackBuffer, true, InEncodedFrameReadyCallback); + } +} + +FNvVideoEncoder::~FNvVideoEncoder() +{ + if (DllHandle) + { + delete NvVideoEncoderImpl; + +#if defined PLATFORM_WINDOWS + FPlatformProcess::FreeDllHandle(DllHandle); +#else + dlclose(DllHandle); +#endif + DllHandle = nullptr; + } +} + +bool FNvVideoEncoder::IsSupported() const +{ + return DllHandle && NvVideoEncoderImpl->IsSupported(); +} + +bool FNvVideoEncoder::IsAsyncEnabled() const +{ + return NvVideoEncoderImpl->IsAsyncEnabled(); +} + +void FNvVideoEncoder::EncodeFrame(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, uint64 CaptureMs) +{ + NvVideoEncoderImpl->EncodeFrame(Settings, BackBuffer, CaptureMs); +} + +const TArray& FNvVideoEncoder::GetSpsPpsHeader() const +{ + return NvVideoEncoderImpl->GetSpsPpsHeader(); +} + +void FNvVideoEncoder::ForceIdrFrame() +{ + NvVideoEncoderImpl->ForceIdrFrame(); +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.h new file mode 100644 index 000000000000..a06e6a497737 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/NvVideoEncoder.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "VideoEncoder.h" + +DECLARE_STATS_GROUP(TEXT("NvEnc"), STATGROUP_NvEnc, STATCAT_Advanced); + +// Video encoder implementation based on NVIDIA Video Codecs SDK: https://developer.nvidia.com/nvidia-video-codec-sdk +// Uses only encoder part +class FNvVideoEncoder : public IVideoEncoder +{ +public: + FNvVideoEncoder(const FVideoEncoderSettings& InSettings, const FTexture2DRHIRef& BackBuffer, const FEncodedFrameReadyCallback& InEncodedFrameReadyCallback); + ~FNvVideoEncoder(); + + /** + * Return name of the encoder. + */ + virtual FString GetName() const override + { return TEXT("Nvidia Video Codec SDK Encoder"); } + + /** + * If encoder is supported. + */ + virtual bool IsSupported() const override; + + /** + * Get Sps/Pps header data. + */ + virtual const TArray& GetSpsPpsHeader() const override; + + /** + * Encode an input back buffer. + */ + virtual void EncodeFrame(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, uint64 CaptureMs) override; + + /** + * Force the next frame to be an IDR frame. + */ + virtual void ForceIdrFrame() override; + + /** + * If encoder is running in async/sync mode. + */ + virtual bool IsAsyncEnabled() const override; + +private: + class FNvVideoEncoderImpl; + FNvVideoEncoderImpl* NvVideoEncoderImpl; + void* DllHandle; +}; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputComponent.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputComponent.cpp new file mode 100644 index 000000000000..424950fdbea6 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputComponent.cpp @@ -0,0 +1,113 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "PixelStreamingInputComponent.h" +#include "Engine/Engine.h" +#include "Engine/World.h" +#include "IPixelStreamingPlugin.h" +#include "Policies/CondensedJsonPrintPolicy.h" +#include "Serialization/JsonReader.h" +#include "Serialization/JsonSerializer.h" +#include "GameFramework/GameUserSettings.h" + +extern TAutoConsoleVariable CVarStreamerBitrateReduction; + +UPixelStreamingInputComponent::UPixelStreamingInputComponent() + : PixelStreamingPlugin(FModuleManager::Get().GetModule("PixelStreaming") ? &FModuleManager::Get().GetModuleChecked("PixelStreaming") : nullptr) +{ +} + +bool UPixelStreamingInputComponent::OnCommand(const FString& Descriptor) +{ + FString ConsoleCommand; + if (GetJsonStringField(Descriptor, TEXT("ConsoleCommand"), ConsoleCommand)) + { + return GEngine->Exec(GetWorld(), *ConsoleCommand); + } + + FString WidthString; + FString HeightString; + if (GetJsonStringField(Descriptor, TEXT("Resolution.Width"), WidthString) && + GetJsonStringField(Descriptor, TEXT("Resolution.Height"), HeightString)) + { + FIntPoint Resolution = { FCString::Atoi(*WidthString), FCString::Atoi(*HeightString) }; + GEngine->GameUserSettings->SetScreenResolution(Resolution); + GEngine->GameUserSettings->ApplySettings(false); + return true; + } + + FString BitrateReductionString; + if (GetJsonStringField(Descriptor, TEXT("Encoder.BitrateReduction"), BitrateReductionString)) + { + float BitrateReduction = FCString::Atof(*BitrateReductionString); + CVarStreamerBitrateReduction->Set(BitrateReduction); + return true; + } + + return false; +} + +void UPixelStreamingInputComponent::SendPixelStreamingResponse(const FString& Descriptor) +{ + PixelStreamingPlugin->SendResponse(Descriptor); +} + +bool UPixelStreamingInputComponent::GetJsonStringField(FString Descriptor, FString FieldName, FString& StringField) +{ + bool Success; + GetJsonStringValue(Descriptor, FieldName, StringField, Success); + return Success; +} + +void 
UPixelStreamingInputComponent::GetJsonStringValue(FString Descriptor, FString FieldName, FString& StringValue, bool& Success) +{ + TSharedPtr JsonObject = MakeShareable(new FJsonObject); + + TSharedRef> JsonReader = TJsonReaderFactory<>::Create(Descriptor); + if (FJsonSerializer::Deserialize(JsonReader, JsonObject) && JsonObject.IsValid()) + { + const TSharedPtr* JsonObjectPtr = &JsonObject; + + if (FieldName.Contains(TEXT("."))) + { + TArray FieldComponents; + FieldName.ParseIntoArray(FieldComponents, TEXT(".")); + FieldName = FieldComponents.Pop(); + + for (const FString& FieldComponent : FieldComponents) + { + if (!(*JsonObjectPtr)->TryGetObjectField(FieldComponent, JsonObjectPtr)) + { + Success = false; + return; + } + } + } + + Success = (*JsonObjectPtr)->TryGetStringField(FieldName, StringValue); + } + else + { + Success = false; + } +} + +void UPixelStreamingInputComponent::AddJsonStringValue(const FString& Descriptor, FString FieldName, FString StringValue, FString& NewDescriptor, bool& Success) +{ + TSharedPtr JsonObject = MakeShareable(new FJsonObject); + + if (!Descriptor.IsEmpty()) + { + TSharedRef> JsonReader = TJsonReaderFactory<>::Create(Descriptor); + if (!FJsonSerializer::Deserialize(JsonReader, JsonObject) && JsonObject.IsValid()) + { + Success = false; + return; + } + } + + TSharedRef JsonValueObject = MakeShareable(new FJsonValueString(StringValue)); + JsonObject->SetField(FieldName, JsonValueObject); + + TSharedRef>> JsonWriter = TJsonWriterFactory>::Create(&NewDescriptor); + Success = FJsonSerializer::Serialize(JsonObject.ToSharedRef(), JsonWriter); +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.cpp new file mode 100644 index 000000000000..f8685449ebdd --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.cpp 
@@ -0,0 +1,365 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "PixelStreamingInputDevice.h" +#include "PixelStreamingInputComponent.h" +#include "PixelStreamingSettings.h" +#include "Engine/Engine.h" +#include "Engine/GameEngine.h" +#include "Engine/GameViewportClient.h" +#include "Framework/Application/SlateApplication.h" +#include "Slate/SceneViewport.h" +#include "Widgets/SWindow.h" +#include "Misc/ScopeLock.h" +#include "JavaScriptKeyCodes.inl" + +DEFINE_LOG_CATEGORY(PixelStreamingInputDevice); + +/** + * When reading input from a browser then the cursor position will be sent + * across with mouse events. We want to use this position and avoid getting the + * cursor position from the operating system. This is not relevant to touch + * events. + */ +class FPixelStreamingCursor : public ICursor +{ +public: + + FPixelStreamingCursor() {} + virtual ~FPixelStreamingCursor() = default; + virtual FVector2D GetPosition() const override { return Position; } + virtual void SetPosition(const int32 X, const int32 Y) override { Position = FVector2D(X, Y); }; + virtual void SetType(const EMouseCursor::Type InNewCursor) override {}; + virtual EMouseCursor::Type GetType() const override { return EMouseCursor::Type::Default; }; + virtual void GetSize(int32& Width, int32& Height) const override {}; + virtual void Show(bool bShow) override {}; + virtual void Lock(const RECT* const Bounds) override {}; + virtual void SetTypeShape(EMouseCursor::Type InCursorType, void* CursorHandle) override {}; + +private: + + /** The cursor position sent across with mouse events. */ + FVector2D Position; +}; + +/** +* Wrap the GenericApplication layer so we can replace the cursor and override +* certain behavior. 
+*/ +class FPixelStreamingApplicationWrapper : public GenericApplication +{ +public: + + FPixelStreamingApplicationWrapper(TSharedPtr InWrappedApplication) + : GenericApplication(MakeShareable(new FPixelStreamingCursor())) + , WrappedApplication(InWrappedApplication) + { + } + + /** + * Functions passed directly to the wrapped application. + */ + + virtual void SetMessageHandler(const TSharedRef< FGenericApplicationMessageHandler >& InMessageHandler) { WrappedApplication->SetMessageHandler(InMessageHandler); } + virtual void PollGameDeviceState(const float TimeDelta) { WrappedApplication->PollGameDeviceState(TimeDelta); } + virtual void PumpMessages(const float TimeDelta) { WrappedApplication->PumpMessages(TimeDelta); } + virtual void ProcessDeferredEvents(const float TimeDelta) { WrappedApplication->ProcessDeferredEvents(TimeDelta); } + virtual void Tick(const float TimeDelta) { WrappedApplication->Tick(TimeDelta); } + virtual TSharedRef< FGenericWindow > MakeWindow() { return WrappedApplication->MakeWindow(); } + virtual void InitializeWindow(const TSharedRef< FGenericWindow >& Window, const TSharedRef< FGenericWindowDefinition >& InDefinition, const TSharedPtr< FGenericWindow >& InParent, const bool bShowImmediately) { WrappedApplication->InitializeWindow(Window, InDefinition, InParent, bShowImmediately); } + virtual void SetCapture(const TSharedPtr< FGenericWindow >& InWindow) { WrappedApplication->SetCapture(InWindow); } + virtual void* GetCapture(void) const { return WrappedApplication->GetCapture(); } + virtual FModifierKeysState GetModifierKeys() const { return WrappedApplication->GetModifierKeys(); } + virtual TSharedPtr< FGenericWindow > GetWindowUnderCursor() { return WrappedApplication->GetWindowUnderCursor(); } + virtual void SetHighPrecisionMouseMode(const bool Enable, const TSharedPtr< FGenericWindow >& InWindow) { WrappedApplication->SetHighPrecisionMouseMode(Enable, InWindow); }; + virtual bool IsUsingHighPrecisionMouseMode() const { return 
WrappedApplication->IsUsingHighPrecisionMouseMode(); } + virtual bool IsUsingTrackpad() const { return WrappedApplication->IsUsingTrackpad(); } + virtual bool IsMouseAttached() const { return WrappedApplication->IsMouseAttached(); } + virtual bool IsGamepadAttached() const { return WrappedApplication->IsGamepadAttached(); } + virtual void RegisterConsoleCommandListener(const FOnConsoleCommandListener& InListener) { WrappedApplication->RegisterConsoleCommandListener(InListener); } + virtual void AddPendingConsoleCommand(const FString& InCommand) { WrappedApplication->AddPendingConsoleCommand(InCommand); } + virtual FPlatformRect GetWorkArea(const FPlatformRect& CurrentWindow) const { return WrappedApplication->GetWorkArea(CurrentWindow); } + virtual bool TryCalculatePopupWindowPosition(const FPlatformRect& InAnchor, const FVector2D& InSize, const FVector2D& ProposedPlacement, const EPopUpOrientation::Type Orientation, /*OUT*/ FVector2D* const CalculatedPopUpPosition) const { return WrappedApplication->TryCalculatePopupWindowPosition(InAnchor, InSize, ProposedPlacement, Orientation, CalculatedPopUpPosition); } + virtual void GetInitialDisplayMetrics(FDisplayMetrics& OutDisplayMetrics) const { WrappedApplication->GetInitialDisplayMetrics(OutDisplayMetrics); } + virtual EWindowTitleAlignment::Type GetWindowTitleAlignment() const { return WrappedApplication->GetWindowTitleAlignment(); } + virtual EWindowTransparency GetWindowTransparencySupport() const { return WrappedApplication->GetWindowTransparencySupport(); } + virtual void DestroyApplication() { WrappedApplication->DestroyApplication(); } + virtual IInputInterface* GetInputInterface() { return WrappedApplication->GetInputInterface(); } + virtual ITextInputMethodSystem* GetTextInputMethodSystem() { return WrappedApplication->GetTextInputMethodSystem(); } + virtual void SendAnalytics(IAnalyticsProvider* Provider) { WrappedApplication->SendAnalytics(Provider); } + virtual bool SupportsSystemHelp() const { return 
WrappedApplication->SupportsSystemHelp(); } + virtual void ShowSystemHelp() { WrappedApplication->ShowSystemHelp(); } + virtual bool ApplicationLicenseValid(FPlatformUserId PlatformUser = PLATFORMUSERID_NONE) { return WrappedApplication->ApplicationLicenseValid(PlatformUser); } + + /** + * Functions with overridden behavior. + */ + virtual bool IsCursorDirectlyOverSlateWindow() const { return true; } + + TSharedPtr WrappedApplication; +}; + +FPixelStreamingInputDevice::FPixelStreamingInputDevice(const TSharedRef& InMessageHandler, TArray& InInputComponents) + : PixelStreamingApplicationWrapper(MakeShareable(new FPixelStreamingApplicationWrapper(FSlateApplication::Get().GetPlatformApplication()))) + , MessageHandler(InMessageHandler) + , InputComponents(InInputComponents) + , bAllowCommands(FParse::Param(FCommandLine::Get(), TEXT("AllowPixelStreamingCommands"))) + , bFakingTouchEvents(FSlateApplication::Get().IsFakingTouchEvents()) +{ + if (GEngine->GameViewport && !GEngine->GameViewport->HasSoftwareCursor(EMouseCursor::Default)) + { + // Pixel streaming always requires a default software cursor as it needs + // to be shown on the browser to allow the user to click UI elements. + const UPixelStreamingSettings* Settings = GetDefault(); + check(Settings); + + GEngine->GameViewport->AddSoftwareCursor(EMouseCursor::Default, Settings->PixelStreamingDefaultCursorClassName); + } +} + +void FPixelStreamingInputDevice::Tick(float DeltaTime) +{ + FEvent Event; + while (Events.Dequeue(Event)) + { + switch (Event.Event) + { + case EventType::UNDEFINED: + { + checkNoEntry(); + } + break; + case EventType::KEY_DOWN: + { + uint8 JavaScriptKeyCode; + bool IsRepeat; + Event.GetKeyDown(JavaScriptKeyCode, IsRepeat); + const FKey* AgnosticKey = AgnosticKeys[JavaScriptKeyCode]; + const uint32* KeyCode; + const uint32* CharacterCode; + FInputKeyManager::Get().GetCodesFromKey(*AgnosticKey, KeyCode, CharacterCode); + MessageHandler->OnKeyDown(KeyCode ? *KeyCode : 0, CharacterCode ? 
*CharacterCode : 0, IsRepeat); + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("KEY_DOWN: KeyCode = %d; CharacterCode = %d; IsRepeat = %s"), KeyCode, CharacterCode, IsRepeat ? TEXT("True") : TEXT("False")); + } + break; + case EventType::KEY_UP: + { + uint8 JavaScriptKeyCode; + Event.GetKeyUp(JavaScriptKeyCode); + const FKey* AgnosticKey = AgnosticKeys[JavaScriptKeyCode]; + const uint32* KeyCode; + const uint32* CharacterCode; + FInputKeyManager::Get().GetCodesFromKey(*AgnosticKey, KeyCode, CharacterCode); + MessageHandler->OnKeyUp(KeyCode ? *KeyCode : 0, CharacterCode ? *CharacterCode : 0, false); // Key up events are never repeats. + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("KEY_UP: KeyCode = %d; CharacterCode = %d"), KeyCode, CharacterCode); + } + break; + case EventType::KEY_PRESS: + { + TCHAR UnicodeCharacter; + Event.GetCharacterCode(UnicodeCharacter); + MessageHandler->OnKeyChar(UnicodeCharacter, false); // Key press repeat not yet available but are not intrinsically used. + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("KEY_PRESSED: Character = '%c'"), UnicodeCharacter); + } + break; + case EventType::MOUSE_ENTER: + { + // Override application layer to special pixel streaming version. + FSlateApplication::Get().OverridePlatformApplication(PixelStreamingApplicationWrapper); + FSlateApplication::Get().OnCursorSet(); + + // Make sure the viewport is active. + FSlateApplication::Get().ProcessApplicationActivationEvent(true); + + // Double the number of hit test cells to cater for the possibility + // that the window will be off screen. + UGameEngine* GameEngine = Cast(GEngine); + TSharedPtr Window = GameEngine->SceneViewport->FindWindow(); + Window->GetHittestGrid()->SetNumCellsExcess(Window->GetHittestGrid()->GetNumCells()); + + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("MOUSE_ENTER")); + } + break; + case EventType::MOUSE_LEAVE: + { + // Restore normal application layer. 
+ FSlateApplication::Get().OverridePlatformApplication(PixelStreamingApplicationWrapper->WrappedApplication); + + // Reduce the number of hit test cells back to normal. + UGameEngine* GameEngine = Cast(GEngine); + TSharedPtr Window = GameEngine->SceneViewport->FindWindow(); + Window->GetHittestGrid()->SetNumCellsExcess(FIntPoint(0, 0)); + + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("MOUSE_LEAVE")); + } + break; + case EventType::MOUSE_MOVE: + { + uint16 PosX; + uint16 PosY; + int16 DeltaX; + int16 DeltaY; + Event.GetMouseDelta(PosX, PosY, DeltaX, DeltaY); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + PixelStreamingApplicationWrapper->Cursor->SetPosition(CursorPos.X, CursorPos.Y); + MessageHandler->OnRawMouseMove(DeltaX, DeltaY); + UE_LOG(PixelStreamingInputDevice, VeryVerbose, TEXT("MOUSE_MOVE: Pos = (%d, %d); CursorPos = (%d, %d); Delta = (%d, %d)"), PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y), DeltaX, DeltaY); + } + break; + case EventType::MOUSE_DOWN: + { + // If a user clicks on the application window and then clicks on the + // browser then this will move the focus away from the application + // window which will deactivate the application, so we need to check + // if we must reactivate the application. 
+ if (!FSlateApplication::Get().IsActive()) + { + FSlateApplication::Get().ProcessApplicationActivationEvent(true); + } + + EMouseButtons::Type Button; + uint16 PosX; + uint16 PosY; + Event.GetMouseClick(Button, PosX, PosY); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + PixelStreamingApplicationWrapper->Cursor->SetPosition(CursorPos.X, CursorPos.Y); + MessageHandler->OnMouseDown(GEngine->GameViewport->GetWindow()->GetNativeWindow(), Button, CursorPos); + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("MOUSE_DOWN: Button = %d; Pos = (%d, %d); CursorPos = (%d, %d)"), Button, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y)); + } + break; + case EventType::MOUSE_UP: + { + EMouseButtons::Type Button; + uint16 PosX; + uint16 PosY; + Event.GetMouseClick(Button, PosX, PosY); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + PixelStreamingApplicationWrapper->Cursor->SetPosition(CursorPos.X, CursorPos.Y); + MessageHandler->OnMouseUp(Button); + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("MOUSE_UP: Button = %d; Pos = (%d, %d); CursorPos = (%d, %d)"), Button, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y)); + } + break; + case EventType::MOUSE_WHEEL: + { + int16 Delta; + uint16 PosX; + uint16 PosY; + Event.GetMouseWheel(Delta, PosX, PosY); + const float SpinFactor = 1 / 120.0f; + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + MessageHandler->OnMouseWheel(Delta * SpinFactor, CursorPos); + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("MOUSE_WHEEL: Delta = %d; Pos = (%d, %d); CursorPos = (%d, %d)"), Delta, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y)); + } + break; + case EventType::TOUCH_START: + { + uint8 TouchIndex; + uint16 PosX; + uint16 PosY; + uint8 Force; // Force is between 0.0 and 1.0 so will need to unquantize from byte. 
+ Event.GetTouch(TouchIndex, PosX, PosY, Force); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + MessageHandler->OnTouchStarted(GEngine->GameViewport->GetWindow()->GetNativeWindow(), CursorPos, Force / 255.0f, TouchIndex, 0); // TODO: ControllerId? + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("TOUCH_START: TouchIndex = %d; Pos = (%d, %d); CursorPos = (%d, %d); Force = %.3f"), TouchIndex, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y), Force / 255.0f); + } + break; + case EventType::TOUCH_END: + { + uint8 TouchIndex; + uint16 PosX; + uint16 PosY; + uint8 Force; + Event.GetTouch(TouchIndex, PosX, PosY, Force); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + MessageHandler->OnTouchEnded(CursorPos, TouchIndex, 0); // TODO: ControllerId? + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("TOUCH_END: TouchIndex = %d; Pos = (%d, %d); CursorPos = (%d, %d)"), TouchIndex, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y)); + } + break; + case EventType::TOUCH_MOVE: + { + uint8 TouchIndex; + uint16 PosX; + uint16 PosY; + uint8 Force; // Force is between 0.0 and 1.0 so will need to unquantize from byte. + Event.GetTouch(TouchIndex, PosX, PosY, Force); + FVector2D CursorPos = GEngine->GameViewport->GetWindow()->GetPositionInScreen() + FVector2D(PosX, PosY); + MessageHandler->OnTouchMoved(CursorPos, Force / 255.0f, TouchIndex, 0); // TODO: ControllerId? 
+ UE_LOG(PixelStreamingInputDevice, VeryVerbose, TEXT("TOUCH_MOVE: TouchIndex = %d; Pos = (%d, %d); CursorPos = (%d, %d); Force = %.3f"), TouchIndex, PosX, PosY, static_cast(CursorPos.X), static_cast(CursorPos.Y), Force / 255.0f); + } + break; + default: + { + UE_LOG(PixelStreamingInputDevice, Error, TEXT("Unknown Pixel Streaming event %d with word 0x%016llx"), static_cast(Event.Event), Event.Data.Word); + } + break; + } + } + + FString UIInteraction; + while (UIInteractions.Dequeue(UIInteraction)) + { + for (UPixelStreamingInputComponent* InputComponent : InputComponents) + { + InputComponent->OnPixelStreamingInputEvent.Broadcast(UIInteraction); + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("UIInteraction = %s"), *UIInteraction); + } + } + + FString Command; + while (Commands.Dequeue(Command)) + { + for (UPixelStreamingInputComponent* InputComponent : InputComponents) + { + if (InputComponent->OnCommand(Command)) + { + UE_LOG(PixelStreamingInputDevice, Verbose, TEXT("Command = %s"), *Command); + } + else + { + UE_LOG(PixelStreamingInputDevice, Warning, TEXT("Failed to run Command = %s"), *Command); + } + } + } +} + +void FPixelStreamingInputDevice::SendControllerEvents() +{ +} + +void FPixelStreamingInputDevice::SetMessageHandler(const TSharedRef& InMessageHandler) +{ + MessageHandler = InMessageHandler; +} + +bool FPixelStreamingInputDevice::Exec(UWorld* InWorld, const TCHAR* Cmd, FOutputDevice& Ar) +{ + return true; +} + +void FPixelStreamingInputDevice::SetChannelValue(int32 ControllerId, FForceFeedbackChannelType ChannelType, float Value) +{ +} + +void FPixelStreamingInputDevice::SetChannelValues(int32 ControllerId, const FForceFeedbackValues &values) +{ +} + +void FPixelStreamingInputDevice::ProcessEvent(const FEvent& InEvent) +{ + bool Success = Events.Enqueue(InEvent); + checkf(Success, TEXT("Unable to enqueue new event of type %d"), static_cast(InEvent.Event)); +} + +void FPixelStreamingInputDevice::ProcessUIInteraction(const FString& InDescriptor) +{ 
+ bool Success = UIInteractions.Enqueue(InDescriptor); + checkf(Success, TEXT("Unable to enqueue new UI Interaction %s"), *InDescriptor); +} + +void FPixelStreamingInputDevice::ProcessCommand(const FString& InDescriptor) +{ + if (bAllowCommands) + { + bool Success = Commands.Enqueue(InDescriptor); + checkf(Success, TEXT("Unable to enqueue new Command %s"), *InDescriptor); + } +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.h new file mode 100644 index 000000000000..dc6c0a776b0c --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingInputDevice.h @@ -0,0 +1,435 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "IInputDevice.h" +#include "Containers/Queue.h" +#include "GenericPlatform/GenericApplication.h" + +DECLARE_LOG_CATEGORY_EXTERN(PixelStreamingInputDevice, Log, VeryVerbose); + +class FPixelStreamingInputDevice : public IInputDevice +{ +public: + + /** + * The pixel streaming input device takes input events and passes them to + * a message handler. + * @param InMessageHandler - The message handler which events should be passed to. + * @param InInputComponents - The preexisting list of pixel streaming input components. + */ + FPixelStreamingInputDevice(const TSharedRef& InMessageHandler, TArray& InInputComponents); + + /** + * Tell the input device about a new pixel streaming input component. + * @param InInputComponent - The new pixel streaming input component. + */ + void AddInputComponent(class UPixelStreamingInputComponent* InInputComponent) + { + InputComponents.Add(InInputComponent); + } + + /* + * Tell the input device that a pixel streaming input component is no longer + * relevant. + * @param InInputComponent - The pixel streaming input component which is no longer relevant. 
+ */ + void RemoveInputComponent(class UPixelStreamingInputComponent* InInputComponent) + { + InputComponents.Remove(InInputComponent); + } + + /** Tick the interface (e.g. check for new controllers) */ + virtual void Tick(float DeltaTime) override; + + /** Poll for controller state and send events if needed */ + virtual void SendControllerEvents() override; + + /** Set which MessageHandler will get the events from SendControllerEvents. */ + virtual void SetMessageHandler(const TSharedRef& InMessageHandler) override; + + /** Exec handler to allow console commands to be passed through for debugging */ + virtual bool Exec(UWorld* InWorld, const TCHAR* Cmd, FOutputDevice& Ar) override; + + /** + * IForceFeedbackSystem pass through functions + */ + virtual void SetChannelValue(int32 ControllerId, FForceFeedbackChannelType ChannelType, float Value) override; + + virtual void SetChannelValues(int32 ControllerId, const FForceFeedbackValues &values) override; + + /** The types of event which can be processed by the device. */ + enum class EventType + { + UNDEFINED, /** No value. */ + KEY_DOWN, /** A key has been pushed down. */ + KEY_UP, /** A key has been released. */ + KEY_PRESS, /** A key has been pressed and a character has been input. */ + MOUSE_ENTER, /** The mouse has entered canvas. */ + MOUSE_LEAVE, /** The mouse has left the canvas. */ + MOUSE_MOVE, /** The mouse has been moved. */ + MOUSE_DOWN, /** A mouse button has been clicked. */ + MOUSE_UP, /** A mouse button has been released. */ + MOUSE_WHEEL, /** The mouse wheel was scrolled. */ + TOUCH_START, /** A finger is put down onto the canvas. */ + TOUCH_END, /** A finger is lifted from the canvas. */ + TOUCH_MOVE /** A finger is being dragged along the surface of the canvas. */ + }; + + /** A general input event. */ + struct FEvent + { + /** The type of the general event. 
*/ + EventType Event; + + /** A generic piece of data which is used to hold information about the + * event, specialized by making a union with an appropriate struct. */ + union + { + uint64 Word; + + struct /** KEY_DOWN */ + { + uint8 KeyCode; + bool bIsRepeat; + } KeyDown; + + struct /* KEY_UP */ + { + uint8 KeyCode; + } KeyUp; + + struct /** KEY_PRESSED */ + { + TCHAR Character; + } Character; + + struct /** MOUSE_MOVE */ + { + int16 DeltaX; + int16 DeltaY; + uint16 PosX; + uint16 PosY; + } MouseMove; + + struct /** MOUSE_DOWN, MOUSE_UP */ + { + uint8 Button; + uint16 PosX; + uint16 PosY; + } MouseButton; + + struct /** MOUSE_WHEEL */ + { + int16 Delta; + uint16 PosX; + uint16 PosY; + } MouseWheel; + + struct /** TOUCH_START, TOUCH_END, TOUCH_MOVE */ + { + uint8 TouchIndex; + uint16 PosX; + uint16 PosY; + uint8 Force; + } Touch; + } Data; + + /** + * Create a completely empty event. + */ + FEvent() + : Event(EventType::UNDEFINED) + { + } + + /** + * Create an event of the given type. + * @param InEvent - The type of the event. + */ + FEvent(EventType InEvent) + : Event(InEvent) + { + } + + /** + * An event related to a key being pushed down. + * @param InKeyCode - Numerical code identifying the pushed down key. + * @param InIsRepeat - Whether the key is being kept down and is repeating. + */ + void SetKeyDown(uint8 InKeyCode, bool InIsRepeat) + { + check(Event == EventType::KEY_DOWN); + Data.KeyDown.KeyCode = InKeyCode; + Data.KeyDown.bIsRepeat = InIsRepeat; + } + + /** + * An event related to a key being released. + * @param InKeyCode - Numerical code identifying the released key. + */ + void SetKeyUp(uint8 InKeyCode) + { + check(Event == EventType::KEY_UP); + Data.KeyUp.KeyCode = InKeyCode; + } + + /** + * An event related to character input. + * @param InCharacter - The character being input. 
+ */ + void SetCharCode(TCHAR InCharacter) + { + check(Event == EventType::KEY_PRESS); + Data.Character.Character = InCharacter; + } + + /** + * An event related to mouse movement. + * @param InPoxX - The X position of the mouse pointer. + * @param InPosY - The Y position of the mouse pointer. + * @param InDeltaX - The change in the X position of the mouse pointer. + * @param InDeltaY - The change in the Y position of the mouse pointer. + */ + void SetMouseDelta(uint16 InPosX, uint16 InPosY, int16 InDeltaX, int16 InDeltaY) + { + check(Event == EventType::MOUSE_MOVE); + Data.MouseMove.DeltaX = InDeltaX; + Data.MouseMove.DeltaY = InDeltaY; + Data.MouseMove.PosX = InPosX; + Data.MouseMove.PosY = InPosY; + } + + /** + * An event related to mouse buttons. + * @param InButton - The button number corresponding to left, middle, right, etc. + * @param InPoxX - The X position of the mouse pointer. + * @param InPosY - The Y position of the mouse pointer. + */ + void SetMouseClick(uint8 InButton, uint16 InPosX, uint16 InPosY) + { + check(Event == EventType::MOUSE_DOWN || Event == EventType::MOUSE_UP); + Data.MouseButton.Button = InButton; + Data.MouseButton.PosX = InPosX; + Data.MouseButton.PosY = InPosY; + } + + /** + * An event related to the mouse scroll wheel. + * @param InButton - The amount by which the mouse wheel was scrolled. + * @param InPoxX - The X position of the mouse pointer when the wheel was scrolled. + * @param InPosY - The Y position of the mouse pointer when the wheel was scrolled. + */ + void SetMouseWheel(int16 InDelta, uint16 InPosX, uint16 InPosY) + { + check(Event == EventType::MOUSE_WHEEL); + Data.MouseWheel.Delta = InDelta; + Data.MouseWheel.PosX = InPosX; + Data.MouseWheel.PosY = InPosY; + } + + /** + * An event related to a finger touching the canvas. + * @param InTouchIndex - The finger used in multi-touch. + * @param InPoxX - The X position of the finger. + * @param InPosY - The Y position of the finger. 
+ * @param InForce - The amount of pressure being applied by the finger. + */ + void SetTouch(uint8 InTouchIndex, uint16 InPosX, uint16 InPosY, uint8 InForce) + { + check(Event == EventType::TOUCH_START || Event == EventType::TOUCH_END || Event == EventType::TOUCH_MOVE); + Data.Touch.TouchIndex = InTouchIndex; + Data.Touch.PosX = InPosX; + Data.Touch.PosY = InPosY; + Data.Touch.Force = InForce; + } + + /** + * Get information about an event related to a key being pushed down. + * @param OutKeyCode - Numerical code identifying the pushed down key. + * @param OutIsRepeat - Whether the key is being kept down and is repeating. + */ + void GetKeyDown(uint8& OutKeyCode, bool& OutIsRepeat) + { + check(Event == EventType::KEY_DOWN); + OutKeyCode = Data.KeyDown.KeyCode; + OutIsRepeat = Data.KeyDown.bIsRepeat; + } + + /** + * Get information about an event related to a key being released. + * @param OutKeyCode - Numerical code identifying the released key. + */ + void GetKeyUp(uint8& OutKeyCode) + { + check(Event == EventType::KEY_UP); + OutKeyCode = Data.KeyUp.KeyCode; + } + + /** + * Get information about an event related to character input. + * @param OutCharacter - The character being input. + */ + void GetCharacterCode(TCHAR& OutCharacter) + { + check(Event == EventType::KEY_PRESS); + OutCharacter = Data.Character.Character; + } + + /** + * Get information about an event related to mouse movement. + * @param OutPoxX - The X position of the mouse pointer. + * @param OutPosY - The Y position of the mouse pointer. + * @param OutDeltaX - The change in the X position of the mouse pointer. + * @param OutDeltaY - The change in the Y position of the mouse pointer. 
+ */ + void GetMouseDelta(uint16& OutPosX, uint16& OutPosY, int16& OutDeltaX, int16& OutDeltaY) + { + check(Event == EventType::MOUSE_MOVE); + OutPosX = Data.MouseMove.PosX; + OutPosY = Data.MouseMove.PosY; + OutDeltaX = Data.MouseMove.DeltaX; + OutDeltaY = Data.MouseMove.DeltaY; + } + + /** + * Get information about an event related to mouse buttons. + * @param OutButton - The button number corresponding to left, middle, right, etc. + * @param OutPosX - The X position of the mouse pointer. + * @param OutPosY - The Y position of the mouse pointer. + */ + void GetMouseClick(EMouseButtons::Type& OutButton, uint16& OutPosX, uint16& OutPosY) + { + check(Event == EventType::MOUSE_DOWN || Event == EventType::MOUSE_UP); + // https://developer.mozilla.org/en-US/docs/Web/Events/mousedown + uint8 Button = Data.MouseButton.Button; + switch (Button) + { + case 0: + { + OutButton = EMouseButtons::Left; + } + break; + case 1: + { + OutButton = EMouseButtons::Middle; + } + break; + case 2: + { + OutButton = EMouseButtons::Right; + } + break; + default: + { + UE_LOG(PixelStreamingInputDevice, Error, TEXT("Unknown Pixel Streaming mouse click with button %d and word 0x%016llx"), Button, Data.Word); + } + break; + } + OutPosX = Data.MouseButton.PosX; + OutPosY = Data.MouseButton.PosY; + } + + /** + * Get information about an event related to the mouse wheel. + * @param OutDelta - The amount by which the mouse wheel was scrolled. + * @param PosX - The X position of the mouse pointer when the wheel was scrolled. + * @param PosY - The Y position of the mouse pointer when the wheel was scrolled. + */ + void GetMouseWheel(int16& OutDelta, uint16& OutPosX, uint16& OutPosY) + { + check(Event == EventType::MOUSE_WHEEL); + OutDelta = Data.MouseWheel.Delta; + OutPosX = Data.MouseWheel.PosX; + OutPosY = Data.MouseWheel.PosY; + } + + /** + * Get information about an event related to a finger touching the canvas. + * @param OutTouchIndex - The finger used in multi-touch. 
+ * @param OutPoxX - The X position of the finger. + * @param OutPosY - The Y position of the finger. + * @param OutForce - Amount of pressure being applied by the finger. + */ + void GetTouch(uint8& OutTouchIndex, uint16& OutPosX, uint16& OutPosY, uint8& OutForce) + { + check(Event == EventType::TOUCH_START || Event == EventType::TOUCH_END || Event == EventType::TOUCH_MOVE); + OutTouchIndex = Data.Touch.TouchIndex; + OutPosX = Data.Touch.PosX; + OutPosY = Data.Touch.PosY; + OutForce = Data.Touch.Force; + } + }; + + /** + * Add a new event to the input device for later processing. + * @param InEvent - The new event. + */ + void ProcessEvent(const FEvent& InEvent); + + /** + * Add a new UI interaction descriptor to the input device for later processing. + * @param InDescriptor - The new UI interaction descriptor. + */ + void ProcessUIInteraction(const FString& InDescriptor); + + /** + * Add a new command descriptor to the input device for later execution. + * @param InDescriptor - The new command descriptor. + */ + void ProcessCommand(const FString& InDescriptor); + + /** + * Is the application faking touch events? + * @return True if the application is faking touch events. + */ + bool IsFakingTouchEvents() const + { + return bFakingTouchEvents; + } + +private: + + /** + * A special wrapper over the GenericApplication layer which allows us to + * override certain behavior. + */ + TSharedPtr PixelStreamingApplicationWrapper; + + /** Reference to the message handler which events should be passed to. */ + TSharedRef MessageHandler; + + /** The queue of events which are awaiting processing. */ + TQueue Events; + + /** + * Pixel streaming input components contain a delegate which will broadcast + * UI integrations to interested parties. + */ + TArray InputComponents; + + /** + * A queue of UI interaction descriptor strings which contain arbitrary + * information related to the interaction. 
+ */ + TQueue UIInteractions; + + /** + * It is only possible to enable commands when explicitly permitted as these + * have security implications. + */ + bool bAllowCommands; + + /** + * A queue of command descriptor strings which contain the command to + * execute and its arguments. + */ + TQueue Commands; + + /** + * Is the application faking touch events by dragging the mouse along + * the canvas? If so then we must put the browser canvas in a special + * state to replicate the behavior of the application. + */ + bool bFakingTouchEvents; +}; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingPlugin.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingPlugin.cpp new file mode 100644 index 000000000000..b8db56b05798 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingPlugin.cpp @@ -0,0 +1,182 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "CoreMinimal.h" +#include "Modules/ModuleManager.h" +#include "IPixelStreamingPlugin.h" +#include "UObject/UObjectIterator.h" +#include "Engine/GameEngine.h" +#include "Engine/GameViewportClient.h" +#include "Slate/SceneViewport.h" +#include "Streamer.h" +#include "Windows/WindowsHWrapper.h" +#include "RenderingThread.h" +#include "Misc/CommandLine.h" +#include "Misc/Parse.h" +#include "RendererInterface.h" +#include "Rendering/SlateRenderer.h" +#include "Framework/Application/SlateApplication.h" +#include "Misc/ConfigCacheIni.h" +#include "PixelStreamingInputDevice.h" +#include "PixelStreamingInputComponent.h" +#include "GameFramework/GameModeBase.h" +#include "Dom/JsonObject.h" +#include "Misc/App.h" + +DEFINE_LOG_CATEGORY(PixelStreaming); +DEFINE_LOG_CATEGORY(PixelStreamingInput); +DEFINE_LOG_CATEGORY(PixelStreamingNet); +DEFINE_LOG_CATEGORY(PixelStreamingCapture); + +class FPixelStreamingPlugin : public IPixelStreamingPlugin +{ +public: + /** IModuleInterface implementation */ + virtual void StartupModule() override + { + // detect hardware capabilities, init nvidia capture libs, etc + + // subscribe to engine delegates here for init / framebuffer creation / whatever + if (UGameEngine* GameEngine = Cast(GEngine)) + { + if (FSlateApplication::IsInitialized()) + { + FSlateRenderer::FOnBackBufferReadyToPresent OnBackBufferReadyDelegate; + OnBackBufferReadyDelegate.BindRaw(this, &FPixelStreamingPlugin::OnBackBufferReady_RenderThread); + FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent() = OnBackBufferReadyDelegate; + + FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().AddRaw(this, &FPixelStreamingPlugin::OnPreResizeWindowBackbuffer); + } + + } + + FGameModeEvents::GameModePostLoginEvent.AddRaw(this, &FPixelStreamingPlugin::OnGameModePostLogin); + FGameModeEvents::GameModeLogoutEvent.AddRaw(this, &FPixelStreamingPlugin::OnGameModeLogout); + + IModularFeatures::Get().RegisterModularFeature(GetModularFeatureName(), 
this); + + FApp::SetUnfocusedVolumeMultiplier(1.0f); + } + + virtual void ShutdownModule() override + { + if (FSlateApplication::IsInitialized()) + { + FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent().Unbind(); + FSlateApplication::Get().GetRenderer()->OnPreResizeWindowBackBuffer().RemoveAll(this); + } + + IModularFeatures::Get().UnregisterModularFeature(GetModularFeatureName(), this); + } + +private: + void UpdateViewport(FSceneViewport* Viewport) + { + FRHIViewport* const ViewportRHI = Viewport->GetViewportRHI().GetReference(); + } + + void OnBackBufferReady_RenderThread(const FTexture2DRHIRef& BackBuffer) + { + check(IsInRenderingThread()); + + if (!Streamer) + { + FString IP = TEXT("0.0.0.0"); + FParse::Value(FCommandLine::Get(), TEXT("PixelStreamingIP="), IP); + uint16 Port = 8124; + FParse::Value(FCommandLine::Get(), TEXT("PixelStreamingPort="), Port); + + Streamer = MakeUnique(*IP, Port, BackBuffer); + } + + Streamer->OnFrameBufferReady(BackBuffer); + } + + void OnPreResizeWindowBackbuffer(void* BackBuffer) + { + if (Streamer) + { + ENQUEUE_UNIQUE_RENDER_COMMAND_ONEPARAMETER( + FPixelStreamingOnPreResizeWindowBackbuffer, + FPixelStreamingPlugin*, Plugin, this, + { + Plugin->OnPreResizeWindowBackbuffer_RenderThread(); + }); + + // Make sure OnPreResizeWindowBackbuffer_RenderThread is executed before continuing + FlushRenderingCommands(); + } + } + + void OnPreResizeWindowBackbuffer_RenderThread() + { + Streamer->OnPreResizeWindowBackbuffer(); + } + + virtual TSharedPtr CreateInputDevice(const TSharedRef& InMessageHandler) override + { + InputDevice = MakeShareable(new FPixelStreamingInputDevice(InMessageHandler, InputComponents)); + return InputDevice; + } + + virtual FPixelStreamingInputDevice& GetInputDevice() override + { + return *InputDevice; + } + + virtual void AddClientConfig(TSharedRef& JsonObject) override + { + checkf(InputDevice.IsValid(), TEXT("No Input Device available when populating Client Config")); + + 
JsonObject->SetBoolField(TEXT("FakingTouchEvents"), InputDevice->IsFakingTouchEvents()); + + FString PixelStreamingControlScheme; + if (FParse::Value(FCommandLine::Get(), TEXT("PixelStreamingControlScheme="), PixelStreamingControlScheme)) + { + JsonObject->SetStringField(TEXT("ControlScheme"), PixelStreamingControlScheme); + } + + float PixelStreamingFastPan; + if (FParse::Value(FCommandLine::Get(), TEXT("PixelStreamingFastPan="), PixelStreamingFastPan)) + { + JsonObject->SetNumberField(TEXT("FastPan"), PixelStreamingFastPan); + } + } + + virtual void SendResponse(const FString& Descriptor) override + { + Streamer->SendResponse(Descriptor); + } + + void OnGameModePostLogin(AGameModeBase* GameMode, APlayerController* NewPlayer) + { + UWorld* NewPlayerWorld = NewPlayer->GetWorld(); + for (TObjectIterator ObjIt; ObjIt; ++ObjIt) + { + UPixelStreamingInputComponent* InputComponent = *ObjIt; + UWorld* InputComponentWorld = InputComponent->GetWorld(); + if (InputComponentWorld == NewPlayerWorld) + { + InputComponents.Push(InputComponent); + } + } + if (InputComponents.Num() == 0) + { + UPixelStreamingInputComponent* InputComponent = NewObject(NewPlayer); + InputComponent->RegisterComponent(); + InputComponents.Push(InputComponent); + } + } + + void OnGameModeLogout(AGameModeBase* GameMode, AController* Exiting) + { + InputComponents.Empty(); + } + +private: + TUniquePtr Streamer; + FTexture2DRHIRef mResolvedFrameBuffer; + TSharedPtr InputDevice; + TArray InputComponents; +}; + +IMPLEMENT_MODULE(FPixelStreamingPlugin, PixelStreaming) diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingSettings.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingSettings.cpp new file mode 100644 index 000000000000..706d1f01e32d --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/PixelStreamingSettings.cpp @@ -0,0 +1,30 @@ +// Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved. + +#include "PixelStreamingSettings.h" + +UPixelStreamingSettings::UPixelStreamingSettings(const FObjectInitializer& ObjectInitlaizer) + : Super(ObjectInitlaizer) +{ + +} + +FName UPixelStreamingSettings::GetCategoryName() const +{ + return TEXT("Plugins"); +} + +#if WITH_EDITOR +FText UPixelStreamingSettings::GetSectionText() const +{ + return NSLOCTEXT("PixelStreamingPlugin", "PixelStreamingSettingsSection", "PixelStreaming"); +} +#endif + +#if WITH_EDITOR +void UPixelStreamingSettings::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); +} +#endif + + diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProtocolDefs.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProtocolDefs.h new file mode 100644 index 000000000000..4e5f2d3a7664 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProtocolDefs.h @@ -0,0 +1,79 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + + +namespace PixelStreamingProtocol +{ + enum class EToUE4Msg : uint8 + { + /**********************************************************************/ + + /* + * Control Messages. Range = 0..49. + */ + + IFrameRequest = 0, + RequestQualityControl = 1, // This one is intercepted and processed at the proxy + MaxFpsRequest = 2, + AverageBitrateRequest = 3, + StartStreaming = 4, + StopStreaming = 5, + + /**********************************************************************/ + + /* + * Input Messages. Range = 50..89. + */ + + // Generic Input Messages. Range = 50..59. + UIInteraction = 50, + Command = 51, + + // Keyboard Input Message. Range = 60..69. + KeyDown = 60, + KeyUp = 61, + KeyPress = 62, + + // Mouse Input Messages. Range = 70..79. + MouseEnter = 70, + MouseLeave = 71, + MouseDown = 72, + MouseUp = 73, + MouseMove = 74, + MouseWheel = 75, + + // Touch Input Messages. 
Range = 80..89. + TouchStart = 80, + TouchEnd = 81, + TouchMove = 82, + + /**********************************************************************/ + + /* + * Ensure Count is the final entry. + */ + Count + + /**********************************************************************/ + }; + + // !!! modifying this enum make sure to update the next function !!! + enum class EToProxyMsg : uint8 { AudioPCM, SpsPps, VideoIDR, Video, ClientConfig, Response, Count }; + inline const TCHAR* PacketTypeStr(EToProxyMsg PktType) + { + static const TCHAR* Str[static_cast(EToProxyMsg::Count)] = { TEXT("AudioPCM"), TEXT("SpsPps"), TEXT("VideoIDR"), TEXT("Video"), TEXT("ClientConfig"), TEXT("Response") }; +#if WITH_ENGINE + check(PktType < EToProxyMsg::Count); +#else + assert(PktType < EToProxyMsg::Count); +#endif + return Str[static_cast(PktType)]; + } + + //! Messages that can be sent to the webrtc clients + enum class EToClientMsg : uint8 { QualityControlOwnership, Response }; + + enum class ECirrusToProxyMsg : uint8 { offer, iceCandidate, clientDisconnected, config, count }; + enum class EProxyToCirrusMsg : uint8 { answer, iceCandidate, disconnectClient }; +}; \ No newline at end of file diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.cpp new file mode 100644 index 000000000000..c1260bfebf7d --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.cpp @@ -0,0 +1,519 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "ProxyConnection.h" + +#include "Engine/Engine.h" +#include "Engine/GameViewportClient.h" +#include "Common/TcpSocketBuilder.h" +#include "Sockets.h" + +#include "PixelStreamingCommon.h" +#include "Streamer.h" +#include "IPixelStreamingPlugin.h" +#include "PixelStreamingInputDevice.h" +#include "ProtocolDefs.h" + +FProxyConnection::FProxyConnection(const FString& IP, uint16 Port, FStreamer& Streamer) : + Streamer(Streamer), + InputDevice(FModuleManager::Get().GetModuleChecked("PixelStreaming").GetInputDevice()), + Thread(TEXT("WebRTC Proxy Connection"), [this, IP, Port]() { Run(IP, Port); }), + Socket(nullptr), + Listener(nullptr), + ExitRequested(false) +{} + +FProxyConnection::~FProxyConnection() +{ + ExitRequested = true; + + { + FScopeLock Lock(&SocketMt); + if (Socket) + { + Socket->Close(); + } + } + + { + FScopeLock Lock(&ListenerMt); + if (Listener) + { + Listener->Close(); + } + } + + Thread.Join(); +} + +void FProxyConnection::Run(const FString& IP, uint16 Port) +{ + InitReceiveHandlers(); + + while (!ExitRequested) + { + if (!AcceptConnection(IP, Port)) + { + continue; + } + + Receive(); + DestroyConnection(); + } + UE_LOG(PixelStreamingNet, Log, TEXT("WebRTC Proxy connection thread exited")); +} + +bool FProxyConnection::Send(const uint8* Data, uint32 Size) +{ + FScopeLock Lock(&SocketMt); + if (!Socket) + { + return false; + } + + int32 bytesSent; + return Socket->Send(Data, Size, bytesSent); +} + +bool FProxyConnection::AcceptConnection(const FString& IP, uint16 Port) +{ + // listen to a single incoming connection from WebRTC Proxy + FIPv4Address BindToAddr; + bool bResult = FIPv4Address::Parse(IP, BindToAddr); + checkf(bResult, TEXT("Failed to parse IPv4 address %s"), *IP); + + { + FScopeLock Lock(&ListenerMt); + Listener = FTcpSocketBuilder(TEXT("WebRTC Proxy Listener")). + AsBlocking(). + AsReusable(). + Listening(1). + BoundToAddress(BindToAddr). + BoundToPort(Port). + WithSendBufferSize(10 * 1024 * 1024). 
+ Build(); + check(Listener); + } + + UE_LOG(PixelStreamingNet, Log, TEXT("Waiting for connection from WebRTC Proxy on %s:%d"), *IP, Port); + FSocket* S = Listener->Accept(TEXT("WebRTC Proxy")); + if (!S) // usually happens on exit because `Listener` was closed in destructor + { + return false; + } + + // only one connection is expected, stop listening + { + FScopeLock Lock(&ListenerMt); + ISocketSubsystem::Get(PLATFORM_SOCKETSUBSYSTEM)->DestroySocket(Listener); + Listener = nullptr; + } + + { + FScopeLock Lock(&SocketMt); + Socket = S; + } + + TSharedPtr ProxyAddr = ISocketSubsystem::Get(PLATFORM_SOCKETSUBSYSTEM)->CreateInternetAddr(); + Socket->GetPeerAddress(*ProxyAddr); + + UE_LOG(PixelStreamingNet, Log, TEXT("Accepted connection from WebRTC Proxy: %s"), *ProxyAddr->ToString(true)); + + return true; +} + +void FProxyConnection::DestroyConnection() +{ + if (!ExitRequested) + { + UE_LOG(PixelStreamingNet, Log, TEXT("Disconnected from WebRTC proxy")); + } + + { + FScopeLock Lock(&SocketMt); + ISocketSubsystem::Get(PLATFORM_SOCKETSUBSYSTEM)->DestroySocket(Socket); + Socket = nullptr; + } +} + +////////////////////////////////////////////////////////////////////////// +// receiving Proxy messages + +namespace ProxyConnectionImpl +{ + template + bool Read(FSocket& Socket, T& Value) + { + int32 BytesRead = 0; + UE_LOG(PixelStreamingInput, VeryVerbose, TEXT("receiving %d bytes"), sizeof(T)); + return Socket.Recv(reinterpret_cast(&Value), sizeof(T), BytesRead, ESocketReceiveFlags::WaitAll); + } +} + +#define READFROMSOCKET(Type, Var)\ + Type Var;\ + if (!ProxyConnectionImpl::Read(*Socket, Var))\ + {\ + return false;\ + } + +bool ReceiveString(FSocket* Socket, FString& OutString) +{ + READFROMSOCKET(uint16, StrLen); + if (StrLen > 1024) + { + return false; // to avoid OOM by malicious browser scripts + } + + OutString.GetCharArray().SetNumUninitialized(StrLen + 1); + int32 BytesRead; + if (!Socket->Recv(reinterpret_cast(OutString.GetCharArray().GetData()), StrLen * 
sizeof(TCHAR), BytesRead, ESocketReceiveFlags::WaitAll)) + { + return false; + } + OutString.GetCharArray()[StrLen] = '\0'; + + return true; +} + +// XY positions are the ratio (0.0..1.0) along a viewport axis, quantized +// into an uint16 (0..65536). This allows the browser viewport and client +// viewport to have a different size. +void UnquantizeAndDenormalize(uint16& InOutX, uint16& InOutY) +{ + FIntPoint SizeXY = GEngine->GameViewport->Viewport->GetSizeXY(); + InOutX = InOutX / 65536.0f * SizeXY.X; + InOutY = InOutY / 65536.0f * SizeXY.Y; +} + +// XY deltas are the ratio (-1.0..1.0) along a viewport axis, quantized +// into an int16 (-32767..32767). This allows the browser viewport and +// client viewport to have a different size. +void UnquantizeAndDenormalize(int16& InOutX, int16& InOutY) +{ + FIntPoint SizeXY = GEngine->GameViewport->Viewport->GetSizeXY(); + InOutX = InOutX / 32767.0f * SizeXY.X; + InOutY = InOutY / 32767.0f * SizeXY.Y; +} + +/** + * A touch is a specific finger placed on the canvas as a specific position. + */ +struct FTouch +{ + uint16 PosX; // X position of finger. + uint16 PosY; // Y position of finger. + uint8 TouchIndex; // Index of finger for tracking multi-touch events. + uint8 Force; // Amount of pressure being applied by the finger. +}; + +using FKeyCodeType = uint8; +using FCharacterType = TCHAR; +using FRepeatType = uint8; +using FButtonType = uint8; +using FPosType = uint16; +using FDeltaType = int16; +using FTouchesType = TArray; + +/** +* Get the array of touch positions and touch indices for a touch event, +* consumed from the receive buffer. +* @param Consumed - The number of bytes consumed from the receive buffer. +* @param OutTouches - The array of touches. +* @return False if there insufficient room in the receive buffer to read the entire event. +*/ +bool ReceiveTouches(FSocket* Socket, FTouchesType& OutTouches) +{ + // Get the number of touches in the array. 
+ READFROMSOCKET(uint8, NumTouches); + + // Get the value of each touch position and then the touch index. + for (int Touch = 0; Touch < NumTouches; Touch++) + { + READFROMSOCKET(FPosType, PosX); + READFROMSOCKET(FPosType, PosY); + UnquantizeAndDenormalize(PosX, PosY); + READFROMSOCKET(uint8, TouchIndex); + READFROMSOCKET(uint8, Force); + OutTouches.Add({ PosX, PosY, TouchIndex, Force }); + } + + return true; +} + +/** +* Convert the given array of touches to a friendly string for logging. +* @param InTouches - The array of touches. +* @return The string representation of the array. +*/ +FString TouchesToString(const FTouchesType& InTouches) +{ + FString String; + for (const FTouch& Touch : InTouches) + { + String += FString::Printf(TEXT("F[%d]=(%d, %d)(%.3f)"), Touch.TouchIndex, Touch.PosX, Touch.PosY, Touch.Force / 255.0f); + } + return String; +} + +enum class KeyState { Alt = 1 << 0, Ctrl = 1 << 1, Shift = 1 << 2 }; +enum class MouseButtonState { Left = 1 << 0, Right = 1 << 1, Middle = 1 << 2, Button4 = 1 << 3, Button5 = 1 << 4, Button6 = 1 << 5, Button7 = 1 << 6, Button8 = 1 << 7 }; + +void FProxyConnection::InitReceiveHandlers() +{ + using namespace PixelStreamingProtocol; + + ReceiveHandlers.SetNum(static_cast(EToUE4Msg::Count)); + +#define HANDLER(MsgType, Handler) ReceiveHandlers[static_cast(EToUE4Msg::MsgType)] = [this]() { {Handler} return true; } + + HANDLER(IFrameRequest, + { + UE_LOG(PixelStreamingInput, Log, TEXT("IFrameRequest")); + Streamer.ForceIdrFrame(); + }); + + HANDLER(UIInteraction, + { + FString Descriptor; + if (ReceiveString(Socket, Descriptor)) + { + UE_LOG(PixelStreamingInput, Verbose, TEXT("UIInteraction: %s"), *Descriptor); + InputDevice.ProcessUIInteraction(Descriptor); + } + }); + + HANDLER(Command, + { + FString Descriptor; + if (ReceiveString(Socket, Descriptor)) + { + UE_LOG(PixelStreamingInput, Verbose, TEXT("Command: %s"), *Descriptor); + InputDevice.ProcessCommand(Descriptor); + } + }); + + HANDLER(KeyDown, + { + 
READFROMSOCKET(FKeyCodeType, KeyCode); + READFROMSOCKET(FRepeatType, Repeat); + UE_LOG(PixelStreamingInput, Verbose, TEXT("key down: %d, repeat: %d"), KeyCode, Repeat); + + FPixelStreamingInputDevice::FEvent KeyDownEvent(FPixelStreamingInputDevice::EventType::KEY_DOWN); + KeyDownEvent.SetKeyDown(KeyCode, Repeat != 0); + InputDevice.ProcessEvent(KeyDownEvent); + }); + + HANDLER(KeyUp, + { + READFROMSOCKET(FKeyCodeType, KeyCode); + UE_LOG(PixelStreamingInput, Verbose, TEXT("key up: %d"), KeyCode); + + FPixelStreamingInputDevice::FEvent KeyUpEvent(FPixelStreamingInputDevice::EventType::KEY_UP); + KeyUpEvent.SetKeyUp(KeyCode); + InputDevice.ProcessEvent(KeyUpEvent); + }); + + HANDLER(KeyPress, + { + READFROMSOCKET(FCharacterType, Character); + UE_LOG(PixelStreamingInput, Verbose, TEXT("key press: '%c'"), Character); + + FPixelStreamingInputDevice::FEvent KeyPressEvent(FPixelStreamingInputDevice::EventType::KEY_PRESS); + KeyPressEvent.SetCharCode(Character); + InputDevice.ProcessEvent(KeyPressEvent); + }); + + HANDLER(MouseEnter, + { + InputDevice.ProcessEvent(FPixelStreamingInputDevice::FEvent(FPixelStreamingInputDevice::EventType::MOUSE_ENTER)); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseEnter")); + }); + + HANDLER(MouseLeave, + { + InputDevice.ProcessEvent(FPixelStreamingInputDevice::FEvent(FPixelStreamingInputDevice::EventType::MOUSE_LEAVE)); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseLeave")); + }); + + HANDLER(MouseDown, + { + READFROMSOCKET(FButtonType, Button); + READFROMSOCKET(FPosType, PosX); + READFROMSOCKET(FPosType, PosY); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseDown at (%d, %d), button %d"), PosX, PosY, Button); + + UnquantizeAndDenormalize(PosX, PosY); + + FPixelStreamingInputDevice::FEvent MouseDownEvent(FPixelStreamingInputDevice::EventType::MOUSE_DOWN); + MouseDownEvent.SetMouseClick(Button, PosX, PosY); + InputDevice.ProcessEvent(MouseDownEvent); + }); + + HANDLER(MouseUp, + { + READFROMSOCKET(FButtonType, Button); + 
READFROMSOCKET(FPosType, PosX); + READFROMSOCKET(FPosType, PosY); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseUp at (%d, %d), button %d"), PosX, PosY, Button); + + UnquantizeAndDenormalize(PosX, PosY); + + FPixelStreamingInputDevice::FEvent MouseUpEvent(FPixelStreamingInputDevice::EventType::MOUSE_UP); + MouseUpEvent.SetMouseClick(Button, PosX, PosY); + InputDevice.ProcessEvent(MouseUpEvent); + }); + + HANDLER(MouseMove, + { + READFROMSOCKET(FPosType, PosX); + READFROMSOCKET(FPosType, PosY); + READFROMSOCKET(FDeltaType, DeltaX); + READFROMSOCKET(FDeltaType, DeltaY); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseMove to (%d, %d), delta (%d, %d)"), PosX, PosY, DeltaX, DeltaY); + + UnquantizeAndDenormalize(PosX, PosY); + UnquantizeAndDenormalize(DeltaX, DeltaY); + + FPixelStreamingInputDevice::FEvent MouseMoveEvent(FPixelStreamingInputDevice::EventType::MOUSE_MOVE); + MouseMoveEvent.SetMouseDelta(PosX, PosY, DeltaX, DeltaY); + InputDevice.ProcessEvent(MouseMoveEvent); + }); + + HANDLER(MouseWheel, + { + READFROMSOCKET(FDeltaType, Delta); + READFROMSOCKET(FPosType, PosX); + READFROMSOCKET(FPosType, PosY); + UE_LOG(PixelStreamingInput, Verbose, TEXT("mouseWheel, delta %d"), Delta); + + UnquantizeAndDenormalize(PosX, PosY); + + FPixelStreamingInputDevice::FEvent MouseWheelEvent(FPixelStreamingInputDevice::EventType::MOUSE_WHEEL); + MouseWheelEvent.SetMouseWheel(Delta, PosX, PosY); + InputDevice.ProcessEvent(MouseWheelEvent); + }); + + HANDLER(TouchStart, + { + FTouchesType Touches; + if (!ReceiveTouches(Socket, Touches)) + { + return false; + } + + UE_LOG(PixelStreamingInput, Verbose, TEXT("TouchStart: %s"), *TouchesToString(Touches)); + + for (const FTouch& Touch : Touches) + { + FPixelStreamingInputDevice::FEvent TouchStartEvent(FPixelStreamingInputDevice::EventType::TOUCH_START); + TouchStartEvent.SetTouch(Touch.TouchIndex, Touch.PosX, Touch.PosY, Touch.Force); + InputDevice.ProcessEvent(TouchStartEvent); + } + }); + + HANDLER(TouchEnd, + { + FTouchesType 
Touches; + if (!ReceiveTouches(Socket, Touches)) + { + return false; + } + + UE_LOG(PixelStreamingInput, Verbose, TEXT("TouchEnd: %s"), *TouchesToString(Touches)); + + for (const FTouch& Touch : Touches) + { + FPixelStreamingInputDevice::FEvent TouchEndEvent(FPixelStreamingInputDevice::EventType::TOUCH_END); + TouchEndEvent.SetTouch(Touch.TouchIndex, Touch.PosX, Touch.PosY, Touch.Force); + InputDevice.ProcessEvent(TouchEndEvent); + } + }); + + HANDLER(TouchMove, + { + FTouchesType Touches; + if (!ReceiveTouches(Socket, Touches)) + { + return false; + } + + UE_LOG(PixelStreamingInput, Verbose, TEXT("TouchMove: %s"), *TouchesToString(Touches)); + + for (const FTouch& Touch : Touches) + { + FPixelStreamingInputDevice::FEvent TouchMoveEvent(FPixelStreamingInputDevice::EventType::TOUCH_MOVE); + TouchMoveEvent.SetTouch(Touch.TouchIndex, Touch.PosX, Touch.PosY, Touch.Force); + InputDevice.ProcessEvent(TouchMoveEvent); + } + }); + + HANDLER(MaxFpsRequest, + { + READFROMSOCKET(uint8, Fps); + UE_LOG(PixelStreamingInput, Log, TEXT("%d WebRTC FPS"), Fps); + //Streamer.SetFramerate(Fps); + }); + + HANDLER(AverageBitrateRequest, + { + READFROMSOCKET(uint16, Kbps); + Streamer.SetBitrate(Kbps); + UE_LOG(PixelStreamingInput, Log, TEXT("AverageBitrateRequest: %d"), Kbps); + }); + + HANDLER(StartStreaming, + { + Streamer.StartStreaming(); + UE_LOG(PixelStreamingInput, Log, TEXT("streaming started")); + }); + + HANDLER(StopStreaming, + { + Streamer.StopStreaming(); + UE_LOG(PixelStreamingInput, Log, TEXT("streaming stopped")); + }); + +#undef HANDLER +} + +#undef READFROMSOCKET + +void FProxyConnection::Receive() +{ + while (!ExitRequested) + { + uint8 MsgType; + int32 BytesRead = 0; + if (!Socket->Recv(&MsgType, sizeof(MsgType), BytesRead, ESocketReceiveFlags::WaitAll)) + { + break; + } + + UE_LOG(PixelStreamingInput, Verbose, TEXT("receiving msg %d"), MsgType); + + if (ReceiveHandlers.IsValidIndex(MsgType)) + { + if (ReceiveHandlers[MsgType] != nullptr) + { + if 
(!ReceiveHandlers[MsgType]()) + { + break; + } + } + else + { + UE_LOG(PixelStreamingInput, Warning, TEXT("unbound receive handler %d"), MsgType); + } + } + else + { + UE_LOG(PixelStreamingInput, Warning, TEXT("out of range %d"), MsgType); + } + } + + if (!ExitRequested) + { + Streamer.StopStreaming(); + UE_LOG(PixelStreamingNet, Log, TEXT("WebRTC Proxy disconnected")); + } +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.h new file mode 100644 index 000000000000..f436b847a4aa --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/ProxyConnection.h @@ -0,0 +1,60 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "HAL/ThreadSafeBool.h" +#include "Misc/ScopeLock.h" + +#include "Utils.h" + +class FStreamer; +class FPixelStreamingInputDevice; +class FSocket; + +// encapsulates TCP connection to WebRTC Proxy +// accepts a single connection from WebRTC Proxy, in a loop, accepts a new one once the previous disconnected +// allows sending data to the connection +// runs an internal thread for receiving data, deserialises "Proxy -> UE4" protocol messages and calls +// appropriate handlers from that internal thread +class FProxyConnection final +{ +private: + FProxyConnection(const FProxyConnection&) = delete; + FProxyConnection& operator=(const FProxyConnection&) = delete; + +public: + FProxyConnection(const FString& IP, uint16 Port, FStreamer& Streamer); + ~FProxyConnection(); + + void Run(const FString& IP, uint16 Port); + bool Send(const uint8* Data, uint32 Size); + +private: + bool AcceptConnection(const FString& IP, uint16 Port); + void DestroyConnection(); + + void InitReceiveHandlers(); + void Receive(); + +private: + FStreamer& Streamer; + FPixelStreamingInputDevice& InputDevice; + + // socket obj and its ptr is modified only from the 
internal thread but is used from an external thread + // to send data. This lock protects sending to the socket to avoid concurrent modification. + // It's not needed for receiving from the socket because it happens in the same thread as modifications. + FCriticalSection SocketMt; + FSocket* Socket; + + FCriticalSection ListenerMt; + FSocket* Listener; + + // handlers for different type of messages received from network + TArray> ReceiveHandlers; + + FThreadSafeBool ExitRequested; + // should be the last thing declared, otherwise the thread func can access other members that are not + // initialised yet + FThread Thread; +}; + diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.cpp b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.cpp new file mode 100644 index 000000000000..f09b4b77bf02 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.cpp @@ -0,0 +1,338 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Streamer.h" +#include "UnrealClient.h" +#include "HAL/Runnable.h" +#include "HAL/PlatformFilemanager.h" +#include "HAL/PlatformFile.h" +#include "HAL/PlatformTime.h" +#include "Misc/ConfigCacheIni.h" +#include "Async/Async.h" + +#include "NvVideoEncoder.h" +#include "PixelStreamingCommon.h" +#include "Utils.h" +#include "ProxyConnection.h" + +DECLARE_DWORD_COUNTER_STAT(TEXT("EncodingFramerate"), STAT_PixelStreaming_EncodingFramerate, STATGROUP_PixelStreaming); +DECLARE_DWORD_COUNTER_STAT(TEXT("EncodingBitrate"), STAT_PixelStreaming_EncodingBitrate, STATGROUP_PixelStreaming); + +TAutoConsoleVariable CVarEncoderAverageBitRate( + TEXT("Encoder.AverageBitRate"), + 20000000, + TEXT("Encoder bit rate before reduction for B/W jitter"), + ECVF_RenderThreadSafe); + +TAutoConsoleVariable CVarEncoderMaxBitrate( + TEXT("Encoder.MaxBitrate"), + 100000000, + TEXT("Max bitrate no matter what WebRTC says, in Mbps"), + ECVF_RenderThreadSafe); + +static TAutoConsoleVariable CVarEncoderTargetSize( + TEXT("Encoder.TargetSize"), + TEXT("1920x1080"), + TEXT("Encoder target size in format widthxheight"), + ECVF_Cheat); + +static TAutoConsoleVariable CVarEncoderUseBackBufferSize( + TEXT("Encoder.UseBackBufferSize"), + 1, + TEXT("Whether to use back buffer size or custom size"), + ECVF_Cheat); + +#if !UE_BUILD_SHIPPING +static int32 bEncoderSaveVideoToFile = 0; +static FAutoConsoleVariableRef CVarEncoderSaveVideoToFile( + TEXT("Encoder.SaveVideoToFile"), + bEncoderSaveVideoToFile, + TEXT("Save encoded video into a file"), + ECVF_Cheat | ECVF_RenderThreadSafe); +#endif + +TAutoConsoleVariable CVarStreamerPrioritiseQuality( + TEXT("Streamer.PrioritiseQuality"), + 0, + TEXT("Reduces framerate automatically on bitrate reduction to trade FPS/latency for video quality"), + ECVF_Cheat); + +TAutoConsoleVariable CVarStreamerLowBitrate( + TEXT("Streamer.LowBitrate"), + 2000, + TEXT("Lower bound of bitrate for quality adaptation, Kbps"), + ECVF_Default); + +TAutoConsoleVariable 
CVarStreamerHighBitrate( + TEXT("Streamer.HighBitrate"), + 10000, + TEXT("Upper bound of bitrate for quality adaptation, Kbps"), + ECVF_Default); + +TAutoConsoleVariable CVarStreamerMinFPS( + TEXT("Streamer.MinFPS"), + 10, + TEXT("Minimal FPS for quality adaptation"), + ECVF_Default); + +TAutoConsoleVariable CVarStreamerBitrateReduction( + TEXT("Streamer.BitrateReduction"), + 50.0, + TEXT("How much to reduce WebRTC reported bitrate to handle bitrate jitter, in per cent"), + ECVF_RenderThreadSafe); + +const int32 DefaultFPS = 60; + +FStreamer::FStreamer(const TCHAR* IP, uint16 Port, const FTexture2DRHIRef& FrameBuffer) + : bResizingWindowBackBuffer(false) + , AudioEncoder(*this) + , bSendSpsPps(false) + , bStreamingStarted(false) + , InitialMaxFPS(GEngine->GetMaxFPS()) +{ + if (InitialMaxFPS == 0) + { + InitialMaxFPS = DefaultFPS; + + check(IsInRenderingThread()); + // we are in the rendering thread but `GEngine->SetMaxFPS()` can be called only in the main thread + AsyncTask(ENamedThreads::GameThread, [this]() + { + GEngine->SetMaxFPS(InitialMaxFPS); + }); + } + + ProxyConnection.Reset(new FProxyConnection(IP, Port, *this)); + + UpdateEncoderSettings(FrameBuffer); + CreateVideoEncoder(FrameBuffer); + + // This needs to be called last, after ProxyConnection is created + AudioEncoder.Init(); + + UE_LOG(PixelStreaming, Log, TEXT("Streamer created: %dx%d %d FPS%s"), + VideoEncoderSettings.Width, VideoEncoderSettings.Height, + InitialMaxFPS, + CVarStreamerPrioritiseQuality.GetValueOnAnyThread() != 0 ? 
TEXT(", prioritise quality") : TEXT("")); +} + +// must be in cpp file cos TUniquePtr incomplete type +// this doesn't violate The Rule of Zero: https://blog.rmf.io/cxx11/rule-of-zero cos we don't do any manual stuff +FStreamer::~FStreamer() +{ +} + +void FStreamer::CreateVideoEncoder(const FTexture2DRHIRef& FrameBuffer) +{ + VideoEncoder.Reset(new FNvVideoEncoder(VideoEncoderSettings, FrameBuffer, [this](uint64 Timestamp, bool KeyFrame, const uint8* Data, uint32 Size) + { + SubmitVideoFrame(Timestamp, KeyFrame, Data, Size); + })); + + checkf(VideoEncoder->IsSupported(), TEXT("Failed to initialize NvEnc")); + UE_LOG(PixelStreaming, Log, TEXT("NvEnc initialised")); +} + +void FStreamer::SendSpsPpsHeader() +{ + const TArray& SpsPps = VideoEncoder->GetSpsPpsHeader(); + Stream(FPlatformTime::Seconds(), PixelStreamingProtocol::EToProxyMsg::SpsPps, SpsPps.GetData(), SpsPps.Num()); +} + +void FStreamer::OnFrameBufferReady(const FTexture2DRHIRef& FrameBuffer) +{ + if (!bStreamingStarted) + { + return; + } + + uint64 CaptureMs = NowMs(); + + // VideoEncoder is reset on disconnection + if (!VideoEncoder) + { + CreateVideoEncoder(FrameBuffer); + check(VideoEncoder); + } + + if (bResizingWindowBackBuffer) + { + // Re-initialize video encoder if it has been destroyed by OnPreResizeWindowBackbuffer() + VideoEncoder->PostResizeBackBuffer(); + bResizingWindowBackBuffer = false; + } + + UpdateEncoderSettings(FrameBuffer); + VideoEncoder->EncodeFrame(VideoEncoderSettings, FrameBuffer, CaptureMs); +} + +void FStreamer::SubmitVideoFrame(uint64 Timestamp, bool KeyFrame, const uint8* Data, uint32 Size) +{ + if (bSendSpsPps) + { + SendSpsPpsHeader(); + bSendSpsPps = false; + } + + Stream(Timestamp, KeyFrame ? PixelStreamingProtocol::EToProxyMsg::VideoIDR : PixelStreamingProtocol::EToProxyMsg::Video, Data, Size); +} + +void FStreamer::OnPreResizeWindowBackbuffer() +{ + // Destroy video encoder before resizing window so it releases usage of graphics device & back buffer. 
+ // It's recreated later on in OnFrameBufferReady(). + UE_LOG(PixelStreaming, Log, TEXT("Reset video encoder OnPreResizeWindowBackbuffer")); + VideoEncoder->PreResizeBackBuffer(); + bResizingWindowBackBuffer = true; +} + +// This is called from inside the audio encoder, when a audio packet is ready +void FStreamer::OnAudioPCMPacketReady(const uint8* Data, int Size) +{ + Stream(FPlatformTime::Seconds(), PixelStreamingProtocol::EToProxyMsg::AudioPCM, Data, Size); +} + +void FStreamer::Stream(uint64 Timestamp, PixelStreamingProtocol::EToProxyMsg PktType, const uint8* Data, uint32 Size) +{ + FScopeLock Lock(&AudioVideoStreamSync); + + SaveEncodedVideoToFile(PktType, Data, Size); + + if (ProxyConnection->Send(reinterpret_cast(&Timestamp), sizeof(Timestamp)) + && ProxyConnection->Send(reinterpret_cast(&PktType), 1) + && ProxyConnection->Send(reinterpret_cast(&Size), sizeof(Size)) + && ProxyConnection->Send(Data, Size)) + { + static uint32 frameNo = 0; + UE_LOG(PixelStreamingNet, Verbose, TEXT("Sent %s %d, %d bytes"), PacketTypeStr(PktType), frameNo++, Size); + } +} + +void FStreamer::SaveEncodedVideoToFile(PixelStreamingProtocol::EToProxyMsg PktType, const uint8* Data, uint32 Size) +{ +#if !UE_BUILD_SHIPPING + if (bEncoderSaveVideoToFile && !EncodedVideoFile) + { + // Open video file for writing + IPlatformFile& PlatformFile = FPlatformFileManager::Get().GetPlatformFile(); + EncodedVideoFile.Reset(PlatformFile.OpenWrite(TEXT("EncodedVideoFile.h264"))); + check(EncodedVideoFile); + } + if (EncodedVideoFile && (PktType != PixelStreamingProtocol::EToProxyMsg::AudioPCM)) + { + EncodedVideoFile->Write(Data, Size); + EncodedVideoFile->Flush(); + } + if (!bEncoderSaveVideoToFile && EncodedVideoFile) + { + // Close video file for writing + EncodedVideoFile.Reset(); + } +#endif +} + +void FStreamer::ForceIdrFrame() +{ + VideoEncoder->ForceIdrFrame(); +} + +void FStreamer::UpdateEncoderSettings(const FTexture2DRHIRef& FrameBuffer) +{ + float MaxBitrateMbps = 
CVarEncoderMaxBitrate.GetValueOnRenderThread(); + + // HACK(andriy): We reduce WebRTC reported bitrate to compensate for B/W jitter. We have long pipeline + // before passing encoded frames to WebRTC and a couple of frames are already in the pipeline when + // WebRTC reports lower bitrate. This often causes that WebRTC Rate Limiter or network drop frames + // because they exceed available bandwidth. While significant bandwidth drop are not expected to + // happen often small jitter is possible and causes frequent video distortion. Reducing reported bitrate + // by a small percentage gives us a chance to avoid frame drops on bandwidth jitter. + // There're couple of drawbacks: + // - minor one - we don't use all available bandwidth to achieve best possible quality + // - major one - we don't use all available bandwidth and in case of network congestion + // other connections can get upper hand and depress bandwidth allocated for streaming even more. + // Proper feasible solution is unknown atm. + // + // do reduction here instead of e.g. 
`SetBitrate` because this method is called on every frame and so + // changes to `CVarStreamerBitrateReduction` will be immediately picked up + float BitrateReduction = CVarStreamerBitrateReduction.GetValueOnRenderThread(); + uint32 Bitrate = CVarEncoderAverageBitRate.GetValueOnRenderThread(); + uint32 ReducedBitrate = static_cast(Bitrate / 100.0 * (100.0 - BitrateReduction)); + ReducedBitrate = FMath::Min(ReducedBitrate, static_cast(MaxBitrateMbps * 1000 * 1000)); + VideoEncoderSettings.AverageBitRate = ReducedBitrate; + SET_DWORD_STAT(STAT_PixelStreaming_EncodingBitrate, VideoEncoderSettings.AverageBitRate); + + VideoEncoderSettings.FrameRate = GEngine->GetMaxFPS(); + SET_DWORD_STAT(STAT_PixelStreaming_EncodingFramerate, VideoEncoderSettings.FrameRate); + + bool bUseBackBufferSize = CVarEncoderUseBackBufferSize.GetValueOnAnyThread() > 0; + if (bUseBackBufferSize) + { + VideoEncoderSettings.Width = FrameBuffer->GetSizeX(); + VideoEncoderSettings.Height = FrameBuffer->GetSizeY(); + } + else + { + FString EncoderTargetSize = CVarEncoderTargetSize.GetValueOnAnyThread(); + FString TargetWidth, TargetHeight; + bool bValidSize = EncoderTargetSize.Split(TEXT("x"), &TargetWidth, &TargetHeight); + if (bValidSize) + { + VideoEncoderSettings.Width = FCString::Atoi(*TargetWidth); + VideoEncoderSettings.Height = FCString::Atoi(*TargetHeight); + } + } +} + +void FStreamer::SetBitrate(uint16 Kbps) +{ + UE_LOG(PixelStreaming, Log, TEXT("%d Kbps"), Kbps); + + AsyncTask(ENamedThreads::GameThread, [Kbps]() + { + CVarEncoderAverageBitRate->Set(Kbps * 1000); + }); + + // reduce framerate proportionally to WebRTC reported bitrate to prioritise quality over FPS/latency + // by lowering framerate we allocate more bandwidth to fewer frames, thus increasing quality + if (CVarStreamerPrioritiseQuality.GetValueOnAnyThread()) + { + int32 Fps; + + // bitrate lower than lower bound results always in min FPS + // bitrate between lower and upper bounds results in FPS proportionally between min 
and max FPS + // bitrate higher than upper bound results always in max FPS + const uint16 LowerBoundKbps = CVarStreamerLowBitrate.GetValueOnAnyThread(); + const int32 MinFps = FMath::Min(CVarStreamerMinFPS.GetValueOnAnyThread(), InitialMaxFPS); + const uint16 UpperBoundKbps = CVarStreamerHighBitrate.GetValueOnAnyThread(); + const int32 MaxFps = InitialMaxFPS; + + if (Kbps < LowerBoundKbps) + { + Fps = MinFps; + } + else if (Kbps < UpperBoundKbps) + { + Fps = MinFps + static_cast(static_cast(MaxFps - MinFps) / (UpperBoundKbps - LowerBoundKbps) * (Kbps - LowerBoundKbps)); + } + else + { + Fps = MaxFps; + } + + SetFramerate(Fps); + } +} + +void FStreamer::SetFramerate(int32 Fps) +{ + UE_LOG(PixelStreaming, Log, TEXT("%d FPS"), Fps); + + AsyncTask(ENamedThreads::GameThread, [Fps]() + { + GEngine->SetMaxFPS(Fps); + }); +} + +void FStreamer::SendResponse(const FString& Descriptor) +{ + Stream(FPlatformTime::Seconds(), PixelStreamingProtocol::EToProxyMsg::Response, reinterpret_cast(*Descriptor), Descriptor.Len() * sizeof(TCHAR)); +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.h new file mode 100644 index 000000000000..a2816a1e111c --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Streamer.h @@ -0,0 +1,83 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "VideoEncoder.h" +#include "AudioEncoder.h" +#include "RHI.h" +#include "RHIResources.h" +#include "Engine/GameViewportClient.h" +#include "ProtocolDefs.h" + +DECLARE_STATS_GROUP(TEXT("PixelStreaming"), STATGROUP_PixelStreaming, STATCAT_Advanced); + +class FRenderTarget; +class FProxyConnection; +class IFileHandle; +class FSocket; +struct ID3D11Device; + +class FStreamer +{ +private: + FStreamer(const FStreamer&) = delete; + FStreamer& operator=(const FStreamer&) = delete; + +public: + FStreamer(const TCHAR* IP, uint16 Port, const FTexture2DRHIRef& FrameBuffer); + virtual ~FStreamer(); + + void OnFrameBufferReady(const FTexture2DRHIRef& FrameBuffer); + void OnPreResizeWindowBackbuffer(); + void OnAudioPCMPacketReady(const uint8* Data, int Size); + void ForceIdrFrame(); + + void StartStreaming() + { + bStreamingStarted = true; + ForceIdrFrame(); + } + + void StopStreaming() + { + bStreamingStarted = false; + } + + void SetBitrate(uint16 Kbps); + void SetFramerate(int32 Fps); + + void SendResponse(const FString& Descriptor); + +private: + void CreateVideoEncoder(const FTexture2DRHIRef& FrameBuffer); + void SendSpsPpsHeader(); + void UpdateEncoderSettings(const FTexture2DRHIRef& FrameBuffer); + void Stream(uint64 Timestamp, PixelStreamingProtocol::EToProxyMsg, const uint8* Data, uint32 Size); + void SaveEncodedVideoToFile(PixelStreamingProtocol::EToProxyMsg PktType, const uint8* Data, uint32 Size); + void SubmitVideoFrame(uint64 Timestamp, bool KeyFrame, const uint8* Data, uint32 Size); + +private: + bool bResizingWindowBackBuffer; + FVideoEncoderSettings VideoEncoderSettings; + TUniquePtr VideoEncoder; + FAudioEncoder AudioEncoder; + + TUniquePtr ProxyConnection; + TArray ReceiveBuffer; + + FThreadSafeBool bSendSpsPps; + + // we shouldn't start streaming immediately after WebRTC is connected because + // encoding pipeline is not ready yet and a couple of first frames can be lost. 
+ // instead wait for an explicit command to start streaming + FThreadSafeBool bStreamingStarted; + + FCriticalSection AudioVideoStreamSync; + +#if !UE_BUILD_SHIPPING + TUniquePtr EncodedVideoFile; +#endif + + int32 InitialMaxFPS; +}; + diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Utils.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Utils.h new file mode 100644 index 000000000000..18d302aee0e7 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/Utils.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "HAL/Runnable.h" +#include "HAL/RunnableThread.h" +#include + +class FThread final : public FRunnable +{ +public: + using FCallback = TFunction; + + explicit FThread(TCHAR const* ThreadName, const FCallback& Callback) : + Callback(Callback) + { + Thread = FRunnableThread::Create(this, ThreadName); + } + + void Join() + { + Thread->WaitForCompletion(); + } + + virtual uint32 Run() override + { + Callback(); + return 0; + } + +private: + FCallback Callback; + FRunnableThread* Thread; + +private: + FThread(const FThread&) = delete; + FThread& operator=(const FThread&) = delete; +}; + +// uses chrono library to have comparable timestamps between UE4 and webrtc app +inline uint64 NowMs() +{ + //return static_cast(FPlatformTime::Cycles64() * FPlatformTime::GetSecondsPerCycle64() * 1000); + + //double secs = FPlatformTime::Seconds(); + //// for the trick look at `FWindowsPlatformTime::Seconds()` + //return static_cast((secs - 16777216) * 1000); + + using namespace std::chrono; + system_clock::duration now = system_clock::now().time_since_epoch(); + return duration_cast(now - duration_cast(now)).count(); +} diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/VideoEncoder.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/VideoEncoder.h new file mode 
100644 index 000000000000..62ef059c6c3e --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Private/VideoEncoder.h @@ -0,0 +1,86 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "RHI.h" +#include "RHIResources.h" + +struct FVideoEncoderSettings +{ + FVideoEncoderSettings() + : AverageBitRate(20000000) // TODO(andriy): the initial value should be dictated by the receiver + , FrameRate(60) + , Width(1920) + , Height(1080) + {} + + FORCEINLINE bool operator==(const FVideoEncoderSettings& Other) const + { + return (AverageBitRate == Other.AverageBitRate) + && (FrameRate == Other.FrameRate) + && (Width == Other.Width) + && (Height == Other.Height); + } + + FORCEINLINE bool operator!=(const FVideoEncoderSettings& Other) const + { + return (AverageBitRate != Other.AverageBitRate) + || (FrameRate != Other.FrameRate) + || (Width != Other.Width) + || (Height != Other.Height); + } + + uint32 AverageBitRate; + uint32 FrameRate; + uint32 Width; + uint32 Height; +}; + +class IVideoEncoder +{ +public: + using FEncodedFrameReadyCallback = TFunction; + + virtual ~IVideoEncoder() = default; + + /** + * Return name of the encoder. + */ + virtual FString GetName() const = 0; + + /** + * If encoder is supported. + */ + virtual bool IsSupported() const = 0; + + /** + * Get Sps/Pps header data. + */ + virtual const TArray& GetSpsPpsHeader() const = 0; + + /** + * Actions to take before resizing back buffer. + */ + virtual void PreResizeBackBuffer() {} + + /** + * Actions to take after back buffer is resized. + */ + virtual void PostResizeBackBuffer() {} + + /** + * Encode an input back buffer. + */ + virtual void EncodeFrame(const FVideoEncoderSettings& Settings, const FTexture2DRHIRef& BackBuffer, uint64 CaptureMs) = 0; + + /** + * Force the next frame to be an IDR frame. + */ + virtual void ForceIdrFrame() = 0; + + /** + * If encoder is running in async/sync mode. 
+ */ + virtual bool IsAsyncEnabled() const = 0; +}; + diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/IPixelStreamingPlugin.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/IPixelStreamingPlugin.h new file mode 100644 index 000000000000..fec94005d217 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/IPixelStreamingPlugin.h @@ -0,0 +1,60 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Modules/ModuleInterface.h" +#include "Modules/ModuleManager.h" +#include "PixelStreamingCommon.h" +#include "IInputDeviceModule.h" + +/** +* The public interface to this module +*/ +class IPixelStreamingPlugin : public IInputDeviceModule +{ +public: + + /** + * Singleton-like access to this module's interface. This is just for convenience! + * Beware of calling this during the shutdown phase, though. Your module might have been unloaded already. + * + * @return Returns singleton instance, loading the module on demand if needed + */ + static inline IPixelStreamingPlugin& Get() + { + return FModuleManager::LoadModuleChecked< IPixelStreamingPlugin >("PixelStreaming"); + } + + /** + * Checks to see if this module is loaded and ready. It is only valid to call Get() if IsAvailable() returns true. + * + * @return True if the module is loaded and ready to use + */ + static inline bool IsAvailable() + { + return FModuleManager::Get().IsModuleLoaded("PixelStreaming"); + } + + /** + * Returns a reference to the input device. The lifetime of this reference + * is that of the underlying shared pointer. + * @return A reference to the input device. + */ + virtual class FPixelStreamingInputDevice& GetInputDevice() = 0; + + /** + * Add any client config JSON to the given object which relates to + * configuring the input system for the pixel streaming on the browser. + * @param JsonObject - The JSON object to add fields to. 
+ */ + virtual void AddClientConfig(TSharedRef& JsonObject) = 0; + + /** + * Send a data response back to the browser where we are sending video. This + * could be used as a response to a UI interaction, for example. + * @param Descriptor - A generic descriptor string. + */ + virtual void SendResponse(const FString& Descriptor) = 0; +}; + diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingCommon.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingCommon.h new file mode 100644 index 000000000000..9149ab90bec9 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingCommon.h @@ -0,0 +1,13 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Logging/LogMacros.h" +#include "HAL/IConsoleManager.h" + +DECLARE_LOG_CATEGORY_EXTERN(PixelStreaming, Log, VeryVerbose); +DECLARE_LOG_CATEGORY_EXTERN(PixelStreamingInput, Log, VeryVerbose); +DECLARE_LOG_CATEGORY_EXTERN(PixelStreamingNet, Log, VeryVerbose); +DECLARE_LOG_CATEGORY_EXTERN(PixelStreamingCapture, Log, VeryVerbose); + +extern TAutoConsoleVariable CVarEncoderAverageBitRate; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingInputComponent.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingInputComponent.h new file mode 100644 index 000000000000..0009b07f1d61 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingInputComponent.h @@ -0,0 +1,89 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "Components/ActorComponent.h" +#include "PixelStreamingInputComponent.generated.h" + +class IPixelStreamingPlugin; + +/** + * This component may be attached to an actor to allow UI interactions to be + * handled as the delegate will be notified about the interaction and will be + * supplied with a generic descriptor string containing, for example, JSON data. + * Responses back to the source of the UI interactions may also be sent. + */ +UCLASS(Blueprintable, ClassGroup = (PixelStreaming), meta = (BlueprintSpawnableComponent)) +class PIXELSTREAMING_API UPixelStreamingInputComponent : public UActorComponent +{ + GENERATED_BODY() + +public: + + UPixelStreamingInputComponent(); + + // The delegate which will be notified about a UI interaction. + DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FOnPixelStreamingInput, const FString&, Descriptor); + UPROPERTY(BlueprintAssignable, Category = "Pixel Streaming Input") + FOnPixelStreamingInput OnPixelStreamingInputEvent; + + /** + * Run a built-in command. The commands are defined by a JSON descriptor. + * The currently supported commands are: + * + * 1. A command to run any console command: + * "{ ConsoleCommand: }" + * + * 2. A command to change the resolution to the given width and height. + * "{ Resolution: { Width: , Height: } }" + * + * 3. A command to change the encoder settings by reducing the bitrate by the + * given percentage. + * "{ Encoder: { BitrateReduction: } } + * + * @param Descriptor - The command JSON descriptor. + * @return True if the command was successfully executed. + */ + bool OnCommand(const FString& Descriptor); + + /** + * Send a response back to the source of the UI interactions. + * @param Descriptor - A generic descriptor string. 
+ */ + UFUNCTION(BlueprintCallable, Category = "Pixel Streaming Input") + void SendPixelStreamingResponse(const FString& Descriptor); + + UFUNCTION(BlueprintPure, Category = "Pixel Streaming Input", meta = (DeprecatedFunction, DeprecationMessage = "Use GetJsonStringValue instead")) + bool GetJsonStringField(FString Descriptor, FString FieldName, FString& StringField); + + /** + * Helper function to extract a string field from a JSON descriptor of a + * UI interaction given its field name. + * The field name may be hierarchical, delimited by a period. For example, + * to access the Width value of a Resolution command above you should use + * "Resolution.Width" to get the width value. + * @param Descriptor - The UI interaction JSON descriptor. + * @param FieldName - The name of the field to look for in the JSON. + * @param StringValue - The string value associated with the field name. + * @param Success - True if the field exists in the JSON data. + */ + UFUNCTION(BlueprintPure, Category = "Pixel Streaming Input") + void GetJsonStringValue(FString Descriptor, FString FieldName, FString& StringValue, bool& Success); + + /** + * Helper function to add a string field to a JSON descriptor. This produces + * a new descriptor which may then be chained to add further string fields. + * @param Descriptor - The initial JSON descriptor which may be blank initially. + * @param FieldName - The name of the field to add to the JSON. + * @param StringValue - The string value associated with the field name. + * @param NewDescriptor - The JSON descriptor with the string field added. + * @param Success - True if the string field could be added successfully. + */ + UFUNCTION(BlueprintPure, Category = "Pixel Streaming Input") + void AddJsonStringValue(const FString& Descriptor, FString FieldName, FString StringValue, FString& NewDescriptor, bool& Success); + +private: + + // For convenience we keep a reference to the Pixel Streaming plugin. 
+ IPixelStreamingPlugin* PixelStreamingPlugin; +}; diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingSettings.h b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingSettings.h new file mode 100644 index 000000000000..3791372f1145 --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/PixelStreaming/Public/PixelStreamingSettings.h @@ -0,0 +1,33 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Engine/DeveloperSettings.h" +#include "InputCoreTypes.h" +#include "PixelStreamingSettings.generated.h" + +UCLASS(config = PixelStreaming, defaultconfig, meta = (DisplayName = "PixelStreaming")) +class PIXELSTREAMING_API UPixelStreamingSettings : public UDeveloperSettings +{ + GENERATED_UCLASS_BODY() + +public: + /** + * Pixel streaming always requires a default software cursor as it needs to + * be shown on the browser to allow the user to click UI elements. 
+ */ + UPROPERTY(config, EditAnywhere, Category = PixelStreaming) + FSoftClassPath PixelStreamingDefaultCursorClassName; + + // Begin UDeveloperSettings Interface + virtual FName GetCategoryName() const override; +#if WITH_EDITOR + virtual FText GetSectionText() const override; +#endif + // END UDeveloperSettings Interface + +#if WITH_EDITOR + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; +#endif +}; \ No newline at end of file diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/NVIDIAVideoCodecSDK.tps b/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/NVIDIAVideoCodecSDK.tps new file mode 100644 index 000000000000..c88d28ff20ff --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/NVIDIAVideoCodecSDK.tps @@ -0,0 +1,11 @@ + + + NVIDIA VIDEO CODEC SDK v8.0 + /Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder + Performs encoding/decoding of a video stream into various formats. Used for prototyping UE4 pixel streaming plugin for Enterprise team. SDK will primarily function as the video encoder for UE4 pixel streaming server instances deployed primarily to the cloud as well as locally. 
+ http://developer2.download.nvidia.com/designworks/DesignWorks_SDKs_Samples_Tools_License_distrib_use_rights_2017_06_13.pdf + + P4 + + None + \ No newline at end of file diff --git a/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/nvEncodeAPI.h b/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/nvEncodeAPI.h new file mode 100644 index 000000000000..594c4ebec73d --- /dev/null +++ b/Engine/Plugins/Experimental/PixelStreaming/Source/ThirdParty/NvEncoder/nvEncodeAPI.h @@ -0,0 +1,3216 @@ +/* + * This copyright notice applies to this header file only: + * + * Copyright (c) 2010-2015 NVIDIA Corporation + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the software, and to permit persons to whom the + * software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. + */ + +/** + * \file nvEncodeAPI.h + * NvEncodeAPI provides a NVENC Video Encoding interface to NVIDIA GPU devices based on the Kepler architecture. 
+ * \date 2011-2016 + * This file contains the interface constants, structure definitions and function prototypes. + */ + +#ifndef _NV_ENCODEAPI_H_ +#define _NV_ENCODEAPI_H_ + +#include + +#ifdef _WIN32 +#include +#endif + +#ifdef _MSC_VER +#ifndef _STDINT +typedef __int32 int32_t; +typedef unsigned __int32 uint32_t; +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +typedef signed char int8_t; +typedef unsigned char uint8_t; +typedef short int16_t; +typedef unsigned short uint16_t; +#endif +#else +#include +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * \addtogroup ENCODER_STRUCTURE NvEncodeAPI Data structures + * @{ + */ + +#ifdef _WIN32 +#define NVENCAPI __stdcall +typedef RECT NVENC_RECT; +#else +#define NVENCAPI +// ========================================================================================= +#ifndef GUID +/*! + * \struct GUID + * Abstracts the GUID structure for non-windows platforms. + */ +// ========================================================================================= +typedef struct +{ + uint32_t Data1; /**< [in]: Specifies the first 8 hexadecimal digits of the GUID. */ + uint16_t Data2; /**< [in]: Specifies the first group of 4 hexadecimal digits. */ + uint16_t Data3; /**< [in]: Specifies the second group of 4 hexadecimal digits. */ + uint8_t Data4[8]; /**< [in]: Array of 8 bytes. The first 2 bytes contain the third group of 4 hexadecimal digits. + The remaining 6 bytes contain the final 12 hexadecimal digits. */ +} GUID; +#endif // GUID + +/** + * \struct _NVENC_RECT + * Defines a Rectangle. Used in ::NV_ENC_PREPROCESS_FRAME. + */ +typedef struct _NVENC_RECT +{ + uint32_t left; /**< [in]: X coordinate of the upper left corner of rectangular area to be specified. */ + uint32_t top; /**< [in]: Y coordinate of the upper left corner of the rectangular area to be specified. */ + uint32_t right; /**< [in]: X coordinate of the bottom right corner of the rectangular area to be specified. 
*/ + uint32_t bottom; /**< [in]: Y coordinate of the bottom right corner of the rectangular area to be specified. */ +} NVENC_RECT; + +#endif // _WIN32 + +/** @} */ /* End of GUID and NVENC_RECT structure grouping*/ + +typedef void* NV_ENC_INPUT_PTR; /**< NVENCODE API input buffer */ +typedef void* NV_ENC_OUTPUT_PTR; /**< NVENCODE API output buffer*/ +typedef void* NV_ENC_REGISTERED_PTR; /**< A Resource that has been registered with NVENCODE API*/ + +#define NVENCAPI_MAJOR_VERSION 7 +#define NVENCAPI_MINOR_VERSION 0 + +#define NVENCAPI_VERSION (NVENCAPI_MAJOR_VERSION | (NVENCAPI_MINOR_VERSION << 24)) + +/** + * Macro to generate per-structure version for use with API. + */ +#define NVENCAPI_STRUCT_VERSION(ver) ((uint32_t)NVENCAPI_VERSION | ((ver)<<16) | (0x7 << 28)) + + +#define NVENC_INFINITE_GOPLENGTH 0xffffffff + +#define NV_MAX_SEQ_HDR_LEN (512) + +// ========================================================================================= +// Encode Codec GUIDS supported by the NvEncodeAPI interface. +// ========================================================================================= + +// {6BC82762-4E63-4ca4-AA85-1E50F321F6BF} +static const GUID NV_ENC_CODEC_H264_GUID = +{ 0x6bc82762, 0x4e63, 0x4ca4, { 0xaa, 0x85, 0x1e, 0x50, 0xf3, 0x21, 0xf6, 0xbf } }; + +// {790CDC88-4522-4d7b-9425-BDA9975F7603} +static const GUID NV_ENC_CODEC_HEVC_GUID = +{ 0x790cdc88, 0x4522, 0x4d7b, { 0x94, 0x25, 0xbd, 0xa9, 0x97, 0x5f, 0x76, 0x3 } }; + + + +// ========================================================================================= +// * Encode Profile GUIDS supported by the NvEncodeAPI interface. 
+// ========================================================================================= + +// {BFD6F8E7-233C-4341-8B3E-4818523803F4} +static const GUID NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID = +{ 0xbfd6f8e7, 0x233c, 0x4341, { 0x8b, 0x3e, 0x48, 0x18, 0x52, 0x38, 0x3, 0xf4 } }; + +// {0727BCAA-78C4-4c83-8C2F-EF3DFF267C6A} +static const GUID NV_ENC_H264_PROFILE_BASELINE_GUID = +{ 0x727bcaa, 0x78c4, 0x4c83, { 0x8c, 0x2f, 0xef, 0x3d, 0xff, 0x26, 0x7c, 0x6a } }; + +// {60B5C1D4-67FE-4790-94D5-C4726D7B6E6D} +static const GUID NV_ENC_H264_PROFILE_MAIN_GUID = +{ 0x60b5c1d4, 0x67fe, 0x4790, { 0x94, 0xd5, 0xc4, 0x72, 0x6d, 0x7b, 0x6e, 0x6d } }; + +// {E7CBC309-4F7A-4b89-AF2A-D537C92BE310} +static const GUID NV_ENC_H264_PROFILE_HIGH_GUID = +{ 0xe7cbc309, 0x4f7a, 0x4b89, { 0xaf, 0x2a, 0xd5, 0x37, 0xc9, 0x2b, 0xe3, 0x10 } }; + +// {7AC663CB-A598-4960-B844-339B261A7D52} +static const GUID NV_ENC_H264_PROFILE_HIGH_444_GUID = +{ 0x7ac663cb, 0xa598, 0x4960, { 0xb8, 0x44, 0x33, 0x9b, 0x26, 0x1a, 0x7d, 0x52 } }; + +// {40847BF5-33F7-4601-9084-E8FE3C1DB8B7} +static const GUID NV_ENC_H264_PROFILE_STEREO_GUID = +{ 0x40847bf5, 0x33f7, 0x4601, { 0x90, 0x84, 0xe8, 0xfe, 0x3c, 0x1d, 0xb8, 0xb7 } }; + +// {CE788D20-AAA9-4318-92BB-AC7E858C8D36} +static const GUID NV_ENC_H264_PROFILE_SVC_TEMPORAL_SCALABILTY = +{ 0xce788d20, 0xaaa9, 0x4318, { 0x92, 0xbb, 0xac, 0x7e, 0x85, 0x8c, 0x8d, 0x36 } }; + +// {B405AFAC-F32B-417B-89C4-9ABEED3E5978} +static const GUID NV_ENC_H264_PROFILE_PROGRESSIVE_HIGH_GUID = +{ 0xb405afac, 0xf32b, 0x417b, { 0x89, 0xc4, 0x9a, 0xbe, 0xed, 0x3e, 0x59, 0x78 } }; + +// {AEC1BD87-E85B-48f2-84C3-98BCA6285072} +static const GUID NV_ENC_H264_PROFILE_CONSTRAINED_HIGH_GUID = +{ 0xaec1bd87, 0xe85b, 0x48f2, { 0x84, 0xc3, 0x98, 0xbc, 0xa6, 0x28, 0x50, 0x72 } }; + +// {B514C39A-B55B-40fa-878F-F1253B4DFDEC} +static const GUID NV_ENC_HEVC_PROFILE_MAIN_GUID = +{ 0xb514c39a, 0xb55b, 0x40fa, { 0x87, 0x8f, 0xf1, 0x25, 0x3b, 0x4d, 0xfd, 0xec } }; + +// 
{fa4d2b6c-3a5b-411a-8018-0a3f5e3c9be5} +static const GUID NV_ENC_HEVC_PROFILE_MAIN10_GUID = +{ 0xfa4d2b6c, 0x3a5b, 0x411a, { 0x80, 0x18, 0x0a, 0x3f, 0x5e, 0x3c, 0x9b, 0xe5 } }; + +// For HEVC Main 444 8 bit and HEVC Main 444 10 bit profiles only +// {51ec32b5-1b4c-453c-9cbd-b616bd621341} +static const GUID NV_ENC_HEVC_PROFILE_FREXT_GUID = +{ 0x51ec32b5, 0x1b4c, 0x453c, { 0x9c, 0xbd, 0xb6, 0x16, 0xbd, 0x62, 0x13, 0x41 } }; + +// ========================================================================================= +// * Preset GUIDS supported by the NvEncodeAPI interface. +// ========================================================================================= +// {B2DFB705-4EBD-4C49-9B5F-24A777D3E587} +static const GUID NV_ENC_PRESET_DEFAULT_GUID = +{ 0xb2dfb705, 0x4ebd, 0x4c49, { 0x9b, 0x5f, 0x24, 0xa7, 0x77, 0xd3, 0xe5, 0x87 } }; + +// {60E4C59F-E846-4484-A56D-CD45BE9FDDF6} +static const GUID NV_ENC_PRESET_HP_GUID = +{ 0x60e4c59f, 0xe846, 0x4484, { 0xa5, 0x6d, 0xcd, 0x45, 0xbe, 0x9f, 0xdd, 0xf6 } }; + +// {34DBA71D-A77B-4B8F-9C3E-B6D5DA24C012} +static const GUID NV_ENC_PRESET_HQ_GUID = +{ 0x34dba71d, 0xa77b, 0x4b8f, { 0x9c, 0x3e, 0xb6, 0xd5, 0xda, 0x24, 0xc0, 0x12 } }; + +// {82E3E450-BDBB-4e40-989C-82A90DF9EF32} +static const GUID NV_ENC_PRESET_BD_GUID = +{ 0x82e3e450, 0xbdbb, 0x4e40, { 0x98, 0x9c, 0x82, 0xa9, 0xd, 0xf9, 0xef, 0x32 } }; + +// {49DF21C5-6DFA-4feb-9787-6ACC9EFFB726} +static const GUID NV_ENC_PRESET_LOW_LATENCY_DEFAULT_GUID = +{ 0x49df21c5, 0x6dfa, 0x4feb, { 0x97, 0x87, 0x6a, 0xcc, 0x9e, 0xff, 0xb7, 0x26 } }; + +// {C5F733B9-EA97-4cf9-BEC2-BF78A74FD105} +static const GUID NV_ENC_PRESET_LOW_LATENCY_HQ_GUID = +{ 0xc5f733b9, 0xea97, 0x4cf9, { 0xbe, 0xc2, 0xbf, 0x78, 0xa7, 0x4f, 0xd1, 0x5 } }; + +// {67082A44-4BAD-48FA-98EA-93056D150A58} +static const GUID NV_ENC_PRESET_LOW_LATENCY_HP_GUID = +{ 0x67082a44, 0x4bad, 0x48fa, { 0x98, 0xea, 0x93, 0x5, 0x6d, 0x15, 0xa, 0x58 } }; + +// {D5BFB716-C604-44e7-9BB8-DEA5510FC3AC} +static const GUID 
NV_ENC_PRESET_LOSSLESS_DEFAULT_GUID = +{ 0xd5bfb716, 0xc604, 0x44e7, { 0x9b, 0xb8, 0xde, 0xa5, 0x51, 0xf, 0xc3, 0xac } }; + +// {149998E7-2364-411d-82EF-179888093409} +static const GUID NV_ENC_PRESET_LOSSLESS_HP_GUID = +{ 0x149998e7, 0x2364, 0x411d, { 0x82, 0xef, 0x17, 0x98, 0x88, 0x9, 0x34, 0x9 } }; + +/** + * \addtogroup ENCODER_STRUCTURE NvEncodeAPI Data structures + * @{ + */ + +/** + * Input frame encode modes + */ +typedef enum _NV_ENC_PARAMS_FRAME_FIELD_MODE +{ + NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME = 0x01, /**< Frame mode */ + NV_ENC_PARAMS_FRAME_FIELD_MODE_FIELD = 0x02, /**< Field mode */ + NV_ENC_PARAMS_FRAME_FIELD_MODE_MBAFF = 0x03 /**< MB adaptive frame/field */ +} NV_ENC_PARAMS_FRAME_FIELD_MODE; + +/** + * Rate Control Modes + */ +typedef enum _NV_ENC_PARAMS_RC_MODE +{ + NV_ENC_PARAMS_RC_CONSTQP = 0x0, /**< Constant QP mode */ + NV_ENC_PARAMS_RC_VBR = 0x1, /**< Variable bitrate mode */ + NV_ENC_PARAMS_RC_CBR = 0x2, /**< Constant bitrate mode */ + NV_ENC_PARAMS_RC_VBR_MINQP = 0x4, /**< Variable bitrate mode with MinQP */ + NV_ENC_PARAMS_RC_2_PASS_QUALITY = 0x8, /**< Multi pass encoding optimized for image quality and works only with low latency mode */ + NV_ENC_PARAMS_RC_2_PASS_FRAMESIZE_CAP = 0x10, /**< Multi pass encoding optimized for maintaining frame size and works only with low latency mode */ + NV_ENC_PARAMS_RC_2_PASS_VBR = 0x20 /**< Multi pass VBR */ +} NV_ENC_PARAMS_RC_MODE; + +#define NV_ENC_PARAMS_RC_CBR2 NV_ENC_PARAMS_RC_CBR /**< Deprecated */ + +/** + * Input picture structure + */ +typedef enum _NV_ENC_PIC_STRUCT +{ + NV_ENC_PIC_STRUCT_FRAME = 0x01, /**< Progressive frame */ + NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM = 0x02, /**< Field encoding top field first */ + NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP = 0x03 /**< Field encoding bottom field first */ +} NV_ENC_PIC_STRUCT; + +/** + * Input picture type + */ +typedef enum _NV_ENC_PIC_TYPE +{ + NV_ENC_PIC_TYPE_P = 0x0, /**< Forward predicted */ + NV_ENC_PIC_TYPE_B = 0x01, /**< Bi-directionally 
predicted picture */ + NV_ENC_PIC_TYPE_I = 0x02, /**< Intra predicted picture */ + NV_ENC_PIC_TYPE_IDR = 0x03, /**< IDR picture */ + NV_ENC_PIC_TYPE_BI = 0x04, /**< Bi-directionally predicted with only Intra MBs */ + NV_ENC_PIC_TYPE_SKIPPED = 0x05, /**< Picture is skipped */ + NV_ENC_PIC_TYPE_INTRA_REFRESH = 0x06, /**< First picture in intra refresh cycle */ + NV_ENC_PIC_TYPE_UNKNOWN = 0xFF /**< Picture type unknown */ +} NV_ENC_PIC_TYPE; + +/** + * Motion vector precisions + */ +typedef enum _NV_ENC_MV_PRECISION +{ + NV_ENC_MV_PRECISION_DEFAULT = 0x0, /** (if lookahead is enabled, input frames must remain available to the encoder until encode completion) */ + uint32_t disableIadapt :1; /**< [in]: Set this to 1 to disable adaptive I-frame insertion at scene cuts (only has an effect when lookahead is enabled) */ + uint32_t disableBadapt :1; /**< [in]: Set this to 1 to disable adaptive B-frame decision (only has an effect when lookahead is enabled) */ + uint32_t enableTemporalAQ :1; /**< [in]: Set this to 1 to enable temporal AQ for H.264 */ + uint32_t zeroReorderDelay :1; /**< [in]: Set this to 1 to indicate zero latency operation (no reordering delay, num_reorder_frames=0) */ + uint32_t enableNonRefP :1; /**< [in]: Set this to 1 to enable automatic insertion of non-reference P-frames (no effect if enablePTD=0) */ + uint32_t strictGOPTarget :1; /**< [in]: Set this to 1 to minimize GOP-to-GOP rate fluctuations */ + uint32_t aqStrength :4; /**< [in]: When AQ (Spatial) is enabled (i.e. NV_ENC_RC_PARAMS::enableAQ is set), this field is used to specify AQ strength. AQ strength scale is from 1 (low) - 15 (aggressive). If not set, strength is autoselected by driver. Currently supported only with h264 */ + uint32_t reservedBitFields :16; /**< [in]: Reserved bitfields and must be set to 0 */ + NV_ENC_QP minQP; /**< [in]: Specifies the minimum QP used for rate control. Client must set NV_ENC_CONFIG::enableMinQP to 1. 
*/ + NV_ENC_QP maxQP; /**< [in]: Specifies the maximum QP used for rate control. Client must set NV_ENC_CONFIG::enableMaxQP to 1. */ + NV_ENC_QP initialRCQP; /**< [in]: Specifies the initial QP used for rate control. Client must set NV_ENC_CONFIG::enableInitialRCQP to 1. */ + uint32_t temporallayerIdxMask; /**< [in]: Specifies the temporal layers (as a bitmask) whose QPs have changed. Valid max bitmask is [2^NV_ENC_CAPS_NUM_MAX_TEMPORAL_LAYERS - 1] */ + uint8_t temporalLayerQP[8]; /**< [in]: Specifies the temporal layer QPs used for rate control. Temporal layer index is used as as the array index */ + uint16_t targetQuality; /**< [in]: Target CQ (Constant Quality) level for VBR mode (range 0-51 with 0-automatic) */ + uint16_t lookaheadDepth; /**< [in]: Maximum depth of lookahead with range 0-32 (only used if enableLookahead=1) */ + uint32_t reserved[9]; + } NV_ENC_RC_PARAMS; + +/** macro for constructing the version field of ::_NV_ENC_RC_PARAMS */ +#define NV_ENC_RC_PARAMS_VER NVENCAPI_STRUCT_VERSION(1) + + + +/** + * \struct _NV_ENC_CONFIG_H264_VUI_PARAMETERS + * H264 Video Usability Info parameters + */ +typedef struct _NV_ENC_CONFIG_H264_VUI_PARAMETERS +{ + uint32_t overscanInfoPresentFlag; /**< [in]: if set to 1 , it specifies that the overscanInfo is present */ + uint32_t overscanInfo; /**< [in]: Specifies the overscan info(as defined in Annex E of the ITU-T Specification). */ + uint32_t videoSignalTypePresentFlag; /**< [in]: If set to 1, it specifies that the videoFormat, videoFullRangeFlag and colourDescriptionPresentFlag are present. */ + uint32_t videoFormat; /**< [in]: Specifies the source video format(as defined in Annex E of the ITU-T Specification).*/ + uint32_t videoFullRangeFlag; /**< [in]: Specifies the output range of the luma and chroma samples(as defined in Annex E of the ITU-T Specification). 
*/ + uint32_t colourDescriptionPresentFlag; /**< [in]: If set to 1, it specifies that the colourPrimaries, transferCharacteristics and colourMatrix are present. */ + uint32_t colourPrimaries; /**< [in]: Specifies color primaries for converting to RGB(as defined in Annex E of the ITU-T Specification) */ + uint32_t transferCharacteristics; /**< [in]: Specifies the opto-electronic transfer characteristics to use (as defined in Annex E of the ITU-T Specification) */ + uint32_t colourMatrix; /**< [in]: Specifies the matrix coefficients used in deriving the luma and chroma from the RGB primaries (as defined in Annex E of the ITU-T Specification). */ + uint32_t chromaSampleLocationFlag; /**< [in]: if set to 1 , it specifies that the chromaSampleLocationTop and chromaSampleLocationBot are present.*/ + uint32_t chromaSampleLocationTop; /**< [in]: Specifies the chroma sample location for top field(as defined in Annex E of the ITU-T Specification) */ + uint32_t chromaSampleLocationBot; /**< [in]: Specifies the chroma sample location for bottom field(as defined in Annex E of the ITU-T Specification) */ + uint32_t bitstreamRestrictionFlag; /**< [in]: if set to 1, it specifies the bitstream restriction parameters are present in the bitstream.*/ + uint32_t reserved[15]; +}NV_ENC_CONFIG_H264_VUI_PARAMETERS; + +typedef NV_ENC_CONFIG_H264_VUI_PARAMETERS NV_ENC_CONFIG_HEVC_VUI_PARAMETERS; + +/** + * \struct _NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE + * External motion vector hint counts per block type. + */ +typedef struct _NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE +{ + uint32_t numCandsPerBlk16x16 : 4; /**< [in]: Specifies the number of candidates per 16x16 block. */ + uint32_t numCandsPerBlk16x8 : 4; /**< [in]: Specifies the number of candidates per 16x8 block. */ + uint32_t numCandsPerBlk8x16 : 4; /**< [in]: Specifies the number of candidates per 8x16 block. */ + uint32_t numCandsPerBlk8x8 : 4; /**< [in]: Specifies the number of candidates per 8x8 block. 
*/ + uint32_t reserved : 16; /**< [in]: Reserved for padding. */ + uint32_t reserved1[3]; /**< [in]: Reserved for future use. */ +} NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE; + + +/** + * \struct _NVENC_EXTERNAL_ME_HINT + * External Motion Vector hint structure. + */ +typedef struct _NVENC_EXTERNAL_ME_HINT +{ + int32_t mvx : 12; /**< [in]: Specifies the x component of integer pixel MV (relative to current MB) S12.0. */ + int32_t mvy : 10; /**< [in]: Specifies the y component of integer pixel MV (relative to current MB) S10.0 .*/ + int32_t refidx : 5; /**< [in]: Specifies the reference index (31=invalid). Current we support only 1 reference frame per direction for external hints, so \p refidx must be 0. */ + int32_t dir : 1; /**< [in]: Specifies the direction of motion estimation . 0=L0 1=L1.*/ + int32_t partType : 2; /**< [in]: Specifies the block partition type.0=16x16 1=16x8 2=8x16 3=8x8 (blocks in partition must be consecutive).*/ + int32_t lastofPart : 1; /**< [in]: Set to 1 for the last MV of (sub) partition */ + int32_t lastOfMB : 1; /**< [in]: Set to 1 for the last MV of macroblock. */ +} NVENC_EXTERNAL_ME_HINT; + + +/** + * \struct _NV_ENC_CONFIG_H264 + * H264 encoder configuration parameters + */ +typedef struct _NV_ENC_CONFIG_H264 +{ + uint32_t enableTemporalSVC :1; /**< [in]: Set to 1 to enable SVC temporal*/ + uint32_t enableStereoMVC :1; /**< [in]: Set to 1 to enable stereo MVC*/ + uint32_t hierarchicalPFrames :1; /**< [in]: Set to 1 to enable hierarchical PFrames */ + uint32_t hierarchicalBFrames :1; /**< [in]: Set to 1 to enable hierarchical BFrames */ + uint32_t outputBufferingPeriodSEI :1; /**< [in]: Set to 1 to write SEI buffering period syntax in the bitstream */ + uint32_t outputPictureTimingSEI :1; /**< [in]: Set to 1 to write SEI picture timing syntax in the bitstream. 
When set for following rateControlMode : NV_ENC_PARAMS_RC_CBR, NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ, + NV_ENC_PARAMS_RC_CBR_HQ, filler data is inserted if needed to achieve hrd bitrate */ + uint32_t outputAUD :1; /**< [in]: Set to 1 to write access unit delimiter syntax in bitstream */ + uint32_t disableSPSPPS :1; /**< [in]: Set to 1 to disable writing of Sequence and Picture parameter info in bitstream */ + uint32_t outputFramePackingSEI :1; /**< [in]: Set to 1 to enable writing of frame packing arrangement SEI messages to bitstream */ + uint32_t outputRecoveryPointSEI :1; /**< [in]: Set to 1 to enable writing of recovery point SEI message */ + uint32_t enableIntraRefresh :1; /**< [in]: Set to 1 to enable gradual decoder refresh or intra refresh. If the GOP structure uses B frames this will be ignored */ + uint32_t enableConstrainedEncoding :1; /**< [in]: Set this to 1 to enable constrainedFrame encoding where each slice in the constarined picture is independent of other slices + Check support for constrained encoding using ::NV_ENC_CAPS_SUPPORT_CONSTRAINED_ENCODING caps. */ + uint32_t repeatSPSPPS :1; /**< [in]: Set to 1 to enable writing of Sequence and Picture parameter for every IDR frame */ + uint32_t enableVFR :1; /**< [in]: Set to 1 to enable variable frame rate. */ + uint32_t enableLTR :1; /**< [in]: Currently this feature is not available and must be set to 0. Set to 1 to enable LTR support and auto-mark the first */ + uint32_t qpPrimeYZeroTransformBypassFlag :1; /**< [in]: To enable lossless encode set this to 1, set QP to 0 and RC_mode to NV_ENC_PARAMS_RC_CONSTQP and profile to HIGH_444_PREDICTIVE_PROFILE. + Check support for lossless encoding using ::NV_ENC_CAPS_SUPPORT_LOSSLESS_ENCODE caps. */ + uint32_t useConstrainedIntraPred :1; /**< [in]: Set 1 to enable constrained intra prediction. */ + uint32_t reservedBitFields :15; /**< [in]: Reserved bitfields and must be set to 0 */ + uint32_t level; /**< [in]: Specifies the encoding level. 
Client is recommended to set this to NV_ENC_LEVEL_AUTOSELECT in order to enable the NvEncodeAPI interface to select the correct level. */ + uint32_t idrPeriod; /**< [in]: Specifies the IDR interval. If not set, this is made equal to gopLength in NV_ENC_CONFIG.Low latency application client can set IDR interval to NVENC_INFINITE_GOPLENGTH so that IDR frames are not inserted automatically. */ + uint32_t separateColourPlaneFlag; /**< [in]: Set to 1 to enable 4:4:4 separate colour planes */ + uint32_t disableDeblockingFilterIDC; /**< [in]: Specifies the deblocking filter mode. Permissible value range: [0,2] */ + uint32_t numTemporalLayers; /**< [in]: Specifies max temporal layers to be used for hierarchical coding. Valid value range is [1,::NV_ENC_CAPS_NUM_MAX_TEMPORAL_LAYERS] */ + uint32_t spsId; /**< [in]: Specifies the SPS id of the sequence header. Currently reserved and must be set to 0. */ + uint32_t ppsId; /**< [in]: Specifies the PPS id of the picture header. Currently reserved and must be set to 0. */ + NV_ENC_H264_ADAPTIVE_TRANSFORM_MODE adaptiveTransformMode; /**< [in]: Specifies the AdaptiveTransform Mode. Check support for AdaptiveTransform mode using ::NV_ENC_CAPS_SUPPORT_ADAPTIVE_TRANSFORM caps. */ + NV_ENC_H264_FMO_MODE fmoMode; /**< [in]: Specified the FMO Mode. Check support for FMO using ::NV_ENC_CAPS_SUPPORT_FMO caps. */ + NV_ENC_H264_BDIRECT_MODE bdirectMode; /**< [in]: Specifies the BDirect mode. Check support for BDirect mode using ::NV_ENC_CAPS_SUPPORT_BDIRECT_MODE caps.*/ + NV_ENC_H264_ENTROPY_CODING_MODE entropyCodingMode; /**< [in]: Specifies the entropy coding mode. Check support for CABAC mode using ::NV_ENC_CAPS_SUPPORT_CABAC caps. */ + NV_ENC_STEREO_PACKING_MODE stereoMode; /**< [in]: Specifies the stereo frame packing mode which is to be signalled in frame packing arrangement SEI */ + uint32_t intraRefreshPeriod; /**< [in]: Specifies the interval between successive intra refresh if enableIntrarefresh is set. 
Requires enableIntraRefresh to be set. + Will be disabled if NV_ENC_CONFIG::gopLength is not set to NVENC_INFINITE_GOPLENGTH. */ + uint32_t intraRefreshCnt; /**< [in]: Specifies the length of intra refresh in number of frames for periodic intra refresh. This value should be smaller than intraRefreshPeriod */ + uint32_t maxNumRefFrames; /**< [in]: Specifies the DPB size used for encoding. Setting it to 0 will let driver use the default dpb size. + The low latency application which wants to invalidate reference frame as an error resilience tool + is recommended to use a large DPB size so that the encoder can keep old reference frames which can be used if recent + frames are invalidated. */ + uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices + sliceMode = 0 MB based slices, sliceMode = 1 Byte based slices, sliceMode = 2 MB row based slices, sliceMode = 3, numSlices in Picture + When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting + When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */ + uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For: + sliceMode = 0, sliceModeData specifies # of MBs in each slice (except last slice) + sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice) + sliceMode = 2, sliceModeData specifies # of MB rows in each slice (except last slice) + sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */ + NV_ENC_CONFIG_H264_VUI_PARAMETERS h264VUIParameters; /**< [in]: Specifies the H264 video usability info pamameters */ + uint32_t ltrNumFrames; /**< [in]: Specifies the number of LTR frames used. + If ltrTrustMode=1, encoder will mark first numLTRFrames base layer reference frames within each IDR interval as LTR. 
+ If ltrMarkFrame=1, ltrNumFrames specifies maximum number of ltr frames in DPB. + If ltrNumFrames value is more that DPB size(maxNumRefFrames) encoder will take decision on its own. */ + uint32_t ltrTrustMode; /**< [in]: Specifies the LTR operating mode. + Set to 0 to disallow encoding using LTR frames until later specified. + Set to 1 to allow encoding using LTR frames unless later invalidated.*/ + uint32_t chromaFormatIDC; /**< [in]: Specifies the chroma format. Should be set to 1 for yuv420 input, 3 for yuv444 input. + Check support for YUV444 encoding using ::NV_ENC_CAPS_SUPPORT_YUV444_ENCODE caps.*/ + uint32_t maxTemporalLayers; /**< [in]: Specifies the max temporal layer used for hierarchical coding. */ + uint32_t reserved1[270]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_CONFIG_H264; + + +/** + * \struct _NV_ENC_CONFIG_HEVC + * HEVC encoder configuration parameters to be set during initialization. + */ +typedef struct _NV_ENC_CONFIG_HEVC +{ + uint32_t level; /**< [in]: Specifies the level of the encoded bitstream.*/ + uint32_t tier; /**< [in]: Specifies the level tier of the encoded bitstream.*/ + NV_ENC_HEVC_CUSIZE minCUSize; /**< [in]: Specifies the minimum size of luma coding unit.*/ + NV_ENC_HEVC_CUSIZE maxCUSize; /**< [in]: Specifies the maximum size of luma coding unit. Currently NVENC SDK only supports maxCUSize equal to NV_ENC_HEVC_CUSIZE_32x32.*/ + uint32_t useConstrainedIntraPred :1; /**< [in]: Set 1 to enable constrained intra prediction. */ + uint32_t disableDeblockAcrossSliceBoundary :1; /**< [in]: Set 1 to disable in loop filtering across slice boundary.*/ + uint32_t outputBufferingPeriodSEI :1; /**< [in]: Set 1 to write SEI buffering period syntax in the bitstream */ + uint32_t outputPictureTimingSEI :1; /**< [in]: Set 1 to write SEI picture timing syntax in the bitstream */ + uint32_t outputAUD :1; /**< [in]: Set 1 to write Access Unit Delimiter syntax. 
*/ + uint32_t enableLTR :1; /**< [in]: Set 1 to enable use of long term reference pictures for inter prediction. */ + uint32_t disableSPSPPS :1; /**< [in]: Set 1 to disable VPS,SPS and PPS signalling in the bitstream. */ + uint32_t repeatSPSPPS :1; /**< [in]: Set 1 to output VPS,SPS and PPS for every IDR frame.*/ + uint32_t enableIntraRefresh :1; /**< [in]: Set 1 to enable gradual decoder refresh or intra refresh. If the GOP structure uses B frames this will be ignored */ + uint32_t chromaFormatIDC :2; /**< [in]: Specifies the chroma format. Should be set to 1 for yuv420 input, 3 for yuv444 input.*/ + uint32_t pixelBitDepthMinus8 :3; /**< [in]: Specifies pixel bit depth minus 8. Should be set to 0 for 8 bit input, 2 for 10 bit input.*/ + uint32_t reserved :18; /**< [in]: Reserved bitfields.*/ + uint32_t idrPeriod; /**< [in]: Specifies the IDR interval. If not set, this is made equal to gopLength in NV_ENC_CONFIG.Low latency application client can set IDR interval to NVENC_INFINITE_GOPLENGTH so that IDR frames are not inserted automatically. */ + uint32_t intraRefreshPeriod; /**< [in]: Specifies the interval between successive intra refresh if enableIntrarefresh is set. Requires enableIntraRefresh to be set. + Will be disabled if NV_ENC_CONFIG::gopLength is not set to NVENC_INFINITE_GOPLENGTH. */ + uint32_t intraRefreshCnt; /**< [in]: Specifies the length of intra refresh in number of frames for periodic intra refresh. This value should be smaller than intraRefreshPeriod */ + uint32_t maxNumRefFramesInDPB; /**< [in]: Specifies the maximum number of references frames in the DPB.*/ + uint32_t ltrNumFrames; /**< [in]: Specifies the number of LTR frames used. + If ltrTrustMode=1, encoder will mark first numLTRFrames base layer reference frames within each IDR interval as LTR. + If ltrMarkFrame=1, ltrNumFrames specifies maximum number of ltr frames in DPB. + If ltrNumFrames value is more that DPB size(maxNumRefFramesInDPB) encoder will take decision on its own. 
*/ + uint32_t vpsId; /**< [in]: Specifies the VPS id of the video parameter set. Currently reserved and must be set to 0. */ + uint32_t spsId; /**< [in]: Specifies the SPS id of the sequence header. Currently reserved and must be set to 0. */ + uint32_t ppsId; /**< [in]: Specifies the PPS id of the picture header. Currently reserved and must be set to 0. */ + uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices + sliceMode = 0 CTU based slices, sliceMode = 1 Byte based slices, sliceMode = 2 CTU row based slices, sliceMode = 3, numSlices in Picture + When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */ + uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For: + sliceMode = 0, sliceModeData specifies # of CTUs in each slice (except last slice) + sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice) + sliceMode = 2, sliceModeData specifies # of CTU rows in each slice (except last slice) + sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */ + uint32_t maxTemporalLayersMinus1; /**< [in]: Specifies the max temporal layer used for hierarchical coding. */ + NV_ENC_CONFIG_HEVC_VUI_PARAMETERS hevcVUIParameters; /**< [in]: Specifies the HEVC video usability info pamameters */ + uint32_t ltrTrustMode; /**< [in]: Specifies the LTR operating mode. + Set to 0 to disallow encoding using LTR frames until later specified. + Set to 1 to allow encoding using LTR frames unless later invalidated.*/ + uint32_t reserved1[217]; /**< [in]: Reserved and must be set to 0.*/ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_CONFIG_HEVC; + +/** + * \struct _NV_ENC_CODEC_CONFIG + * Codec-specific encoder configuration parameters to be set during initialization. 
+ */ +typedef union _NV_ENC_CODEC_CONFIG +{ + NV_ENC_CONFIG_H264 h264Config; /**< [in]: Specifies the H.264-specific encoder configuration. */ + NV_ENC_CONFIG_HEVC hevcConfig; /**< [in]: Specifies the HEVC-specific encoder configuration. */ + uint32_t reserved[256]; /**< [in]: Reserved and must be set to 0 */ +} NV_ENC_CODEC_CONFIG; + + +/** + * \struct _NV_ENC_CONFIG + * Encoder configuration parameters to be set during initialization. + */ +typedef struct _NV_ENC_CONFIG +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_CONFIG_VER. */ + GUID profileGUID; /**< [in]: Specifies the codec profile guid. If client specifies \p NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID the NvEncodeAPI interface will select the appropriate codec profile. */ + uint32_t gopLength; /**< [in]: Specifies the number of pictures in one GOP. Low latency application client can set goplength to NVENC_INFINITE_GOPLENGTH so that keyframes are not inserted automatically. */ + int32_t frameIntervalP; /**< [in]: Specifies the GOP pattern as follows: \p frameIntervalP = 0: I, 1: IPP, 2: IBP, 3: IBBP If goplength is set to NVENC_INFINITE_GOPLENGTH \p frameIntervalP should be set to 1. */ + uint32_t monoChromeEncoding; /**< [in]: Set this to 1 to enable monochrome encoding for this session. */ + NV_ENC_PARAMS_FRAME_FIELD_MODE frameFieldMode; /**< [in]: Specifies the frame/field mode. + Check support for field encoding using ::NV_ENC_CAPS_SUPPORT_FIELD_ENCODING caps. + Using a frameFieldMode other than NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME for RGB input is not supported. */ + NV_ENC_MV_PRECISION mvPrecision; /**< [in]: Specifies the desired motion vector prediction precision. */ + NV_ENC_RC_PARAMS rcParams; /**< [in]: Specifies the rate control parameters for the current encoding session. */ + NV_ENC_CODEC_CONFIG encodeCodecConfig; /**< [in]: Specifies the codec specific config parameters through this union. 
*/ + uint32_t reserved [278]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_CONFIG; + +/** macro for constructing the version field of ::_NV_ENC_CONFIG */ +#define NV_ENC_CONFIG_VER (NVENCAPI_STRUCT_VERSION(6) | ( 1<<31 )) + + +/** + * \struct _NV_ENC_INITIALIZE_PARAMS + * Encode Session Initialization parameters. + */ +typedef struct _NV_ENC_INITIALIZE_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_INITIALIZE_PARAMS_VER. */ + GUID encodeGUID; /**< [in]: Specifies the Encode GUID for which the encoder is being created. ::NvEncInitializeEncoder() API will fail if this is not set, or set to unsupported value. */ + GUID presetGUID; /**< [in]: Specifies the preset for encoding. If the preset GUID is set then , the preset configuration will be applied before any other parameter. */ + uint32_t encodeWidth; /**< [in]: Specifies the encode width. If not set ::NvEncInitializeEncoder() API will fail. */ + uint32_t encodeHeight; /**< [in]: Specifies the encode height. If not set ::NvEncInitializeEncoder() API will fail. */ + uint32_t darWidth; /**< [in]: Specifies the display aspect ratio Width. */ + uint32_t darHeight; /**< [in]: Specifies the display aspect ratio height. */ + uint32_t frameRateNum; /**< [in]: Specifies the numerator for frame rate used for encoding in frames per second ( Frame rate = frameRateNum / frameRateDen ). */ + uint32_t frameRateDen; /**< [in]: Specifies the denominator for frame rate used for encoding in frames per second ( Frame rate = frameRateNum / frameRateDen ). */ + uint32_t enableEncodeAsync; /**< [in]: Set this to 1 to enable asynchronous mode and is expected to use events to get picture completion notification. */ + uint32_t enablePTD; /**< [in]: Set this to 1 to enable the Picture Type Decision is be taken by the NvEncodeAPI interface. 
*/ + uint32_t reportSliceOffsets :1; /**< [in]: Set this to 1 to enable reporting slice offsets in ::_NV_ENC_LOCK_BITSTREAM. NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync must be set to 0 to use this feature. Client must set this to 0 if NV_ENC_CONFIG_H264::sliceMode is 1 on Kepler GPUs */ + uint32_t enableSubFrameWrite :1; /**< [in]: Set this to 1 to write out available bitstream to memory at subframe intervals */ + uint32_t enableExternalMEHints :1; /**< [in]: Set to 1 to enable external ME hints for the current frame. For NV_ENC_INITIALIZE_PARAMS::enablePTD=1 with B frames, programming L1 hints is optional for B frames since Client doesn't know internal GOP structure. + NV_ENC_PIC_PARAMS::meHintRefPicDist should preferably be set with enablePTD=1. */ + uint32_t enableMEOnlyMode :1; /**< [in]: Set to 1 to enable ME Only Mode .*/ + uint32_t reservedBitFields :28; /**< [in]: Reserved bitfields and must be set to 0 */ + uint32_t privDataSize; /**< [in]: Reserved private data buffer size and must be set to 0 */ + void* privData; /**< [in]: Reserved private data buffer and must be set to NULL */ + NV_ENC_CONFIG* encodeConfig; /**< [in]: Specifies the advanced codec specific structure. If client has sent a valid codec config structure, it will override parameters set by the NV_ENC_INITIALIZE_PARAMS::presetGUID parameter. If set to NULL the NvEncodeAPI interface will use the NV_ENC_INITIALIZE_PARAMS::presetGUID to set the codec specific parameters. + Client can also optionally query the NvEncodeAPI interface to get codec specific parameters for a presetGUID using ::NvEncGetEncodePresetConfig() API. It can then modify (if required) some of the codec config parameters and send down a custom config structure as part of ::_NV_ENC_INITIALIZE_PARAMS. + Even in this case client is recommended to pass the same preset guid it has used in ::NvEncGetEncodePresetConfig() API to query the config structure; as NV_ENC_INITIALIZE_PARAMS::presetGUID. 
This will not override the custom config structure but will be used to determine other Encoder HW specific parameters not exposed in the API. */ + uint32_t maxEncodeWidth; /**< [in]: Maximum encode width to be used for current Encode session. + Client should allocate output buffers according to this dimension for dynamic resolution change. If set to 0, Encoder will not allow dynamic resolution change. */ + uint32_t maxEncodeHeight; /**< [in]: Maximum encode height to be allowed for current Encode session. + Client should allocate output buffers according to this dimension for dynamic resolution change. If set to 0, Encode will not allow dynamic resolution change. */ + NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE maxMEHintCountsPerBlock[2]; /**< [in]: If Client wants to pass external motion vectors in NV_ENC_PIC_PARAMS::meExternalHints buffer it must specify the maximum number of hint candidates per block per direction for the encode session. + The NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[0] is for L0 predictors and NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[1] is for L1 predictors. + This client must also set NV_ENC_INITIALIZE_PARAMS::enableExternalMEHints to 1. */ + uint32_t reserved [289]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_INITIALIZE_PARAMS; + +/** macro for constructing the version field of ::_NV_ENC_INITIALIZE_PARAMS */ +#define NV_ENC_INITIALIZE_PARAMS_VER (NVENCAPI_STRUCT_VERSION(5) | ( 1<<31 )) + + +/** + * \struct _NV_ENC_RECONFIGURE_PARAMS + * Encode Session Reconfigured parameters. + */ +typedef struct _NV_ENC_RECONFIGURE_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_RECONFIGURE_PARAMS_VER. */ + NV_ENC_INITIALIZE_PARAMS reInitEncodeParams; /**< [in]: Encoder session re-initialization parameters. */ + uint32_t resetEncoder :1; /**< [in]: This resets the rate control states and other internal encoder states. 
This should be used only with an IDR frame. + If NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1, encoder will force the frame type to IDR */ + uint32_t forceIDR :1; /**< [in]: Encode the current picture as an IDR picture. This flag is only valid when Picture type decision is taken by the Encoder + [_NV_ENC_INITIALIZE_PARAMS::enablePTD == 1]. */ + uint32_t reserved :30; + +}NV_ENC_RECONFIGURE_PARAMS; + +/** macro for constructing the version field of ::_NV_ENC_RECONFIGURE_PARAMS */ +#define NV_ENC_RECONFIGURE_PARAMS_VER (NVENCAPI_STRUCT_VERSION(1) | ( 1<<31 )) + +/** + * \struct _NV_ENC_PRESET_CONFIG + * Encoder preset config + */ +typedef struct _NV_ENC_PRESET_CONFIG +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_PRESET_CONFIG_VER. */ + NV_ENC_CONFIG presetCfg; /**< [out]: preset config returned by the Nvidia Video Encoder interface. */ + uint32_t reserved1[255]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +}NV_ENC_PRESET_CONFIG; + +/** macro for constructing the version field of ::_NV_ENC_PRESET_CONFIG */ +#define NV_ENC_PRESET_CONFIG_VER (NVENCAPI_STRUCT_VERSION(4) | ( 1<<31 )) + + +/** + * \struct _NV_ENC_SEI_PAYLOAD + * User SEI message + */ +typedef struct _NV_ENC_SEI_PAYLOAD +{ + uint32_t payloadSize; /**< [in] SEI payload size in bytes. SEI payload must be byte aligned, as described in Annex D */ + uint32_t payloadType; /**< [in] SEI payload types and syntax can be found in Annex D of the H.264 Specification. */ + uint8_t *payload; /**< [in] pointer to user data */ +} NV_ENC_SEI_PAYLOAD; + +#define NV_ENC_H264_SEI_PAYLOAD NV_ENC_SEI_PAYLOAD + +/** + * \struct _NV_ENC_PIC_PARAMS_H264 + * H264 specific enc pic params. sent on a per frame basis. + */ +typedef struct _NV_ENC_PIC_PARAMS_H264 +{ + uint32_t displayPOCSyntax; /**< [in]: Specifies the display POC syntax This is required to be set if client is handling the picture type decision. 
*/ + uint32_t reserved3; /**< [in]: Reserved and must be set to 0 */ + uint32_t refPicFlag; /**< [in]: Set to 1 for a reference picture. This is ignored if NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1. */ + uint32_t colourPlaneId; /**< [in]: Specifies the colour plane ID associated with the current input. */ + uint32_t forceIntraRefreshWithFrameCnt; /**< [in]: Forces an intra refresh with duration equal to intraRefreshFrameCnt. + When outputRecoveryPointSEI is set this is value is used for recovery_frame_cnt in recovery point SEI message + forceIntraRefreshWithFrameCnt cannot be used if B frames are used in the GOP structure specified */ + uint32_t constrainedFrame :1; /**< [in]: Set to 1 if client wants to encode this frame with each slice completely independent of other slices in the frame. + NV_ENC_INITIALIZE_PARAMS::enableConstrainedEncoding should be set to 1 */ + uint32_t sliceModeDataUpdate :1; /**< [in]: Set to 1 if client wants to change the sliceModeData field to specify new sliceSize Parameter + When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting */ + uint32_t ltrMarkFrame :1; /**< [in]: Set to 1 if client wants to mark this frame as LTR */ + uint32_t ltrUseFrames :1; /**< [in]: Set to 1 if client allows encoding this frame using the LTR frames specified in ltrFrameBitmap */ + uint32_t reservedBitFields :28; /**< [in]: Reserved bit fields and must be set to 0 */ + uint8_t* sliceTypeData; /**< [in]: Deprecated. */ + uint32_t sliceTypeArrayCnt; /**< [in]: Deprecated. */ + uint32_t seiPayloadArrayCnt; /**< [in]: Specifies the number of elements allocated in seiPayloadArray array. */ + NV_ENC_SEI_PAYLOAD* seiPayloadArray; /**< [in]: Array of SEI payloads which will be inserted for this frame. 
*/ + uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices + sliceMode = 0 MB based slices, sliceMode = 1 Byte based slices, sliceMode = 2 MB row based slices, sliceMode = 3, numSlices in Picture + When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting + When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */ + uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For: + sliceMode = 0, sliceModeData specifies # of MBs in each slice (except last slice) + sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice) + sliceMode = 2, sliceModeData specifies # of MB rows in each slice (except last slice) + sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */ + uint32_t ltrMarkFrameIdx; /**< [in]: Specifies the long term referenceframe index to use for marking this frame as LTR.*/ + uint32_t ltrUseFrameBitmap; /**< [in]: Specifies the the associated bitmap of LTR frame indices when encoding this frame. */ + uint32_t ltrUsageMode; /**< [in]: Specifies additional usage constraints for encoding using LTR frames from this point further. 0: no constraints, 1: no short term refs older than current, no previous LTR frames.*/ + uint32_t reserved [243]; /**< [in]: Reserved and must be set to 0. */ + void* reserved2[62]; /**< [in]: Reserved and must be set to NULL. */ +} NV_ENC_PIC_PARAMS_H264; + +/** + * \struct _NV_ENC_PIC_PARAMS_HEVC + * HEVC specific enc pic params. sent on a per frame basis. + */ +typedef struct _NV_ENC_PIC_PARAMS_HEVC +{ + uint32_t displayPOCSyntax; /**< [in]: Specifies the display POC syntax This is required to be set if client is handling the picture type decision. */ + uint32_t refPicFlag; /**< [in]: Set to 1 for a reference picture. 
This is ignored if NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1. */ + uint32_t temporalId; /**< [in]: Specifies the temporal id of the picture */ + uint32_t forceIntraRefreshWithFrameCnt; /**< [in]: Forces an intra refresh with duration equal to intraRefreshFrameCnt. + When outputRecoveryPointSEI is set this is value is used for recovery_frame_cnt in recovery point SEI message + forceIntraRefreshWithFrameCnt cannot be used if B frames are used in the GOP structure specified */ + uint32_t constrainedFrame :1; /**< [in]: Set to 1 if client wants to encode this frame with each slice completely independent of other slices in the frame. + NV_ENC_INITIALIZE_PARAMS::enableConstrainedEncoding should be set to 1 */ + uint32_t sliceModeDataUpdate :1; /**< [in]: Set to 1 if client wants to change the sliceModeData field to specify new sliceSize Parameter + When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting */ + uint32_t ltrMarkFrame :1; /**< [in]: Set to 1 if client wants to mark this frame as LTR */ + uint32_t ltrUseFrames :1; /**< [in]: Set to 1 if client allows encoding this frame using the LTR frames specified in ltrFrameBitmap */ + uint32_t reservedBitFields :28; /**< [in]: Reserved bit fields and must be set to 0 */ + uint8_t* sliceTypeData; /**< [in]: Array which specifies the slice type used to force intra slice for a particular slice. Currently supported only for NV_ENC_CONFIG_H264::sliceMode == 3. + Client should allocate array of size sliceModeData where sliceModeData is specified in field of ::_NV_ENC_CONFIG_H264 + Array element with index n corresponds to nth slice. To force a particular slice to intra client should set corresponding array element to NV_ENC_SLICE_TYPE_I + all other array elements should be set to NV_ENC_SLICE_TYPE_DEFAULT */ + uint32_t sliceTypeArrayCnt; /**< [in]: Client should set this to the number of elements allocated in sliceTypeData array. 
If sliceTypeData is NULL then this should be set to 0 */ + uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices + sliceMode = 0 CTU based slices, sliceMode = 1 Byte based slices, sliceMode = 2 CTU row based slices, sliceMode = 3, numSlices in Picture + When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting + When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */ + uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For: + sliceMode = 0, sliceModeData specifies # of CTUs in each slice (except last slice) + sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice) + sliceMode = 2, sliceModeData specifies # of CTU rows in each slice (except last slice) + sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */ + uint32_t ltrMarkFrameIdx; /**< [in]: Specifies the long term reference frame index to use for marking this frame as LTR.*/ + uint32_t ltrUseFrameBitmap; /**< [in]: Specifies the associated bitmap of LTR frame indices when encoding this frame. */ + uint32_t ltrUsageMode; /**< [in]: Specifies additional usage constraints for encoding using LTR frames from this point further. 0: no constraints, 1: no short term refs older than current, no previous LTR frames.*/ + uint32_t seiPayloadArrayCnt; /**< [in]: Specifies the number of elements allocated in seiPayloadArray array. */ + uint32_t reserved; /**< [in]: Reserved and must be set to 0. */ + NV_ENC_SEI_PAYLOAD* seiPayloadArray; /**< [in]: Array of SEI payloads which will be inserted for this frame. */ + uint32_t reserved2 [244]; /**< [in]: Reserved and must be set to 0. */ + void* reserved3[61]; /**< [in]: Reserved and must be set to NULL. */ +} NV_ENC_PIC_PARAMS_HEVC; + + +/** + * Codec specific per-picture encoding parameters. 
+ */ +typedef union _NV_ENC_CODEC_PIC_PARAMS +{ + NV_ENC_PIC_PARAMS_H264 h264PicParams; /**< [in]: H264 encode picture params. */ + NV_ENC_PIC_PARAMS_HEVC hevcPicParams; /**< [in]: HEVC encode picture params. Currently unsupported and must not to be used. */ + uint32_t reserved[256]; /**< [in]: Reserved and must be set to 0. */ +} NV_ENC_CODEC_PIC_PARAMS; + +/** + * \struct _NV_ENC_PIC_PARAMS + * Encoding parameters that need to be sent on a per frame basis. + */ +typedef struct _NV_ENC_PIC_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_PIC_PARAMS_VER. */ + uint32_t inputWidth; /**< [in]: Specifies the input buffer width */ + uint32_t inputHeight; /**< [in]: Specifies the input buffer height */ + uint32_t inputPitch; /**< [in]: Specifies the input buffer pitch. If pitch value is not known, set this to inputWidth. */ + uint32_t encodePicFlags; /**< [in]: Specifies bit-wise OR`ed encode pic flags. See ::NV_ENC_PIC_FLAGS enum. */ + uint32_t frameIdx; /**< [in]: Specifies the frame index associated with the input frame [optional]. */ + uint64_t inputTimeStamp; /**< [in]: Specifies presentation timestamp associated with the input picture. */ + uint64_t inputDuration; /**< [in]: Specifies duration of the input picture */ + NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Specifies the input buffer pointer. Client must use a pointer obtained from ::NvEncCreateInputBuffer() or ::NvEncMapInputResource() APIs.*/ + NV_ENC_OUTPUT_PTR outputBitstream; /**< [in]: Specifies the pointer to output buffer. Client should use a pointer obtained from ::NvEncCreateBitstreamBuffer() API. */ + void* completionEvent; /**< [in]: Specifies an event to be signalled on completion of encoding of this Frame [only if operating in Asynchronous mode]. Each output buffer should be associated with a distinct event pointer. */ + NV_ENC_BUFFER_FORMAT bufferFmt; /**< [in]: Specifies the input buffer format. 
*/ + NV_ENC_PIC_STRUCT pictureStruct; /**< [in]: Specifies structure of the input picture. */ + NV_ENC_PIC_TYPE pictureType; /**< [in]: Specifies input picture type. Client required to be set explicitly by the client if the client has not set NV_ENC_INITALIZE_PARAMS::enablePTD to 1 while calling NvInitializeEncoder. */ + NV_ENC_CODEC_PIC_PARAMS codecPicParams; /**< [in]: Specifies the codec specific per-picture encoding parameters. */ + NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE meHintCountsPerBlock[2]; /**< [in]: Specifies the number of hint candidates per block per direction for the current frame. meHintCountsPerBlock[0] is for L0 predictors and meHintCountsPerBlock[1] is for L1 predictors. + The candidate count in NV_ENC_PIC_PARAMS::meHintCountsPerBlock[lx] must never exceed NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[lx] provided during encoder intialization. */ + NVENC_EXTERNAL_ME_HINT *meExternalHints; /**< [in]: Specifies the pointer to ME external hints for the current frame. The size of ME hint buffer should be equal to number of macroblocks multiplied by the total number of candidates per macroblock. + The total number of candidates per MB per direction = 1*meHintCountsPerBlock[Lx].numCandsPerBlk16x16 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk16x8 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk8x8 + + 4*meHintCountsPerBlock[Lx].numCandsPerBlk8x8. For frames using bidirectional ME , the total number of candidates for single macroblock is sum of total number of candidates per MB for each direction (L0 and L1) */ + uint32_t reserved1[6]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[2]; /**< [in]: Reserved and must be set to NULL */ + int8_t *qpDeltaMap; /**< [in]: Specifies the pointer to signed byte array containing QP delta value per MB in raster scan order in the current picture. This QP modifier is applied on top of the QP chosen by rate control. 
*/ + uint32_t qpDeltaMapSize; /**< [in]: Specifies the size in bytes of qpDeltaMap surface allocated by client and pointed to by NV_ENC_PIC_PARAMS::qpDeltaMap. Surface (array) should be picWidthInMbs * picHeightInMbs */ + uint32_t reservedBitFields; /**< [in]: Reserved bitfields and must be set to 0 */ + uint16_t meHintRefPicDist[2]; /**< [in]: Specifies temporal distance for reference picture (NVENC_EXTERNAL_ME_HINT::refidx = 0) used during external ME with NV_ENC_INITALIZE_PARAMS::enablePTD = 1 . meHintRefPicDist[0] is for L0 hints and meHintRefPicDist[1] is for L1 hints. + If not set, will internally infer distance of 1. Ignored for NV_ENC_INITALIZE_PARAMS::enablePTD = 0 */ + uint32_t reserved3[286]; /**< [in]: Reserved and must be set to 0 */ + void* reserved4[60]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_PIC_PARAMS; + +/** Macro for constructing the version field of ::_NV_ENC_PIC_PARAMS */ +#define NV_ENC_PIC_PARAMS_VER (NVENCAPI_STRUCT_VERSION(4) | ( 1<<31 )) + + +/** + * \struct _NV_ENC_MEONLY_PARAMS + * MEOnly parameters that need to be sent on a per motion estimation basis. + */ +typedef struct _NV_ENC_MEONLY_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to NV_ENC_MEONLY_PARAMS_VER.*/ + uint32_t inputWidth; /**< [in]: Specifies the input buffer width */ + uint32_t inputHeight; /**< [in]: Specifies the input buffer height */ + NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Specifies the input buffer pointer. Client must use a pointer obtained from NvEncCreateInputBuffer() or NvEncMapInputResource() APIs. */ + NV_ENC_INPUT_PTR referenceFrame; /**< [in]: Specifies the reference frame pointer */ + NV_ENC_OUTPUT_PTR mvBuffer; /**< [in]: Specifies the pointer to motion vector data buffer allocated by NvEncCreateMVBuffer. Client must lock mvBuffer using ::NvEncLockBitstream() API to get the motion vector data. */ + NV_ENC_BUFFER_FORMAT bufferFmt; /**< [in]: Specifies the input buffer format. 
*/ + void* completionEvent; /**< [in]: Specifies an event to be signalled on completion of motion estimation + of this Frame [only if operating in Asynchronous mode]. + Each output buffer should be associated with a distinct event pointer. */ + uint32_t reserved1[252]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[60]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_MEONLY_PARAMS; + +/** NV_ENC_MEONLY_PARAMS struct version*/ +#define NV_ENC_MEONLY_PARAMS_VER NVENCAPI_STRUCT_VERSION(2) + + +/** + * \struct _NV_ENC_LOCK_BITSTREAM + * Bitstream buffer lock parameters. + */ +typedef struct _NV_ENC_LOCK_BITSTREAM +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_LOCK_BITSTREAM_VER. */ + uint32_t doNotWait :1; /**< [in]: If this flag is set, the NvEncodeAPI interface will return buffer pointer even if operation is not completed. If not set, the call will block until operation completes. */ + uint32_t ltrFrame :1; /**< [out]: Flag indicating this frame is marked as LTR frame */ + uint32_t reservedBitFields :30; /**< [in]: Reserved bit fields and must be set to 0 */ + void* outputBitstream; /**< [in]: Pointer to the bitstream buffer being locked. */ + uint32_t* sliceOffsets; /**< [in,out]: Array which receives the slice offsets. This is not supported if NV_ENC_CONFIG_H264::sliceMode is 1 on Kepler GPUs. Array size must be equal to size of frame in MBs. */ + uint32_t frameIdx; /**< [out]: Frame no. for which the bitstream is being retrieved. */ + uint32_t hwEncodeStatus; /**< [out]: The NvEncodeAPI interface status for the locked picture. */ + uint32_t numSlices; /**< [out]: Number of slices in the encoded picture. Will be reported only if NV_ENC_INITIALIZE_PARAMS::reportSliceOffsets set to 1. */ + uint32_t bitstreamSizeInBytes; /**< [out]: Actual number of bytes generated and copied to the memory pointed by bitstreamBufferPtr. 
*/ + uint64_t outputTimeStamp; /**< [out]: Presentation timestamp associated with the encoded output. */ + uint64_t outputDuration; /**< [out]: Presentation duration associates with the encoded output. */ + void* bitstreamBufferPtr; /**< [out]: Pointer to the generated output bitstream. + For MEOnly mode _NV_ENC_LOCK_BITSTREAM::bitstreamBufferPtr should be typecast to + NV_ENC_H264_MV_DATA/NV_ENC_HEVC_MV_DATA pointer respectively for H264/HEVC */ + NV_ENC_PIC_TYPE pictureType; /**< [out]: Picture type of the encoded picture. */ + NV_ENC_PIC_STRUCT pictureStruct; /**< [out]: Structure of the generated output picture. */ + uint32_t frameAvgQP; /**< [out]: Average QP of the frame. */ + uint32_t frameSatd; /**< [out]: Total SATD cost for whole frame. */ + uint32_t ltrFrameIdx; /**< [out]: Frame index associated with this LTR frame. */ + uint32_t ltrFrameBitmap; /**< [out]: Bitmap of LTR frames indices which were used for encoding this frame. Value of 0 if no LTR frames were used. */ + uint32_t reserved [236]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_LOCK_BITSTREAM; + +/** Macro for constructing the version field of ::_NV_ENC_LOCK_BITSTREAM */ +#define NV_ENC_LOCK_BITSTREAM_VER NVENCAPI_STRUCT_VERSION(1) + + +/** + * \struct _NV_ENC_LOCK_INPUT_BUFFER + * Uncompressed Input Buffer lock parameters. + */ +typedef struct _NV_ENC_LOCK_INPUT_BUFFER +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_LOCK_INPUT_BUFFER_VER. */ + uint32_t doNotWait :1; /**< [in]: Set to 1 to make ::NvEncLockInputBuffer() a unblocking call. If the encoding is not completed, driver will return ::NV_ENC_ERR_ENCODER_BUSY error code. 
*/ + uint32_t reservedBitFields :31; /**< [in]: Reserved bitfields and must be set to 0 */ + NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Pointer to the input buffer to be locked, client should pass the pointer obtained from ::NvEncCreateInputBuffer() or ::NvEncMapInputResource API. */ + void* bufferDataPtr; /**< [out]: Pointed to the locked input buffer data. Client can only access input buffer using the \p bufferDataPtr. */ + uint32_t pitch; /**< [out]: Pitch of the locked input buffer. */ + uint32_t reserved1[251]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_LOCK_INPUT_BUFFER; + +/** Macro for constructing the version field of ::_NV_ENC_LOCK_INPUT_BUFFER */ +#define NV_ENC_LOCK_INPUT_BUFFER_VER NVENCAPI_STRUCT_VERSION(1) + + +/** + * \struct _NV_ENC_MAP_INPUT_RESOURCE + * Map an input resource to a Nvidia Encoder Input Buffer + */ +typedef struct _NV_ENC_MAP_INPUT_RESOURCE +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_MAP_INPUT_RESOURCE_VER. */ + uint32_t subResourceIndex; /**< [in]: Deprecated. Do not use. */ + void* inputResource; /**< [in]: Deprecated. Do not use. */ + NV_ENC_REGISTERED_PTR registeredResource; /**< [in]: The Registered resource handle obtained by calling NvEncRegisterInputResource. */ + NV_ENC_INPUT_PTR mappedResource; /**< [out]: Mapped pointer corresponding to the registeredResource. This pointer must be used in NV_ENC_PIC_PARAMS::inputBuffer parameter in ::NvEncEncodePicture() API. */ + NV_ENC_BUFFER_FORMAT mappedBufferFmt; /**< [out]: Buffer format of the outputResource. This buffer format must be used in NV_ENC_PIC_PARAMS::bufferFmt if client using the above mapped resource pointer. */ + uint32_t reserved1[251]; /**< [in]: Reserved and must be set to 0. 
*/ + void* reserved2[63]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_MAP_INPUT_RESOURCE; + +/** Macro for constructing the version field of ::_NV_ENC_MAP_INPUT_RESOURCE */ +#define NV_ENC_MAP_INPUT_RESOURCE_VER NVENCAPI_STRUCT_VERSION(4) + +/** + * \struct _NV_ENC_REGISTER_RESOURCE + * Register a resource for future use with the Nvidia Video Encoder Interface. + */ +typedef struct _NV_ENC_REGISTER_RESOURCE +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_REGISTER_RESOURCE_VER. */ + NV_ENC_INPUT_RESOURCE_TYPE resourceType; /**< [in]: Specifies the type of resource to be registered. Supported values are ::NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX, ::NV_ENC_INPUT_RESOURCE_TYPE_CUDADEVICEPTR. */ + uint32_t width; /**< [in]: Input buffer Width. */ + uint32_t height; /**< [in]: Input buffer Height. */ + uint32_t pitch; /**< [in]: Input buffer Pitch. */ + uint32_t subResourceIndex; /**< [in]: Subresource Index of the DirectX resource to be registered. Should be set to 0 for other interfaces. */ + void* resourceToRegister; /**< [in]: Handle to the resource that is being registered. */ + NV_ENC_REGISTERED_PTR registeredResource; /**< [out]: Registered resource handle. This should be used in future interactions with the Nvidia Video Encoder Interface. */ + NV_ENC_BUFFER_FORMAT bufferFormat; /**< [in]: Buffer format of resource to be registered. */ + uint32_t reserved1[248]; /**< [in]: Reserved and must be set to 0. */ + void* reserved2[62]; /**< [in]: Reserved and must be set to NULL. */ +} NV_ENC_REGISTER_RESOURCE; + +/** Macro for constructing the version field of ::_NV_ENC_REGISTER_RESOURCE */ +#define NV_ENC_REGISTER_RESOURCE_VER NVENCAPI_STRUCT_VERSION(3) + +/** + * \struct _NV_ENC_STAT + * Encode Stats structure. + */ +typedef struct _NV_ENC_STAT +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_STAT_VER. 
*/ + uint32_t reserved; /**< [in]: Reserved and must be set to 0 */ + NV_ENC_OUTPUT_PTR outputBitStream; /**< [out]: Specifies the pointer to output bitstream. */ + uint32_t bitStreamSize; /**< [out]: Size of generated bitstream in bytes. */ + uint32_t picType; /**< [out]: Picture type of encoded picture. See ::NV_ENC_PIC_TYPE. */ + uint32_t lastValidByteOffset; /**< [out]: Offset of last valid bytes of completed bitstream */ + uint32_t sliceOffsets[16]; /**< [out]: Offsets of each slice */ + uint32_t picIdx; /**< [out]: Picture number */ + uint32_t reserved1[233]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_STAT; + +/** Macro for constructing the version field of ::_NV_ENC_STAT */ +#define NV_ENC_STAT_VER NVENCAPI_STRUCT_VERSION(1) + + +/** + * \struct _NV_ENC_SEQUENCE_PARAM_PAYLOAD + * Sequence and picture paramaters payload. + */ +typedef struct _NV_ENC_SEQUENCE_PARAM_PAYLOAD +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_INITIALIZE_PARAMS_VER. */ + uint32_t inBufferSize; /**< [in]: Specifies the size of the spsppsBuffer provied by the client */ + uint32_t spsId; /**< [in]: Specifies the SPS id to be used in sequence header. Default value is 0. */ + uint32_t ppsId; /**< [in]: Specifies the PPS id to be used in picture header. Default value is 0. */ + void* spsppsBuffer; /**< [in]: Specifies bitstream header pointer of size NV_ENC_SEQUENCE_PARAM_PAYLOAD::inBufferSize. It is the client's responsibility to manage this memory. */ + uint32_t* outSPSPPSPayloadSize; /**< [out]: Size of the sequence and picture header in bytes written by the NvEncodeAPI interface to the SPSPPSBuffer. 
*/ + uint32_t reserved [250]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_SEQUENCE_PARAM_PAYLOAD; + +/** Macro for constructing the version field of ::_NV_ENC_SEQUENCE_PARAM_PAYLOAD */ +#define NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER NVENCAPI_STRUCT_VERSION(1) + + +/** + * Event registration/unregistration parameters. + */ +typedef struct _NV_ENC_EVENT_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_EVENT_PARAMS_VER. */ + uint32_t reserved; /**< [in]: Reserved and must be set to 0 */ + void* completionEvent; /**< [in]: Handle to event to be registered/unregistered with the NvEncodeAPI interface. */ + uint32_t reserved1[253]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_EVENT_PARAMS; + +/** Macro for constructing the version field of ::_NV_ENC_EVENT_PARAMS */ +#define NV_ENC_EVENT_PARAMS_VER NVENCAPI_STRUCT_VERSION(1) + +/** + * Encoder Session Creation parameters + */ +typedef struct _NV_ENC_OPEN_ENCODE_SESSIONEX_PARAMS +{ + uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER. */ + NV_ENC_DEVICE_TYPE deviceType; /**< [in]: Specified the device Type */ + void* device; /**< [in]: Pointer to client device. */ + void* reserved; /**< [in]: Reserved and must be set to 0. */ + uint32_t apiVersion; /**< [in]: API version. Should be set to NVENCAPI_VERSION. 
*/ + uint32_t reserved1[253]; /**< [in]: Reserved and must be set to 0 */ + void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS; +/** Macro for constructing the version field of ::_NV_ENC_OPEN_ENCODE_SESSIONEX_PARAMS */ +#define NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER NVENCAPI_STRUCT_VERSION(1) + +/** @} */ /* END ENCODER_STRUCTURE */ + + +/** + * \addtogroup ENCODE_FUNC NvEncodeAPI Functions + * @{ + */ + +// NvEncOpenEncodeSession +/** + * \brief Opens an encoding session. + * + * Deprecated. + * + * \return + * ::NV_ENC_ERR_INVALID_CALL\n + * + */ +NVENCSTATUS NVENCAPI NvEncOpenEncodeSession (void* device, uint32_t deviceType, void** encoder); + +// NvEncGetEncodeGuidCount +/** + * \brief Retrieves the number of supported encode GUIDs. + * + * The function returns the number of codec guids supported by the NvEncodeAPI + * interface. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [out] encodeGUIDCount + * Number of supported encode GUIDs. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeGUIDCount (void* encoder, uint32_t* encodeGUIDCount); + + +// NvEncGetEncodeGUIDs +/** + * \brief Retrieves an array of supported encoder codec GUIDs. + * + * The function returns an array of codec guids supported by the NvEncodeAPI interface. + * The client must allocate an array where the NvEncodeAPI interface can + * fill the supported guids and pass the pointer in \p *GUIDs parameter. + * The size of the array can be determined by using ::NvEncGetEncodeGUIDCount() API. + * The Nvidia Encoding interface returns the number of codec guids it has actually + * filled in the guid array in the \p GUIDCount parameter. 
+ * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] guidArraySize + * Number of GUIDs to retrieved. Should be set to the number retrieved using + * ::NvEncGetEncodeGUIDCount. + * \param [out] GUIDs + * Array of supported Encode GUIDs. + * \param [out] GUIDCount + * Number of supported Encode GUIDs. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeGUIDs (void* encoder, GUID* GUIDs, uint32_t guidArraySize, uint32_t* GUIDCount); + + +// NvEncGetEncodeProfileGuidCount +/** + * \brief Retrieves the number of supported profile GUIDs. + * + * The function returns the number of profile GUIDs supported for a given codec. + * The client must first enumerate the codec guids supported by the NvEncodeAPI + * interface. After determining the codec guid, it can query the NvEncodeAPI + * interface to determine the number of profile guids supported for a particular + * codec guid. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * The codec guid for which the profile guids are being enumerated. + * \param [out] encodeProfileGUIDCount + * Number of encode profiles supported for the given encodeGUID. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeProfileGUIDCount (void* encoder, GUID encodeGUID, uint32_t* encodeProfileGUIDCount); + + +// NvEncGetEncodeProfileGUIDs +/** + * \brief Retrieves an array of supported encode profile GUIDs. 
+ * + * The function returns an array of supported profile guids for a particular + * codec guid. The client must allocate an array where the NvEncodeAPI interface + * can populate the profile guids. The client can determine the array size using + * ::NvEncGetEncodeProfileGUIDCount() API. The client must also validiate that the + * NvEncodeAPI interface supports the GUID the client wants to pass as \p encodeGUID + * parameter. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * The encode guid whose profile guids are being enumerated. + * \param [in] guidArraySize + * Number of GUIDs to be retrieved. Should be set to the number retrieved using + * ::NvEncGetEncodeProfileGUIDCount. + * \param [out] profileGUIDs + * Array of supported Encode Profile GUIDs + * \param [out] GUIDCount + * Number of valid encode profile GUIDs in \p profileGUIDs array. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeProfileGUIDs (void* encoder, GUID encodeGUID, GUID* profileGUIDs, uint32_t guidArraySize, uint32_t* GUIDCount); + +// NvEncGetInputFormatCount +/** + * \brief Retrieve the number of supported Input formats. + * + * The function returns the number of supported input formats. The client must + * query the NvEncodeAPI interface to determine the supported input formats + * before creating the input surfaces. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the number of supported input formats + * is to be retrieved. + * \param [out] inputFmtCount + * Number of input formats supported for specified Encode GUID. 
+ * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + */ +NVENCSTATUS NVENCAPI NvEncGetInputFormatCount (void* encoder, GUID encodeGUID, uint32_t* inputFmtCount); + + +// NvEncGetInputFormats +/** + * \brief Retrieves an array of supported Input formats + * + * Returns an array of supported input formats The client must use the input + * format to create input surface using ::NvEncCreateInputBuffer() API. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the number of supported input formats + * is to be retrieved. + *\param [in] inputFmtArraySize + * Size input format count array passed in \p inputFmts. + *\param [out] inputFmts + * Array of input formats supported for this Encode GUID. + *\param [out] inputFmtCount + * The number of valid input format types returned by the NvEncodeAPI + * interface in \p inputFmts array. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetInputFormats (void* encoder, GUID encodeGUID, NV_ENC_BUFFER_FORMAT* inputFmts, uint32_t inputFmtArraySize, uint32_t* inputFmtCount); + + +// NvEncGetEncodeCaps +/** + * \brief Retrieves the capability value for a specified encoder attribute. + * + * The function returns the capability value for a given encoder attribute. The + * client must validate the encodeGUID using ::NvEncGetEncodeGUIDs() API before + * calling this function. The encoder attribute being queried are enumerated in + * ::NV_ENC_CAPS_PARAM enum. 
+ * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the capability attribute is to be retrieved. + * \param [in] capsParam + * Used to specify attribute being queried. Refer ::NV_ENC_CAPS_PARAM for more + * details. + * \param [out] capsVal + * The value corresponding to the capability attribute being queried. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeCaps (void* encoder, GUID encodeGUID, NV_ENC_CAPS_PARAM* capsParam, int* capsVal); + + +// NvEncGetEncodePresetCount +/** + * \brief Retrieves the number of supported preset GUIDs. + * + * The function returns the number of preset GUIDs available for a given codec. + * The client must validate the codec guid using ::NvEncGetEncodeGUIDs() API + * before calling this function. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the number of supported presets is to + * be retrieved. + * \param [out] encodePresetGUIDCount + * Receives the number of supported preset GUIDs. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodePresetCount (void* encoder, GUID encodeGUID, uint32_t* encodePresetGUIDCount); + + +// NvEncGetEncodePresetGUIDs +/** + * \brief Receives an array of supported encoder preset GUIDs. + * + * The function returns an array of encode preset guids available for a given codec. 
+ * The client can directly use one of the preset guids based upon the use case + * or target device. The preset guid chosen can be directly used in + * NV_ENC_INITIALIZE_PARAMS::presetGUID parameter to ::NvEncEncodePicture() API. + * Alternately client can also use the preset guid to retrieve the encoding config + * parameters being used by NvEncodeAPI interface for that given preset, using + * ::NvEncGetEncodePresetConfig() API. It can then modify preset config parameters + * as per its use case and send it to NvEncodeAPI interface as part of + * NV_ENC_INITIALIZE_PARAMS::encodeConfig parameter for NvEncInitializeEncoder() + * API. + * + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the list of supported presets is to be + * retrieved. + * \param [in] guidArraySize + * Size of array of preset guids passed in \p preset GUIDs + * \param [out] presetGUIDs + * Array of supported Encode preset GUIDs from the NvEncodeAPI interface + * to client. + * \param [out] encodePresetGUIDCount + * Receives the number of preset GUIDs returned by the NvEncodeAPI + * interface. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodePresetGUIDs (void* encoder, GUID encodeGUID, GUID* presetGUIDs, uint32_t guidArraySize, uint32_t* encodePresetGUIDCount); + + +// NvEncGetEncodePresetConfig +/** + * \brief Returns a preset config structure supported for given preset GUID. + * + * The function returns a preset config structure for a given preset guid. Before + * using this function the client must enumerate the preset guids available for + * a given codec. 
The preset config structure can be modified by the client depending + * upon its use case and can be then used to initialize the encoder using + * ::NvEncInitializeEncoder() API. The client can use this function only if it + * wants to modify the NvEncodeAPI preset configuration, otherwise it can + * directly use the preset guid. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] encodeGUID + * Encode GUID, corresponding to which the list of supported presets is to be + * retrieved. + * \param [in] presetGUID + * Preset GUID, corresponding to which the Encoding configurations is to be + * retrieved. + * \param [out] presetConfig + * The requested Preset Encoder Attribute set. Refer ::_NV_ENC_CONFIG for +* more details. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodePresetConfig (void* encoder, GUID encodeGUID, GUID presetGUID, NV_ENC_PRESET_CONFIG* presetConfig); + +// NvEncInitializeEncoder +/** + * \brief Initialize the encoder. + * + * This API must be used to initialize the encoder. The initialization parameter + * is passed using \p *createEncodeParams The client must send the following + * fields of the _NV_ENC_INITIALIZE_PARAMS structure with a valid value. + * - NV_ENC_INITIALIZE_PARAMS::encodeGUID + * - NV_ENC_INITIALIZE_PARAMS::encodeWidth + * - NV_ENC_INITIALIZE_PARAMS::encodeHeight + * + * The client can pass a preset guid directly to the NvEncodeAPI interface using + * NV_ENC_INITIALIZE_PARAMS::presetGUID field. If the client doesn't pass + * NV_ENC_INITIALIZE_PARAMS::encodeConfig structure, the codec specific parameters + * will be selected based on the preset guid. 
The preset guid must have been + * validated by the client using ::NvEncGetEncodePresetGUIDs() API. + * If the client passes a custom ::_NV_ENC_CONFIG structure through + * NV_ENC_INITIALIZE_PARAMS::encodeConfig , it will override the codec specific parameters + * based on the preset guid. It is recommended that even if the client passes a custom config, + * it should also send a preset guid. In this case, the preset guid passed by the client + * will not override any of the custom config parameters programmed by the client, + * it is only used as a hint by the NvEncodeAPI interface to determine certain encoder parameters + * which are not exposed to the client. + * + * There are two modes of operation for the encoder namely: + * - Asynchronous mode + * - Synchronous mode + * + * The client can select asynchronous or synchronous mode by setting the \p + * enableEncodeAsync field in ::_NV_ENC_INITIALIZE_PARAMS to 1 or 0 respectively. + *\par Asynchronous mode of operation: + * The Asynchronous mode can be enabled by setting NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 1. + * The client operating in asynchronous mode must allocate completion event object + * for each output buffer and pass the completion event object in the + * ::NvEncEncodePicture() API. The client can create another thread and wait on + * the event object to be signalled by NvEncodeAPI interface on completion of the + * encoding process for the output frame. This should unblock the main thread from + * submitting work to the encoder. When the event is signalled the client can call + * NvEncodeAPI interfaces to copy the bitstream data using ::NvEncLockBitstream() + * API. This is the preferred mode of operation. + * + * NOTE: Asynchronous mode is not supported on Linux. + * + *\par Synchronous mode of operation: + * The client can select synchronous mode by setting NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 0. 
+ * The client working in synchronous mode can work in a single threaded or multi + * threaded mode. The client need not allocate any event objects. The client can + * only lock the bitstream data after NvEncodeAPI interface has returned + * ::NV_ENC_SUCCESS from encode picture. The NvEncodeAPI interface can return + * ::NV_ENC_ERR_NEED_MORE_INPUT error code from ::NvEncEncodePicture() API. The + * client must not lock the output buffer in such case but should send the next + * frame for encoding. The client must keep on calling ::NvEncEncodePicture() API + * until it returns ::NV_ENC_SUCCESS. \n + * The client must always lock the bitstream data in order in which it has submitted. + * This is true for both asynchronous and synchronous mode. + * + *\par Picture type decision: + * If the client is taking the picture type decision and it must disable the picture + * type decision module in NvEncodeAPI by setting NV_ENC_INITIALIZE_PARAMS::enablePTD + * to 0. In this case the client is required to send the picture in encoding + * order to NvEncodeAPI by doing the re-ordering for B frames. \n + * If the client doesn't want to take the picture type decision it can enable + * picture type decision module in the NvEncodeAPI interface by setting + * NV_ENC_INITIALIZE_PARAMS::enablePTD to 1 and send the input pictures in display + * order. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] createEncodeParams + * Refer ::_NV_ENC_INITIALIZE_PARAMS for details. 
+ * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncInitializeEncoder (void* encoder, NV_ENC_INITIALIZE_PARAMS* createEncodeParams); + + +// NvEncCreateInputBuffer +/** + * \brief Allocates Input buffer. + * + * This function is used to allocate an input buffer. The client must enumerate + * the input buffer format before allocating the input buffer resources. The + * NV_ENC_INPUT_PTR returned by the NvEncodeAPI interface in the + * NV_ENC_CREATE_INPUT_BUFFER::inputBuffer field can be directly used in + * ::NvEncEncodePicture() API. The number of input buffers to be allocated by the + * client must be at least 4 more than the number of B frames being used for encoding. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in,out] createInputBufferParams + * Pointer to the ::NV_ENC_CREATE_INPUT_BUFFER structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncCreateInputBuffer (void* encoder, NV_ENC_CREATE_INPUT_BUFFER* createInputBufferParams); + + +// NvEncDestroyInputBuffer +/** + * \brief Release an input buffers. + * + * This function is used to free an input buffer. If the client has allocated + * any input buffer using ::NvEncCreateInputBuffer() API, it must free those + * input buffers by calling this function. The client must release the input + * buffers before destroying the encoder using ::NvEncDestroyEncoder() API. 
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] inputBuffer
+ * Pointer to the input buffer to be released.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncDestroyInputBuffer (void* encoder, NV_ENC_INPUT_PTR inputBuffer);
+
+
+// NvEncCreateBitstreamBuffer
+/**
+ * \brief Allocates an output bitstream buffer
+ *
+ * This function is used to allocate an output bitstream buffer and returns a
+ * NV_ENC_OUTPUT_PTR to bitstream buffer to the client in the
+ * NV_ENC_CREATE_BITSTREAM_BUFFER::bitstreamBuffer field.
+ * The client can only call this function after the encoder session has been
+ * initialized using ::NvEncInitializeEncoder() API. The minimum number of output
+ * buffers allocated by the client must be at least 4 more than the number of
+ * B frames being used for encoding. The client can only access the output
+ * bitstream data by locking the \p bitstreamBuffer using the ::NvEncLockBitstream()
+ * function.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] createBitstreamBufferParams
+ * Pointer to ::NV_ENC_CREATE_BITSTREAM_BUFFER for details.
+ * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncCreateBitstreamBuffer (void* encoder, NV_ENC_CREATE_BITSTREAM_BUFFER* createBitstreamBufferParams); + + +// NvEncDestroyBitstreamBuffer +/** + * \brief Release a bitstream buffer. + * + * This function is used to release the output bitstream buffer allocated using + * the ::NvEncCreateBitstreamBuffer() function. The client must release the output + * bitstreamBuffer using this function before destroying the encoder session. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] bitstreamBuffer + * Pointer to the bitstream buffer being released. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncDestroyBitstreamBuffer (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer); + +// NvEncEncodePicture +/** + * \brief Submit an input picture for encoding. + * + * This function is used to submit an input picture buffer for encoding. The + * encoding parameters are passed using \p *encodePicParams which is a pointer + * to the ::_NV_ENC_PIC_PARAMS structure. + * + * If the client has set NV_ENC_INITIALIZE_PARAMS::enablePTD to 0, then it must + * send a valid value for the following fields. 
+ * - NV_ENC_PIC_PARAMS::pictureType + * - NV_ENC_PIC_PARAMS_H264::displayPOCSyntax (H264 only) + * - NV_ENC_PIC_PARAMS_H264::frameNumSyntax(H264 only) + * - NV_ENC_PIC_PARAMS_H264::refPicFlag(H264 only) + * + * + *\par Asynchronous Encoding + * If the client has enabled asynchronous mode of encoding by setting + * NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 1 in the ::NvEncInitializeEncoder() + * API ,then the client must send a valid NV_ENC_PIC_PARAMS::completionEvent. + * Incase of asynchronous mode of operation, client can queue the ::NvEncEncodePicture() + * API commands from the main thread and then queue output buffers to be processed + * to a secondary worker thread. Before the locking the output buffers in the + * secondary thread , the client must wait on NV_ENC_PIC_PARAMS::completionEvent + * it has queued in ::NvEncEncodePicture() API call. The client must always process + * completion event and the output buffer in the same order in which they have been + * submitted for encoding. The NvEncodeAPI interface is responsible for any + * re-ordering required for B frames and will always ensure that encoded bitstream + * data is written in the same order in which output buffer is submitted. + *\code + The below example shows how asynchronous encoding in case of 1 B frames + ------------------------------------------------------------------------ + Suppose the client allocated 4 input buffers(I1,I2..), 4 output buffers(O1,O2..) + and 4 completion events(E1, E2, ...). The NvEncodeAPI interface will need to + keep a copy of the input buffers for re-ordering and it allocates following + internal buffers (NvI1, NvI2...). These internal buffers are managed by NvEncodeAPI + and the client is not responsible for the allocating or freeing the memory of + the internal buffers. + + a) The client main thread will queue the following encode frame calls. + Note the picture type is unknown to the client, the decision is being taken by + NvEncodeAPI interface. 
+ The client should pass ::_NV_ENC_PIC_PARAMS parameter
+ consisting of allocated input buffer, output buffer and output events in successive
+ ::NvEncEncodePicture() API calls along with other required encode picture params.
+ For example:
+ 1st EncodePicture parameters - (I1, O1, E1)
+ 2nd EncodePicture parameters - (I2, O2, E2)
+ 3rd EncodePicture parameters - (I3, O3, E3)
+
+ b) NvEncodeAPI SW will receive the following encode Commands from the client.
+ The left side shows input from client in the form (Input buffer, Output Buffer,
+ Output Event). The right hand side shows a possible picture type decision taken by
+ the NvEncodeAPI interface.
+ (I1, O1, E1) ---P1 Frame
+ (I2, O2, E2) ---B2 Frame
+ (I3, O3, E3) ---P3 Frame
+
+ c) NvEncodeAPI interface will make a copy of the input buffers to its internal
+ buffers for re-ordering. These copies are done as part of nvEncEncodePicture
+ function call from the client and NvEncodeAPI interface is responsible for
+ synchronization of copy operation with the actual encoding operation.
+ I1 --> NvI1
+ I2 --> NvI2
+ I3 --> NvI3
+
+ d) After returning from ::NvEncEncodePicture() call, the client must queue the output
+ bitstream processing work to the secondary thread. The output bitstream processing
+ for asynchronous mode consists of first waiting on completion event(E1, E2..)
+ and then locking the output bitstream buffer(O1, O2..) for reading the encoded
+ data. The work queued to the secondary thread by the client is in the following order
+ (I1, O1, E1)
+ (I2, O2, E2)
+ (I3, O3, E3)
+ Note they are in the same order in which client calls ::NvEncEncodePicture() API
+ in \p step a).
+ + e) NvEncodeAPI interface will do the re-ordering such that Encoder HW will receive + the following encode commands: + (NvI1, O1, E1) ---P1 Frame + (NvI3, O2, E2) ---P3 Frame + (NvI2, O3, E3) ---B2 frame + + f) After the encoding operations are completed, the events will be signalled + by NvEncodeAPI interface in the following order : + (O1, E1) ---P1 Frame ,output bitstream copied to O1 and event E1 signalled. + (O2, E2) ---P3 Frame ,output bitstream copied to O2 and event E2 signalled. + (O3, E3) ---B2 Frame ,output bitstream copied to O3 and event E3 signalled. + + g) The client must lock the bitstream data using ::NvEncLockBitstream() API in + the order O1,O2,O3 to read the encoded data, after waiting for the events + to be signalled in the same order i.e E1, E2 and E3.The output processing is + done in the secondary thread in the following order: + Waits on E1, copies encoded bitstream from O1 + Waits on E2, copies encoded bitstream from O2 + Waits on E3, copies encoded bitstream from O3 + + -Note the client will receive the events signalling and output buffer in the + same order in which they have submitted for encoding. + -Note the LockBitstream will have picture type field which will notify the + output picture type to the clients. + -Note the input, output buffer and the output completion event are free to be + reused once NvEncodeAPI interfaced has signalled the event and the client has + copied the data from the output buffer. + + * \endcode + * + *\par Synchronous Encoding + * The client can enable synchronous mode of encoding by setting + * NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 0 in ::NvEncInitializeEncoder() API. + * The NvEncodeAPI interface may return ::NV_ENC_ERR_NEED_MORE_INPUT error code for + * some ::NvEncEncodePicture() API calls when NV_ENC_INITIALIZE_PARAMS::enablePTD + * is set to 1, but the client must not treat it as a fatal error. 
The NvEncodeAPI + * interface might not be able to submit an input picture buffer for encoding + * immediately due to re-ordering for B frames. The NvEncodeAPI interface cannot + * submit the input picture which is decided to be encoded as B frame as it waits + * for backward reference from temporally subsequent frames. This input picture + * is buffered internally and waits for more input picture to arrive. The client + * must not call ::NvEncLockBitstream() API on the output buffers whose + * ::NvEncEncodePicture() API returns ::NV_ENC_ERR_NEED_MORE_INPUT. The client must + * wait for the NvEncodeAPI interface to return ::NV_ENC_SUCCESS before locking the + * output bitstreams to read the encoded bitstream data. The following example + * explains the scenario with synchronous encoding with 2 B frames. + *\code + The below example shows how synchronous encoding works in case of 1 B frames + ----------------------------------------------------------------------------- + Suppose the client allocated 4 input buffers(I1,I2..), 4 output buffers(O1,O2..) + and 4 completion events(E1, E2, ...). The NvEncodeAPI interface will need to + keep a copy of the input buffers for re-ordering and it allocates following + internal buffers (NvI1, NvI2...). These internal buffers are managed by NvEncodeAPI + and the client is not responsible for the allocating or freeing the memory of + the internal buffers. + + The client calls ::NvEncEncodePicture() API with input buffer I1 and output buffer O1. + The NvEncodeAPI decides to encode I1 as P frame and submits it to encoder + HW and returns ::NV_ENC_SUCCESS. + The client can now read the encoded data by locking the output O1 by calling + NvEncLockBitstream API. + + The client calls ::NvEncEncodePicture() API with input buffer I2 and output buffer O2. + The NvEncodeAPI decides to encode I2 as B frame and buffers I2 by copying it + to internal buffer and returns ::NV_ENC_ERR_NEED_MORE_INPUT. 
+ The error is not fatal and it notifies client that it cannot read the encoded
+ data by locking the output O2 by calling ::NvEncLockBitstream() API without submitting
+ more work to the NvEncodeAPI interface.
+
+ The client calls ::NvEncEncodePicture() with input buffer I3 and output buffer O3.
+ The NvEncodeAPI decides to encode I3 as P frame and it first submits I3 for
+ encoding which will be used as backward reference frame for I2.
+ The NvEncodeAPI then submits I2 for encoding and returns ::NV_ENC_SUCCESS. Both
+ the submissions are part of the same ::NvEncEncodePicture() function call.
+ The client can now read the encoded data for both the frames by locking the output
+ O2 followed by O3, by calling ::NvEncLockBitstream() API.
+
+ The client must always lock the output in the same order in which it has submitted
+ to receive the encoded bitstream in correct encoding order.
+
+ * \endcode
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] encodePicParams
+ * Pointer to the ::_NV_ENC_PIC_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_ENCODER_BUSY \n
+ * ::NV_ENC_ERR_NEED_MORE_INPUT \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncEncodePicture (void* encoder, NV_ENC_PIC_PARAMS* encodePicParams);
+
+
+// NvEncLockBitstream
+/**
+ * \brief Lock output bitstream buffer
+ *
+ * This function is used to lock the bitstream buffer to read the encoded data.
+ * The client can only access the encoded data by calling this function.
+ * The pointer to client accessible encoded data is returned in the
+ * NV_ENC_LOCK_BITSTREAM::bitstreamBufferPtr field.
The size of the encoded data + * in the output buffer is returned in the NV_ENC_LOCK_BITSTREAM::bitstreamSizeInBytes + * The NvEncodeAPI interface also returns the output picture type and picture structure + * of the encoded frame in NV_ENC_LOCK_BITSTREAM::pictureType and + * NV_ENC_LOCK_BITSTREAM::pictureStruct fields respectively. If the client has + * set NV_ENC_LOCK_BITSTREAM::doNotWait to 1, the function might return + * ::NV_ENC_ERR_LOCK_BUSY if client is operating in synchronous mode. This is not + * a fatal failure if NV_ENC_LOCK_BITSTREAM::doNotWait is set to 1. In the above case the client can + * retry the function after few milliseconds. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in,out] lockBitstreamBufferParams + * Pointer to the ::_NV_ENC_LOCK_BITSTREAM structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_LOCK_BUSY \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncLockBitstream (void* encoder, NV_ENC_LOCK_BITSTREAM* lockBitstreamBufferParams); + + +// NvEncUnlockBitstream +/** + * \brief Unlock the output bitstream buffer + * + * This function is used to unlock the output bitstream buffer after the client + * has read the encoded data from output buffer. The client must call this function + * to unlock the output buffer which it has previously locked using ::NvEncLockBitstream() + * function. Using a locked bitstream buffer in ::NvEncEncodePicture() API will cause + * the function to fail. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. 
+ * \param [in,out] bitstreamBuffer
+ * bitstream buffer pointer being unlocked
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnlockBitstream (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer);
+
+
+// NvLockInputBuffer
+/**
+ * \brief Locks an input buffer
+ *
+ * This function is used to lock the input buffer to load the uncompressed YUV
+ * pixel data into input buffer memory. The client must pass the NV_ENC_INPUT_PTR
+ * it had previously allocated using ::NvEncCreateInputBuffer() in the
+ * NV_ENC_LOCK_INPUT_BUFFER::inputBuffer field.
+ * The NvEncodeAPI interface returns pointer to client accessible input buffer
+ * memory in NV_ENC_LOCK_INPUT_BUFFER::bufferDataPtr field.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] lockInputBufferParams
+ * Pointer to the ::_NV_ENC_LOCK_INPUT_BUFFER structure
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_LOCK_BUSY \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncLockInputBuffer (void* encoder, NV_ENC_LOCK_INPUT_BUFFER* lockInputBufferParams);
+
+
+// NvUnlockInputBuffer
+/**
+ * \brief Unlocks the input buffer
+ *
+ * This function is used to unlock the input buffer memory previously locked for
+ * uploading YUV pixel data.
The input buffer must be unlocked before being used + * again for encoding, otherwise NvEncodeAPI will fail the ::NvEncEncodePicture() + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] inputBuffer + * Pointer to the input buffer that is being unlocked. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + * + */ +NVENCSTATUS NVENCAPI NvEncUnlockInputBuffer (void* encoder, NV_ENC_INPUT_PTR inputBuffer); + + +// NvEncGetEncodeStats +/** + * \brief Get encoding statistics. + * + * This function is used to retrieve the encoding statistics. + * This API is not supported when encode device type is CUDA. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in,out] encodeStats + * Pointer to the ::_NV_ENC_STAT structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetEncodeStats (void* encoder, NV_ENC_STAT* encodeStats); + + +// NvEncGetSequenceParams +/** + * \brief Get encoded sequence and picture header. + * + * This function can be used to retrieve the sequence and picture header out of + * band. The client must call this function only after the encoder has been + * initialized using ::NvEncInitializeEncoder() function. 
The client must + * allocate the memory where the NvEncodeAPI interface can copy the bitstream + * header and pass the pointer to the memory in NV_ENC_SEQUENCE_PARAM_PAYLOAD::spsppsBuffer. + * The size of buffer is passed in the field NV_ENC_SEQUENCE_PARAM_PAYLOAD::inBufferSize. + * The NvEncodeAPI interface will copy the bitstream header payload and returns + * the actual size of the bitstream header in the field + * NV_ENC_SEQUENCE_PARAM_PAYLOAD::outSPSPPSPayloadSize. + * The client must call ::NvEncGetSequenceParams() function from the same thread which is + * being used to call ::NvEncEncodePicture() function. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in,out] sequenceParamPayload + * Pointer to the ::_NV_ENC_SEQUENCE_PARAM_PAYLOAD structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncGetSequenceParams (void* encoder, NV_ENC_SEQUENCE_PARAM_PAYLOAD* sequenceParamPayload); + + +// NvEncRegisterAsyncEvent +/** + * \brief Register event for notification to encoding completion. + * + * This function is used to register the completion event with NvEncodeAPI + * interface. The event is required when the client has configured the encoder to + * work in asynchronous mode. In this mode the client needs to send a completion + * event with every output buffer. The NvEncodeAPI interface will signal the + * completion of the encoding process using this event. Only after the event is + * signalled the client can get the encoded data using ::NvEncLockBitstream() function. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. 
+ * \param [in] eventParams
+ * Pointer to the ::_NV_ENC_EVENT_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncRegisterAsyncEvent (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+
+
+// NvEncUnregisterAsyncEvent
+/**
+ * \brief Unregister completion event.
+ *
+ * This function is used to unregister completion event which has been previously
+ * registered using ::NvEncRegisterAsyncEvent() function. The client must unregister
+ * all events before destroying the encoder using ::NvEncDestroyEncoder() function.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] eventParams
+ * Pointer to the ::_NV_ENC_EVENT_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnregisterAsyncEvent (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+
+
+// NvEncMapInputResource
+/**
+ * \brief Map an externally created input resource pointer for encoding.
+ *
+ * Maps an externally allocated input resource (previously registered using the
+ * ::NvEncRegisterResource() API) and returns a NV_ENC_INPUT_PTR
+ * which can be used for encoding in the ::NvEncEncodePicture() function. The
+ * mapped resource is returned in the field NV_ENC_MAP_INPUT_RESOURCE::outputResourcePtr.
+ * The NvEncodeAPI interface also returns the buffer format of the mapped resource
+ * in the field NV_ENC_MAP_INPUT_RESOURCE::outbufferFmt.
+ * This function provides synchronization guarantee that any direct3d or cuda
+ * work submitted on the input buffer is completed before the buffer is used for encoding.
+ * The client should not access any input buffer while they are mapped by the encoder.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] mapInputResParams
+ * Pointer to the ::_NV_ENC_MAP_INPUT_RESOURCE structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n
+ * ::NV_ENC_ERR_MAP_FAILED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncMapInputResource (void* encoder, NV_ENC_MAP_INPUT_RESOURCE* mapInputResParams);
+
+
+// NvEncUnmapInputResource
+/**
+ * \brief UnMaps a NV_ENC_INPUT_PTR which was mapped for encoding
+ *
+ *
+ * UnMaps an input buffer which was previously mapped using ::NvEncMapInputResource()
+ * API. The mapping created using ::NvEncMapInputResource() should be invalidated
+ * using this API before the external resource is destroyed by the client. The client
+ * must unmap the buffer after ::NvEncLockBitstream() API returns successfully for encode
+ * work submitted using the mapped input buffer.
+ *
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] mappedInputBuffer + * Pointer to the NV_ENC_INPUT_PTR + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n + * ::NV_ENC_ERR_RESOURCE_NOT_MAPPED \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncUnmapInputResource (void* encoder, NV_ENC_INPUT_PTR mappedInputBuffer); + +// NvEncDestroyEncoder +/** + * \brief Destroy Encoding Session + * + * Destroys the encoder session previously created using ::NvEncOpenEncodeSession() + * function. The client must flush the encoder before freeing any resources. In order + * to flush the encoder the client must pass a NULL encode picture packet and either + * wait for the ::NvEncEncodePicture() function to return in synchronous mode or wait + * for the flush event to be signaled by the encoder in asynchronous mode. + * The client must free all the input and output resources created using the + * NvEncodeAPI interface before destroying the encoder. If the client is operating + * in asynchronous mode, it must also unregister the completion events previously + * registered. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncDestroyEncoder (void* encoder); + +// NvEncInvalidateRefFrames +/** + * \brief Invalidate reference frames + * + * Invalidates reference frame based on the time stamp provided by the client. 
+ * The encoder marks any reference frames or any frames which have been reconstructed + * using the corrupt frame as invalid for motion estimation and uses older reference + * frames for motion estimation. The encoded forces the current frame to be encoded + * as an intra frame if no reference frames are left after invalidation process. + * This is useful for low latency application for error resiliency. The client + * is recommended to set NV_ENC_CONFIG_H264::maxNumRefFrames to a large value so + * that encoder can keep a backup of older reference frames in the DPB and can use them + * for motion estimation when the newer reference frames have been invalidated. + * This API can be called multiple times. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] invalidRefFrameTimeStamp + * Timestamp of the invalid reference frames which needs to be invalidated. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncInvalidateRefFrames(void* encoder, uint64_t invalidRefFrameTimeStamp); + +// NvEncOpenEncodeSessionEx +/** + * \brief Opens an encoding session. + * + * Opens an encoding session and returns a pointer to the encoder interface in + * the \p **encoder parameter. The client should start encoding process by calling + * this API first. + * The client must pass a pointer to IDirect3DDevice9/CUDA interface in the \p *device parameter. + * If the creation of encoder session fails, the client must call ::NvEncDestroyEncoder API + * before exiting. + * + * \param [in] openSessionExParams + * Pointer to a ::NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS structure. + * \param [out] encoder + * Encode Session pointer to the NvEncodeAPI interface. 
+ * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_NO_ENCODE_DEVICE \n + * ::NV_ENC_ERR_UNSUPPORTED_DEVICE \n + * ::NV_ENC_ERR_INVALID_DEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncOpenEncodeSessionEx (NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS *openSessionExParams, void** encoder); + +// NvEncRegisterResource +/** + * \brief Registers a resource with the Nvidia Video Encoder Interface. + * + * Registers a resource with the Nvidia Video Encoder Interface for book keeping. + * The client is expected to pass the registered resource handle as well, while calling ::NvEncMapInputResource API. + * This API is not implemented for the DirectX Interface. + * DirectX based clients need not change their implementation. + * + * \param [in] encoder + * Pointer to the NVEncodeAPI interface. + * + * \param [in] registerResParams + * Pointer to a ::_NV_ENC_REGISTER_RESOURCE structure + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_RESOURCE_REGISTER_FAILED \n + * ::NV_ENC_ERR_GENERIC \n + * ::NV_ENC_ERR_UNIMPLEMENTED \n + * + */ +NVENCSTATUS NVENCAPI NvEncRegisterResource (void* encoder, NV_ENC_REGISTER_RESOURCE* registerResParams); + +// NvEncUnregisterResource +/** + * \brief Unregisters a resource previously registered with the Nvidia Video Encoder Interface. + * + * Unregisters a resource previously registered with the Nvidia Video Encoder Interface. + * The client is expected to unregister any resource that it has registered with the + * Nvidia Video Encoder Interface before destroying the resource. 
+ * This API is not implemented for the DirectX Interface. + * DirectX based clients need not change their implementation. + * + * \param [in] encoder + * Pointer to the NVEncodeAPI interface. + * + * \param [in] registeredResource + * The registered resource pointer that was returned in ::NvEncRegisterResource. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n + * ::NV_ENC_ERR_GENERIC \n + * ::NV_ENC_ERR_UNIMPLEMENTED \n + * + */ +NVENCSTATUS NVENCAPI NvEncUnregisterResource (void* encoder, NV_ENC_REGISTERED_PTR registeredResource); + +// NvEncReconfigureEncoder +/** + * \brief Reconfigure an existing encoding session. + * + * Reconfigure an existing encoding session. + * The client should call this API to change/reconfigure the parameter passed during + * NvEncInitializeEncoder API call. + * Currently Reconfiguration of following are not supported. + * Change in GOP structure. + * Change in sync-Async mode. + * Change in MaxWidth & MaxHeight. + * Change in PTDmode. + * + * Resolution change is possible only if maxEncodeWidth & maxEncodeHeight of NV_ENC_INITIALIZE_PARAMS + * is set while creating encoder session. + * + * \param [in] encoder + * Pointer to the NVEncodeAPI interface. + * + * \param [in] reInitEncodeParams + * Pointer to a ::NV_ENC_RECONFIGURE_PARAMS structure. 
+ * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_NO_ENCODE_DEVICE \n + * ::NV_ENC_ERR_UNSUPPORTED_DEVICE \n + * ::NV_ENC_ERR_INVALID_DEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_GENERIC \n + * + */ +NVENCSTATUS NVENCAPI NvEncReconfigureEncoder (void *encoder, NV_ENC_RECONFIGURE_PARAMS* reInitEncodeParams); + + + +// NvEncCreateMVBuffer +/** + * \brief Allocates output MV buffer for ME only mode. + * + * This function is used to allocate an output MV buffer. The size of the mvBuffer is + * dependent on the frame height and width of the last ::NvEncCreateInputBuffer() call. + * The NV_ENC_OUTPUT_PTR returned by the NvEncodeAPI interface in the + * ::NV_ENC_CREATE_MV_BUFFER::mvBuffer field should be used in + * ::NvEncRunMotionEstimationOnly() API. + * Client must lock ::NV_ENC_CREATE_MV_BUFFER::mvBuffer using ::NvEncLockBitstream() API to get the motion vector data. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in,out] createMVBufferParams + * Pointer to the ::NV_ENC_CREATE_MV_BUFFER structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_GENERIC \n + */ +NVENCSTATUS NVENCAPI NvEncCreateMVBuffer (void* encoder, NV_ENC_CREATE_MV_BUFFER* createMVBufferParams); + + +// NvEncDestroyMVBuffer +/** + * \brief Release an output MV buffer for ME only mode. + * + * This function is used to release the output MV buffer allocated using + * the ::NvEncCreateMVBuffer() function. The client must release the output + * mvBuffer using this function before destroying the encoder session. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. 
+ * \param [in] mvBuffer + * Pointer to the mvBuffer being released. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + */ +NVENCSTATUS NVENCAPI NvEncDestroyMVBuffer (void* encoder, NV_ENC_OUTPUT_PTR mvBuffer); + + +// NvEncRunMotionEstimationOnly +/** + * \brief Submit an input picture and reference frame for motion estimation in ME only mode. + * + * This function is used to submit the input frame and reference frame for motion + * estimation. The ME parameters are passed using *meOnlyParams which is a pointer + * to ::_NV_ENC_MEONLY_PARAMS structure. + * Client must lock ::NV_ENC_CREATE_MV_BUFFER::mvBuffer using ::NvEncLockBitstream() API to get the motion vector data. + * to get motion vector data. + * + * \param [in] encoder + * Pointer to the NvEncodeAPI interface. + * \param [in] meOnlyParams + * Pointer to the ::_NV_ENC_MEONLY_PARAMS structure. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n + * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n + * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n + * ::NV_ENC_ERR_OUT_OF_MEMORY \n + * ::NV_ENC_ERR_INVALID_PARAM \n + * ::NV_ENC_ERR_INVALID_VERSION \n + * ::NV_ENC_ERR_NEED_MORE_INPUT \n + * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n + * ::NV_ENC_ERR_GENERIC \n + */ +NVENCSTATUS NVENCAPI NvEncRunMotionEstimationOnly (void* encoder, NV_ENC_MEONLY_PARAMS* meOnlyParams); + +// NvEncodeAPIGetMaxSupportedVersion +/** + * \brief Get the largest NvEncodeAPI version supported by the driver. + * + * This function can be used by clients to determine if the driver supports + * the NvEncodeAPI header the application was compiled with. 
+ * + * \param [out] version + * Pointer to the requested value. The 4 least significant bits in the returned + * indicate the minor version and the rest of the bits indicate the major + * version of the largest supported version. + * + * \return + * ::NV_ENC_SUCCESS \n + * ::NV_ENC_ERR_INVALID_PTR \n + */ +NVENCSTATUS NVENCAPI NvEncodeAPIGetMaxSupportedVersion (uint32_t* version); + + +/// \cond API PFN +/* + * Defines API function pointers + */ +typedef NVENCSTATUS (NVENCAPI* PNVENCOPENENCODESESSION) (void* device, uint32_t deviceType, void** encoder); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEGUIDCOUNT) (void* encoder, uint32_t* encodeGUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEGUIDS) (void* encoder, GUID* GUIDs, uint32_t guidArraySize, uint32_t* GUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPROFILEGUIDCOUNT) (void* encoder, GUID encodeGUID, uint32_t* encodeProfileGUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPROFILEGUIDS) (void* encoder, GUID encodeGUID, GUID* profileGUIDs, uint32_t guidArraySize, uint32_t* GUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETINPUTFORMATCOUNT) (void* encoder, GUID encodeGUID, uint32_t* inputFmtCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETINPUTFORMATS) (void* encoder, GUID encodeGUID, NV_ENC_BUFFER_FORMAT* inputFmts, uint32_t inputFmtArraySize, uint32_t* inputFmtCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODECAPS) (void* encoder, GUID encodeGUID, NV_ENC_CAPS_PARAM* capsParam, int* capsVal); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETCOUNT) (void* encoder, GUID encodeGUID, uint32_t* encodePresetGUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETGUIDS) (void* encoder, GUID encodeGUID, GUID* presetGUIDs, uint32_t guidArraySize, uint32_t* encodePresetGUIDCount); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETCONFIG) (void* encoder, GUID encodeGUID, GUID presetGUID, NV_ENC_PRESET_CONFIG* presetConfig); +typedef NVENCSTATUS (NVENCAPI* 
PNVENCINITIALIZEENCODER) (void* encoder, NV_ENC_INITIALIZE_PARAMS* createEncodeParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEINPUTBUFFER) (void* encoder, NV_ENC_CREATE_INPUT_BUFFER* createInputBufferParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYINPUTBUFFER) (void* encoder, NV_ENC_INPUT_PTR inputBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEBITSTREAMBUFFER) (void* encoder, NV_ENC_CREATE_BITSTREAM_BUFFER* createBitstreamBufferParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYBITSTREAMBUFFER) (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCENCODEPICTURE) (void* encoder, NV_ENC_PIC_PARAMS* encodePicParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCLOCKBITSTREAM) (void* encoder, NV_ENC_LOCK_BITSTREAM* lockBitstreamBufferParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCUNLOCKBITSTREAM) (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCLOCKINPUTBUFFER) (void* encoder, NV_ENC_LOCK_INPUT_BUFFER* lockInputBufferParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCUNLOCKINPUTBUFFER) (void* encoder, NV_ENC_INPUT_PTR inputBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODESTATS) (void* encoder, NV_ENC_STAT* encodeStats); +typedef NVENCSTATUS (NVENCAPI* PNVENCGETSEQUENCEPARAMS) (void* encoder, NV_ENC_SEQUENCE_PARAM_PAYLOAD* sequenceParamPayload); +typedef NVENCSTATUS (NVENCAPI* PNVENCREGISTERASYNCEVENT) (void* encoder, NV_ENC_EVENT_PARAMS* eventParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCUNREGISTERASYNCEVENT) (void* encoder, NV_ENC_EVENT_PARAMS* eventParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCMAPINPUTRESOURCE) (void* encoder, NV_ENC_MAP_INPUT_RESOURCE* mapInputResParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCUNMAPINPUTRESOURCE) (void* encoder, NV_ENC_INPUT_PTR mappedInputBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYENCODER) (void* encoder); +typedef NVENCSTATUS (NVENCAPI* PNVENCINVALIDATEREFFRAMES) (void* encoder, uint64_t invalidRefFrameTimeStamp); +typedef 
NVENCSTATUS (NVENCAPI* PNVENCOPENENCODESESSIONEX) (NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS *openSessionExParams, void** encoder); +typedef NVENCSTATUS (NVENCAPI* PNVENCREGISTERRESOURCE) (void* encoder, NV_ENC_REGISTER_RESOURCE* registerResParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCUNREGISTERRESOURCE) (void* encoder, NV_ENC_REGISTERED_PTR registeredRes); +typedef NVENCSTATUS (NVENCAPI* PNVENCRECONFIGUREENCODER) (void* encoder, NV_ENC_RECONFIGURE_PARAMS* reInitEncodeParams); + +typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEMVBUFFER) (void* encoder, NV_ENC_CREATE_MV_BUFFER* createMVBufferParams); +typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYMVBUFFER) (void* encoder, NV_ENC_OUTPUT_PTR mvBuffer); +typedef NVENCSTATUS (NVENCAPI* PNVENCRUNMOTIONESTIMATIONONLY) (void* encoder, NV_ENC_MEONLY_PARAMS* meOnlyParams); + + +/// \endcond + + +/** @} */ /* END ENCODE_FUNC */ + +/** + * \ingroup ENCODER_STRUCTURE + * NV_ENCODE_API_FUNCTION_LIST + */ +typedef struct _NV_ENCODE_API_FUNCTION_LIST +{ + uint32_t version; /**< [in]: Client should pass NV_ENCODE_API_FUNCTION_LIST_VER. */ + uint32_t reserved; /**< [in]: Reserved and should be set to 0. */ + PNVENCOPENENCODESESSION nvEncOpenEncodeSession; /**< [out]: Client should access ::NvEncOpenEncodeSession() API through this pointer. */ + PNVENCGETENCODEGUIDCOUNT nvEncGetEncodeGUIDCount; /**< [out]: Client should access ::NvEncGetEncodeGUIDCount() API through this pointer. */ + PNVENCGETENCODEPRESETCOUNT nvEncGetEncodeProfileGUIDCount; /**< [out]: Client should access ::NvEncGetEncodeProfileGUIDCount() API through this pointer.*/ + PNVENCGETENCODEPRESETGUIDS nvEncGetEncodeProfileGUIDs; /**< [out]: Client should access ::NvEncGetEncodeProfileGUIDs() API through this pointer. */ + PNVENCGETENCODEGUIDS nvEncGetEncodeGUIDs; /**< [out]: Client should access ::NvEncGetEncodeGUIDs() API through this pointer. 
*/ + PNVENCGETINPUTFORMATCOUNT nvEncGetInputFormatCount; /**< [out]: Client should access ::NvEncGetInputFormatCount() API through this pointer. */ + PNVENCGETINPUTFORMATS nvEncGetInputFormats; /**< [out]: Client should access ::NvEncGetInputFormats() API through this pointer. */ + PNVENCGETENCODECAPS nvEncGetEncodeCaps; /**< [out]: Client should access ::NvEncGetEncodeCaps() API through this pointer. */ + PNVENCGETENCODEPRESETCOUNT nvEncGetEncodePresetCount; /**< [out]: Client should access ::NvEncGetEncodePresetCount() API through this pointer. */ + PNVENCGETENCODEPRESETGUIDS nvEncGetEncodePresetGUIDs; /**< [out]: Client should access ::NvEncGetEncodePresetGUIDs() API through this pointer. */ + PNVENCGETENCODEPRESETCONFIG nvEncGetEncodePresetConfig; /**< [out]: Client should access ::NvEncGetEncodePresetConfig() API through this pointer. */ + PNVENCINITIALIZEENCODER nvEncInitializeEncoder; /**< [out]: Client should access ::NvEncInitializeEncoder() API through this pointer. */ + PNVENCCREATEINPUTBUFFER nvEncCreateInputBuffer; /**< [out]: Client should access ::NvEncCreateInputBuffer() API through this pointer. */ + PNVENCDESTROYINPUTBUFFER nvEncDestroyInputBuffer; /**< [out]: Client should access ::NvEncDestroyInputBuffer() API through this pointer. */ + PNVENCCREATEBITSTREAMBUFFER nvEncCreateBitstreamBuffer; /**< [out]: Client should access ::NvEncCreateBitstreamBuffer() API through this pointer. */ + PNVENCDESTROYBITSTREAMBUFFER nvEncDestroyBitstreamBuffer; /**< [out]: Client should access ::NvEncDestroyBitstreamBuffer() API through this pointer. */ + PNVENCENCODEPICTURE nvEncEncodePicture; /**< [out]: Client should access ::NvEncEncodePicture() API through this pointer. */ + PNVENCLOCKBITSTREAM nvEncLockBitstream; /**< [out]: Client should access ::NvEncLockBitstream() API through this pointer. */ + PNVENCUNLOCKBITSTREAM nvEncUnlockBitstream; /**< [out]: Client should access ::NvEncUnlockBitstream() API through this pointer. 
*/ + PNVENCLOCKINPUTBUFFER nvEncLockInputBuffer; /**< [out]: Client should access ::NvEncLockInputBuffer() API through this pointer. */ + PNVENCUNLOCKINPUTBUFFER nvEncUnlockInputBuffer; /**< [out]: Client should access ::NvEncUnlockInputBuffer() API through this pointer. */ + PNVENCGETENCODESTATS nvEncGetEncodeStats; /**< [out]: Client should access ::NvEncGetEncodeStats() API through this pointer. */ + PNVENCGETSEQUENCEPARAMS nvEncGetSequenceParams; /**< [out]: Client should access ::NvEncGetSequenceParams() API through this pointer. */ + PNVENCREGISTERASYNCEVENT nvEncRegisterAsyncEvent; /**< [out]: Client should access ::NvEncRegisterAsyncEvent() API through this pointer. */ + PNVENCUNREGISTERASYNCEVENT nvEncUnregisterAsyncEvent; /**< [out]: Client should access ::NvEncUnregisterAsyncEvent() API through this pointer. */ + PNVENCMAPINPUTRESOURCE nvEncMapInputResource; /**< [out]: Client should access ::NvEncMapInputResource() API through this pointer. */ + PNVENCUNMAPINPUTRESOURCE nvEncUnmapInputResource; /**< [out]: Client should access ::NvEncUnmapInputResource() API through this pointer. */ + PNVENCDESTROYENCODER nvEncDestroyEncoder; /**< [out]: Client should access ::NvEncDestroyEncoder() API through this pointer. */ + PNVENCINVALIDATEREFFRAMES nvEncInvalidateRefFrames; /**< [out]: Client should access ::NvEncInvalidateRefFrames() API through this pointer. */ + PNVENCOPENENCODESESSIONEX nvEncOpenEncodeSessionEx; /**< [out]: Client should access ::NvEncOpenEncodeSession() API through this pointer. */ + PNVENCREGISTERRESOURCE nvEncRegisterResource; /**< [out]: Client should access ::NvEncRegisterResource() API through this pointer. */ + PNVENCUNREGISTERRESOURCE nvEncUnregisterResource; /**< [out]: Client should access ::NvEncUnregisterResource() API through this pointer. */ + PNVENCRECONFIGUREENCODER nvEncReconfigureEncoder; /**< [out]: Client should access ::NvEncReconfigureEncoder() API through this pointer. 
*/ + void* reserved1; + PNVENCCREATEMVBUFFER nvEncCreateMVBuffer; /**< [out]: Client should access ::NvEncCreateMVBuffer API through this pointer. */ + PNVENCDESTROYMVBUFFER nvEncDestroyMVBuffer; /**< [out]: Client should access ::NvEncDestroyMVBuffer API through this pointer. */ + PNVENCRUNMOTIONESTIMATIONONLY nvEncRunMotionEstimationOnly; /**< [out]: Client should access ::NvEncRunMotionEstimationOnly API through this pointer. */ + void* reserved2[281]; /**< [in]: Reserved and must be set to NULL */ +} NV_ENCODE_API_FUNCTION_LIST; + +/** Macro for constructing the version field of ::_NV_ENCODEAPI_FUNCTION_LIST. */ +#define NV_ENCODE_API_FUNCTION_LIST_VER NVENCAPI_STRUCT_VERSION(2) + +// NvEncodeAPICreateInstance +/** + * \ingroup ENCODE_FUNC + * Entry Point to the NvEncodeAPI interface. + * + * Creates an instance of the NvEncodeAPI interface, and populates the + * pFunctionList with function pointers to the API routines implemented by the + * NvEncodeAPI interface. + * + * \param [out] functionList + * + * \return + * ::NV_ENC_SUCCESS + * ::NV_ENC_ERR_INVALID_PTR + */ +NVENCSTATUS NVENCAPI NvEncodeAPICreateInstance(NV_ENCODE_API_FUNCTION_LIST *functionList); + +#ifdef __cplusplus +} +#endif + + +#endif + diff --git a/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Private/ProxyLODVolume.cpp b/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Private/ProxyLODVolume.cpp new file mode 100644 index 000000000000..9134c6a43aae --- /dev/null +++ b/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Private/ProxyLODVolume.cpp @@ -0,0 +1,147 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "ProxyLODVolume.h" + +#include "ProxyLODMeshAttrTransfer.h" +#include "ProxyLODMeshConvertUtils.h" +#include "ProxyLODMeshSDFConversions.h" +#include "ProxyLODMeshTypes.h" +#include "ProxyLODMeshUtilities.h" + +#include +#include // for Spatial Query +#include // for MeshToVolume +#include // for VolumeToMesh + +typedef openvdb::math::Transform OpenVDBTransform; + +class FProxyLODVolumeImpl : public IProxyLODVolume +{ +public: + FProxyLODVolumeImpl() + : VoxelSize(0.0) + { + } + + ~FProxyLODVolumeImpl() + { + SDFVolume.reset(); + SrcPolyIndexGrid.reset(); + Sampler.reset(); + } + + bool Initialize(const TArray& Geometry, float Accuracy) + { + FRawMeshArrayAdapter SrcGeometryAdapter(Geometry); + OpenVDBTransform::Ptr XForm = OpenVDBTransform::createLinearTransform(Accuracy); + SrcGeometryAdapter.SetTransform(XForm); + + VoxelSize = SrcGeometryAdapter.GetTransform().voxelSize()[0]; + + SrcPolyIndexGrid = openvdb::Int32Grid::create(); + + if (!ProxyLOD::MeshArrayToSDFVolume(SrcGeometryAdapter, SDFVolume, SrcPolyIndexGrid.get())) + { + SrcPolyIndexGrid.reset(); + return false; + } + + Sampler.reset(new openvdb::tools::GridSampler(*SDFVolume)); + + return true; + } + + virtual double GetVoxelSize() const override + { + return VoxelSize; + } + + virtual FVector3i GetBBoxSize() const override + { + if (SDFVolume == nullptr) + { + return FVector3i(0,0,0); + } + + openvdb::math::Coord VolumeBBoxSize = SDFVolume->evalActiveVoxelDim(); + + return FVector3i(VolumeBBoxSize.x(), VolumeBBoxSize.y(), VolumeBBoxSize.z()); + } + + virtual void CloseGaps(const double GapRadius, const int32 MaxDilations) override + { + ProxyLOD::CloseGaps(SDFVolume, GapRadius, MaxDilations); + } + + virtual float QueryDistance(const FVector& Point) const override + { + return Sampler->wsSample(openvdb::Vec3R(Point.X, Point.Y, Point.Z)); + } + + virtual void ConvertToRawMesh(FRawMesh& OutRawMesh) const override + { + // Mesh types that will be shared by various stages. 
+ FAOSMesh AOSMeshedVolume; + ProxyLOD::SDFVolumeToMesh(SDFVolume, 0.0, 0.0, AOSMeshedVolume); + + FVertexDataMesh VertexDataMesh; + ProxyLOD::ConvertMesh(AOSMeshedVolume, VertexDataMesh); + + ProxyLOD::ConvertMesh(VertexDataMesh, OutRawMesh); + } + + void ExpandNarrowBand(float ExteriorWidth, float InteriorWidth) + { + using namespace openvdb::tools; + + FRawMesh RawMesh; + ConvertToRawMesh(RawMesh); + FRawMeshAdapter MeshAdapter(RawMesh, SDFVolume->transform()); + + openvdb::FloatGrid::Ptr NewSDFVolume; + openvdb::Int32Grid::Ptr NewSrcPolyIndexGrid; + + try + { + NewSrcPolyIndexGrid = openvdb::Int32Grid::create(); + NewSDFVolume = openvdb::tools::meshToVolume(MeshAdapter, MeshAdapter.GetTransform(), ExteriorWidth / VoxelSize, InteriorWidth / VoxelSize, 0, NewSrcPolyIndexGrid.get()); + + SDFVolume = NewSDFVolume; + SrcPolyIndexGrid = NewSrcPolyIndexGrid; + + // reduce memory footprint, increase the spareness. + openvdb::tools::pruneLevelSet(SDFVolume->tree(), float(ExteriorWidth / VoxelSize), float(-InteriorWidth / VoxelSize)); + } + catch (std::bad_alloc&) + { + NewSDFVolume.reset(); + NewSrcPolyIndexGrid.reset(); + return; + } + + Sampler.reset(new openvdb::tools::GridSampler(*SDFVolume)); + } + +private: + openvdb::FloatGrid::Ptr SDFVolume; + openvdb::Int32Grid::Ptr SrcPolyIndexGrid; + openvdb::tools::GridSampler::Ptr Sampler; + double VoxelSize; +}; + +int32 IProxyLODVolume::FVector3i::MinIndex() const +{ + return (int32)openvdb::math::MinIndex(openvdb::math::Coord(X, Y, X)); +} + +TUniquePtr IProxyLODVolume::CreateSDFVolumeFromMeshArray(const TArray& Geometry, float Step) +{ + TUniquePtr Volume = MakeUnique(); + + if (Volume == nullptr || !Volume->Initialize(Geometry, Step)) + { + return nullptr; + } + + return Volume; +} diff --git a/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Public/ProxyLODVolume.h b/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Public/ProxyLODVolume.h new file mode 100644 index 000000000000..8488f539a9fd --- 
/dev/null +++ b/Engine/Plugins/Experimental/ProxyLODPlugin/Source/ProxyLOD/Public/ProxyLODVolume.h @@ -0,0 +1,58 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +struct FMeshMergeData; +struct FRawMesh; + +class PROXYLODMESHREDUCTION_API IProxyLODVolume +{ +public: + /** Helper class to extract dimensions in voxel size units of OpenVDB volume */ + struct PROXYLODMESHREDUCTION_API FVector3i + { + int32 X; + int32 Y; + int32 Z; + + FVector3i(int32 InX, int32 InY, int32 InZ) + : X(InX), Y(InY), Z(InZ) + { + } + + FORCEINLINE int32 operator[](int32 Index) const + { + return Index == 0 ? X : (Index == 1 ? Y : (Index == 2 ? Z : 0)); + } + + int32 MinIndex() const; + }; + + /** Create OpenVDB volume from input geometry */ + static TUniquePtr CreateSDFVolumeFromMeshArray(const TArray& Geometry, float Step); + + virtual ~IProxyLODVolume() {} + + /** Get size of voxel cell */ + virtual double GetVoxelSize() const = 0; + + /** Get dimensions of bounding box of OpenVDB volume in multiple of size of voxel cell */ + virtual FVector3i GetBBoxSize() const = 0; + + /** Close any gap in OpenVDB volume which radius is less than given one in given maximum iteration */ + virtual void CloseGaps(const double GapRadius, const int32 MaxDilations) = 0; + + /** Extract iso distance 0 from OpenVDB volume as a RawMesh */ + virtual void ConvertToRawMesh(FRawMesh& OutRawMesh) const = 0; + + /** Expand exterior and interior narrow band of OpenVDB volume by given amount */ + virtual void ExpandNarrowBand(float ExteriorWidth, float InteriorWidth) = 0; + + /** + * Returns distance between given point and iso distance 0 of OpenVDB volume. 
+ * Note: Returned value is clamped between -'dimension of interior narrow band' and +'dimension of exterior narrow band' + */ + virtual float QueryDistance(const FVector& Point) const = 0; +}; diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyCore.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyCore.cpp index e8c25f292c68..3b1596a5ec6b 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyCore.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyCore.cpp @@ -1568,7 +1568,7 @@ void InitializeModule() GPythonPropertyContainer.Reset(NewObject(GetTransientPackage(), TEXT("PythonProperties"))); GPythonTypeContainer.Reset(NewObject(nullptr, TEXT("/Engine/PythonTypes"), RF_Public)); - GPythonTypeContainer->SetPackageFlags(PKG_CompiledIn | PKG_ContainsScript); + GPythonTypeContainer->SetPackageFlags(PKG_ContainsScript); PyGenUtil::FNativePythonModule NativePythonModule; NativePythonModule.PyModuleMethods = PyCoreMethods; diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyEngine.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyEngine.cpp index 2ff494615d7e..0b3c4df2ad07 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyEngine.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyEngine.cpp @@ -41,6 +41,11 @@ PyTypeObject InitializePyActorIteratorType(const char* InTypeName, const char* I PyUtil::SetPythonError(PyExc_TypeError, InSelf, *FString::Printf(TEXT("Failed to convert 'world' (%s) to 'World'"), *PyUtil::GetFriendlyTypename(PyWorldObj))); return -1; } + if (!IterWorld) + { + PyUtil::SetPythonError(PyExc_TypeError, InSelf, *FString::Printf(TEXT("'world' cannot be 'None'"), *PyUtil::GetFriendlyTypename(PyWorldObj))); + return -1; + } 
UClass* IterClass = AActor::StaticClass(); if (PyTypeObj && !PyConversion::NativizeClass(PyTypeObj, IterClass, AActor::StaticClass())) diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyGenUtil.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyGenUtil.cpp index 1bd5d8747141..febcf6617451 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyGenUtil.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyGenUtil.cpp @@ -1187,7 +1187,7 @@ bool IsBlueprintExposedEnumEntry(const UEnum* InEnum, int32 InEnumEntryIndex) bool IsBlueprintExposedProperty(const UProperty* InProp) { - return InProp->HasAnyPropertyFlags(CPF_BlueprintVisible); + return InProp->HasAnyPropertyFlags(CPF_BlueprintVisible | CPF_BlueprintAssignable); } bool IsBlueprintExposedFunction(const UFunction* InFunc) diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyTest.h b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyTest.h index 6c1a6e4e2f84..e1ef9287fdba 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyTest.h +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyTest.h @@ -157,7 +157,7 @@ public: UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Python) FPyTestDelegate Delegate; - UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Python) + UPROPERTY(EditAnywhere, BlueprintAssignable, Category = Python) FPyTestMulticastDelegate MulticastDelegate; UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Python) diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyUtil.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyUtil.cpp index 5f946cbe98bb..a91a724704c6 100644 --- 
a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyUtil.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyUtil.cpp @@ -673,7 +673,7 @@ PyObject* GetPropertyValue(const UStruct* InStruct, void* InStructData, const UP { if (InStruct && InProp && ensureAlways(InStructData)) { - if (!InProp->HasAnyPropertyFlags(CPF_Edit | CPF_BlueprintVisible)) + if (!InProp->HasAnyPropertyFlags(CPF_Edit | CPF_BlueprintVisible | CPF_BlueprintAssignable)) { SetPythonError(PyExc_Exception, InErrorCtxt, *FString::Printf(TEXT("Property '%s' for attribute '%s' on '%s' is protected and cannot be read"), *InProp->GetName(), UTF8_TO_TCHAR(InAttributeName), *InStruct->GetName())); return nullptr; @@ -701,7 +701,7 @@ int SetPropertyValue(const UStruct* InStruct, void* InStructData, PyObject* InVa if (InStruct && InProp && ensureAlways(InStructData)) { - if (!InProp->HasAnyPropertyFlags(CPF_Edit | CPF_BlueprintVisible)) + if (!InProp->HasAnyPropertyFlags(CPF_Edit | CPF_BlueprintVisible | CPF_BlueprintAssignable)) { SetPythonError(PyExc_Exception, InErrorCtxt, *FString::Printf(TEXT("Property '%s' for attribute '%s' on '%s' is protected and cannot be set"), *InProp->GetName(), UTF8_TO_TCHAR(InAttributeName), *InStruct->GetName())); return -1; diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperObject.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperObject.cpp index 6753fa3f7123..70567e0172b1 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperObject.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperObject.cpp @@ -1086,6 +1086,9 @@ struct FPythonGeneratedClassUtil // Map the Unreal class to the Python type InClass->PyType = FPyTypeObjectPtr::NewReference(InPyType); 
FPyWrapperTypeRegistry::Get().RegisterWrappedClassType(InClass->GetFName(), InPyType); + + // Ensure the CDO exists + InClass->GetDefaultObject(); } static bool CreatePropertyFromDefinition(UPythonGeneratedClass* InClass, PyTypeObject* InPyType, const FString& InFieldName, FPyUPropertyDef* InPyPropDef) diff --git a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperTypeRegistry.cpp b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperTypeRegistry.cpp index 6e419099d237..842c3e22c676 100644 --- a/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperTypeRegistry.cpp +++ b/Engine/Plugins/Experimental/PythonScriptPlugin/Source/PythonScriptPlugin/Private/PyWrapperTypeRegistry.cpp @@ -1775,7 +1775,7 @@ PyTypeObject* FPyWrapperTypeRegistry::GenerateWrappedDelegateType(const UFunctio // Generate the proxy class needed to wrap Python callables in Unreal delegates UClass* PythonCallableForDelegateClass = nullptr; { - PythonCallableForDelegateClass = NewObject(GetTransientPackage(), *FString::Printf(TEXT("%s__PythonCallable"), *DelegateBaseTypename), RF_Public); + PythonCallableForDelegateClass = NewObject(GetPythonTypeContainer(), *FString::Printf(TEXT("%s__PythonCallable"), *DelegateBaseTypename), RF_Public); UFunction* PythonCallableForDelegateFunc = (UFunction*)StaticDuplicateObject(InDelegateSignature, PythonCallableForDelegateClass, UPythonCallableForDelegate::GeneratedFuncName, RF_AllFlags, UFunction::StaticClass()); PythonCallableForDelegateFunc->FunctionFlags = (PythonCallableForDelegateFunc->FunctionFlags | FUNC_Native) & ~(FUNC_Delegate | FUNC_MulticastDelegate); PythonCallableForDelegateFunc->SetNativeFunc(&UPythonCallableForDelegate::CallPythonNative); @@ -1785,6 +1785,7 @@ PyTypeObject* FPyWrapperTypeRegistry::GenerateWrappedDelegateType(const UFunctio PythonCallableForDelegateClass->Bind(); PythonCallableForDelegateClass->StaticLink(true); 
PythonCallableForDelegateClass->AssembleReferenceTokenStream(); + PythonCallableForDelegateClass->GetDefaultObject(); } if (InDelegateSignature->HasAnyFunctionFlags(FUNC_MulticastDelegate)) diff --git a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/LevelSequencePlaybackController.cpp b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/LevelSequencePlaybackController.cpp index 4379eb781d81..393edbda8173 100644 --- a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/LevelSequencePlaybackController.cpp +++ b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/LevelSequencePlaybackController.cpp @@ -299,6 +299,18 @@ FString ULevelSequencePlaybackController::GetCurrentRecordingTakeName() const return FText::AsNumber(NextTakeNumber, &LeadingZeroesFormatter).ToString(); } +void ULevelSequencePlaybackController::OnObjectSpawned(UObject * InObject, const FMovieSceneEvaluationOperand & Operand) +{ + Super::OnObjectSpawned(InObject, Operand); + + // Camera actors spawn with lock to hmd set to true by default. Unlock them here to prevent unwanted movement. 
+ ACameraActor* CameraActor = Cast(InObject); + if (CameraActor && CameraActor->GetCameraComponent()) + { + CameraActor->GetCameraComponent()->bLockToHmd = false; + } +} + void ULevelSequencePlaybackController::PlayToEnd() { if (Sequence) diff --git a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/VirtualCameraMovementComponent.cpp b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/VirtualCameraMovementComponent.cpp index 8bd116c9c1c3..c3059e2b3c78 100644 --- a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/VirtualCameraMovementComponent.cpp +++ b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Private/VirtualCameraMovementComponent.cpp @@ -24,12 +24,30 @@ UVirtualCameraMovementComponent::UVirtualCameraMovementComponent(const FObjectIn void UVirtualCameraMovementComponent::AddInputVector(FVector WorldVector, bool bForce /*=false*/) { + if (WorldVector.IsZero()) + { + return; + } + ApplyLocationScaling(WorldVector); ApplyLocationLocks(WorldVector); TargetLocation += WorldVector; } +void UVirtualCameraMovementComponent::AddInputVectorFromController(FVector WorldVector, EVirtualCameraAxis MovementScaleAxis) +{ + if (WorldVector.IsZero()) + { + return; + } + + WorldVector *= AxisSettings[MovementScaleAxis].MovementScale; + ApplyLocationLocks(WorldVector); + + TargetLocation += WorldVector; +} + void UVirtualCameraMovementComponent::ProcessMovementInput(const FVector& TrackerLocation, const FRotator& TrackerRotation) { FVector DeltaMovement = TrackerLocation - PreviousTrackerLocation; @@ -128,14 +146,14 @@ void UVirtualCameraMovementComponent::OnMoveForward(const float InValue) { FVector InputVector = UpdatedComponent->GetForwardVector(); InputVector = GetOwner()->GetActorRotation().UnrotateVector(InputVector); - AddInputVector(InputVector * InValue); + AddInputVectorFromController(InputVector * InValue, EVirtualCameraAxis::LocationX); } void 
UVirtualCameraMovementComponent::OnMoveRight(const float InValue) { FVector InputVector = UpdatedComponent->GetRightVector(); InputVector = GetOwner()->GetActorRotation().UnrotateVector(InputVector); - AddInputVector(InputVector * InValue); + AddInputVectorFromController(InputVector * InValue, EVirtualCameraAxis::LocationY); } void UVirtualCameraMovementComponent::OnMoveUp(const float InValue) @@ -149,7 +167,7 @@ void UVirtualCameraMovementComponent::OnMoveUp(const float InValue) { FVector InputVector = UpdatedComponent->GetUpVector(); InputVector = GetOwner()->GetActorRotation().UnrotateVector(InputVector); - AddInputVector(InputVector * InValue); + AddInputVectorFromController(InputVector * InValue, EVirtualCameraAxis::LocationZ); } } @@ -220,12 +238,21 @@ FVector UVirtualCameraMovementComponent::GetStabilizedDeltaLocation() const void UVirtualCameraMovementComponent::ApplyLocationScaling(FVector& VectorToAdjust) { + // Get the axes to scale along FVector ForwardVector; FVector RightVector; FVector UpVector; GetDirectionVectorsForCamera(ForwardVector, RightVector, UpVector); + // Orient to global Z up, but maintain yaw + ForwardVector = FVector::VectorPlaneProject(ForwardVector, FVector::UpVector); + RightVector = FVector::VectorPlaneProject(RightVector, FVector::UpVector); + UpVector = FVector::UpVector; + + ForwardVector.Normalize(); + RightVector.Normalize(); + FVector XComponent = VectorToAdjust.ProjectOnTo(ForwardVector); FVector YComponent = VectorToAdjust.ProjectOnTo(RightVector); FVector ZComponent = VectorToAdjust.ProjectOnTo(UpVector); diff --git a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/LevelSequencePlaybackController.h b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/LevelSequencePlaybackController.h index 72fb3cfa9986..6c17cd78d4e9 100644 --- a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/LevelSequencePlaybackController.h +++ 
b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/LevelSequencePlaybackController.h @@ -152,6 +152,11 @@ protected: /** Pointer to the sequence recorder module */ IAssetRegistry* AssetRegistry; + /** + * Overriding to allow playback controller to intercept and modify objects spawned by sequencer, for example disabling attach to HMD on spawned cameras + */ + virtual void OnObjectSpawned(UObject* InObject, const FMovieSceneEvaluationOperand& Operand) override; + #if WITH_EDITOR /** Pointer to the sequence recorder module */ ISequenceRecorder* Recorder; diff --git a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/VirtualCameraMovementComponent.h b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/VirtualCameraMovementComponent.h index 1e98b08eed25..88b04c1c7245 100644 --- a/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/VirtualCameraMovementComponent.h +++ b/Engine/Plugins/Experimental/VirtualCamera/Source/VirtualCamera/Public/VirtualCameraMovementComponent.h @@ -38,6 +38,14 @@ public: */ virtual void AddInputVector(FVector WorldVector, bool bForce = false) override; + /** + * Adds the given vector to the accumulated input in world space. Similar to AddInputVector(). + * This calls input scaling specific to how controller (joystick) input should be handled. + * @param WorldVector - Direction in world space to apply input. + * @param MovementScaleAxis - Which axis movement scale to use. + */ + void AddInputVectorFromController(FVector WorldVector, EVirtualCameraAxis MovementScaleAxis); + /** * Update the position and rotation of the camera. 
* @param Location - The current location of the tracker being used for input diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/MovieSceneNiagaraSystemTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/MovieSceneNiagaraSystemTrack.cpp index 12e5f1029106..cd35fd22f34e 100644 --- a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/MovieSceneNiagaraSystemTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/MovieSceneNiagaraSystemTrack.cpp @@ -14,7 +14,7 @@ UMovieSceneSection* UMovieSceneNiagaraSystemTrack::CreateNewSection() { - return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneNiagaraSystemTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraBoolParameterTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraBoolParameterTrack.cpp index c60cc368f44f..49343676bdbd 100644 --- a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraBoolParameterTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraBoolParameterTrack.cpp @@ -8,7 +8,7 @@ UMovieSceneSection* UMovieSceneNiagaraBoolParameterTrack::CreateNewSection() { - return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneNiagaraBoolParameterTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraColorParameterTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraColorParameterTrack.cpp index aa9c266256bd..dbb007f3cec2 100644 --- 
a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraColorParameterTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraColorParameterTrack.cpp @@ -7,7 +7,7 @@ UMovieSceneSection* UMovieSceneNiagaraColorParameterTrack::CreateNewSection() { - return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneNiagaraColorParameterTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraFloatParameterTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraFloatParameterTrack.cpp index a2ed3475fbf6..0b4345e2a97c 100644 --- a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraFloatParameterTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraFloatParameterTrack.cpp @@ -8,7 +8,7 @@ UMovieSceneSection* UMovieSceneNiagaraFloatParameterTrack::CreateNewSection() { - return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneNiagaraFloatParameterTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraIntegerParameterTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraIntegerParameterTrack.cpp index 2de640472cab..8d0aecb460bc 100644 --- a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraIntegerParameterTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraIntegerParameterTrack.cpp @@ -7,7 +7,7 @@ UMovieSceneSection* UMovieSceneNiagaraIntegerParameterTrack::CreateNewSection() { - 
return NewObject(this); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneNiagaraIntegerParameterTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraVectorParameterTrack.cpp b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraVectorParameterTrack.cpp index a82474d5d4b1..bc9aff320441 100644 --- a/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraVectorParameterTrack.cpp +++ b/Engine/Plugins/FX/Niagara/Source/Niagara/Private/MovieScene/Parameters/MovieSceneNiagaraVectorParameterTrack.cpp @@ -7,7 +7,7 @@ UMovieSceneSection* UMovieSceneNiagaraVectorParameterTrack::CreateNewSection() { - UMovieSceneVectorSection* VectorSection = NewObject(this); + UMovieSceneVectorSection* VectorSection = NewObject(this, NAME_None, RF_Transactional); VectorSection->SetChannelsUsed(ChannelsUsed); return VectorSection; } diff --git a/Engine/Plugins/FX/Niagara/Source/NiagaraEditor/Private/NiagaraNodeReroute.cpp b/Engine/Plugins/FX/Niagara/Source/NiagaraEditor/Private/NiagaraNodeReroute.cpp index 9994cd26bb46..3a198b5c14d0 100644 --- a/Engine/Plugins/FX/Niagara/Source/NiagaraEditor/Private/NiagaraNodeReroute.cpp +++ b/Engine/Plugins/FX/Niagara/Source/NiagaraEditor/Private/NiagaraNodeReroute.cpp @@ -41,7 +41,7 @@ void UNiagaraNodeReroute::AllocateDefaultPins() FText UNiagaraNodeReroute::GetTooltipText() const { - return LOCTEXT("RerouteNodeDesc", ""); + return FText::GetEmpty(); } FText UNiagaraNodeReroute::GetNodeTitle(ENodeTitleType::Type TitleType) const diff --git a/Engine/Plugins/Media/AjaMedia/AjaMedia.uplugin b/Engine/Plugins/Media/AjaMedia/AjaMedia.uplugin index 527b26902e2d..da712deadbfa 100644 --- a/Engine/Plugins/Media/AjaMedia/AjaMedia.uplugin +++ b/Engine/Plugins/Media/AjaMedia/AjaMedia.uplugin @@ -50,6 +50,10 @@ { "Name" : 
"MediaFrameworkUtilities", "Enabled" : true + }, + { + "Name" : "MediaPlayerEditor", + "Enabled" : true } ] } diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Aja/Aja.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Aja/Aja.cpp index e3d8bfbaa0c2..1de58c74e3a6 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Aja/Aja.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Aja/Aja.cpp @@ -80,8 +80,8 @@ FTimespan FAja::ConvertAJATimecode2Timespan(const AJA::FTimecode& InTimecode, co //With FrameRate faster than 30FPS, max frame number will still be small than 30 //Get by how much we need to divide the actual count. - const float FrameRate = InFPS.AsDecimal(); - const float DividedFrameRate = FrameRate > 30.0f ? (FrameRate * 30.0f) / FrameRate : FrameRate; + const double FrameRate = InFPS.AsDecimal(); + const double DividedFrameRate = FrameRate > 30.0f ? (FrameRate * 30.0f) / FrameRate : FrameRate; FTimespan NewTimespan; if (PreviousTimeCode == InTimecode) diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaModule.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaModule.cpp index 26b4d364d5e3..5de0b190d7c2 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaModule.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaModule.cpp @@ -17,30 +17,10 @@ DEFINE_LOG_CATEGORY(LogAjaMedia); #define LOCTEXT_NAMESPACE "AjaMediaModule" -namespace AJAHelpers -{ - FAjaMediaMode FromVideoFormatDescriptor(int32 InDeviceIndex, const AJA::AJAVideoFormats::VideoFormatDescriptor& InDescriptor) - { - FAjaMediaMode Mode; - Mode.DeviceIndex = InDeviceIndex; - Mode.ModeName = InDescriptor.FormatedText; - Mode.VideoFormatIndex = InDescriptor.VideoFormatIndex; - Mode.FrameRate = FFrameRate(InDescriptor.FrameRateNumerator, InDescriptor.FrameRateDenominator); - Mode.TargetSize = FIntPoint(InDescriptor.Width, InDescriptor.Height); - - if 
(!InDescriptor.bIsProgressive) - { - Mode.FrameRate.Numerator *= 2; - } - - return Mode; - } -} - /** * Implements the AJAMedia module. */ -class FAjaMediaModule : public IAjaMediaModule, public FSelfRegisteringExec +class FAjaMediaModule : public IAjaMediaModule { public: @@ -74,104 +54,6 @@ public: { FAja::Shutdown(); } - - TStrongObjectPtr CustomTimeStep; - TStrongObjectPtr TimecodeProvider; - virtual bool Exec(UWorld* Inworld, const TCHAR* Cmd, FOutputDevice& Ar) override - { - if (FParse::Command(&Cmd, TEXT("AJA"))) - { - if (FParse::Command(&Cmd, TEXT("CustomTimeStep"))) - { - if (FParse::Command(&Cmd, TEXT("Start"))) - { - CustomTimeStep.Reset(NewObject()); - - CustomTimeStep->MediaPort.PortIndex = 0; - CustomTimeStep->MediaPort.DeviceIndex = 0; - FParse::Value(Cmd, TEXT("Port="), CustomTimeStep->MediaPort.PortIndex); - FParse::Value(Cmd, TEXT("Device="), CustomTimeStep->MediaPort.DeviceIndex); - FParse::Bool(Cmd, TEXT("EnableOverrunDetection="), CustomTimeStep->bEnableOverrunDetection); - - bool bOverrideProjectSetting = false; - FParse::Bool(Cmd, TEXT("OverrideProjectSetting="), bOverrideProjectSetting); - - if (bOverrideProjectSetting) - { - AJA::FAJAVideoFormat VideoFormatIndex = AjaMediaOption::DefaultVideoFormat; - FParse::Value(Cmd, TEXT("VideoFormat="), VideoFormatIndex); - - AJA::AJAVideoFormats::VideoFormatDescriptor Descriptor = AJA::AJAVideoFormats::GetVideoFormat(VideoFormatIndex); - if (Descriptor.bValid) - { - const FAjaMediaMode MediaMode = AJAHelpers::FromVideoFormatDescriptor(CustomTimeStep->MediaPort.DeviceIndex, Descriptor); - CustomTimeStep->OverrideMediaMode(MediaMode); - } - } - - { - GEngine->SetCustomTimeStep(CustomTimeStep.Get()); - } - } - else if (FParse::Command(&Cmd, TEXT("Stop"))) - { - if (GEngine->GetCustomTimeStep() == CustomTimeStep.Get()) - { - GEngine->SetCustomTimeStep(nullptr); - } - CustomTimeStep.Reset(); - } - return true; - } - - if (FParse::Command(&Cmd, TEXT("TimecodeProvider"))) - { - if (FParse::Command(&Cmd, 
TEXT("Start"))) - { - TimecodeProvider.Reset(NewObject()); - - TimecodeProvider->MediaPort.PortIndex = 0; - TimecodeProvider->MediaPort.DeviceIndex = 0; - FParse::Value(Cmd, TEXT("Port="), TimecodeProvider->MediaPort.PortIndex); - FParse::Value(Cmd, TEXT("Device="), TimecodeProvider->MediaPort.DeviceIndex); - int32 TimecodeFormatInt = 1; - if (FParse::Value(Cmd, TEXT("TimecodeFormat="), TimecodeFormatInt)) - { - TimecodeFormatInt = FMath::Clamp(TimecodeFormatInt, 0, (int32)EAjaMediaTimecodeFormat::VITC); - TimecodeProvider->TimecodeFormat = (EAjaMediaTimecodeFormat)TimecodeFormatInt; - } - - bool bOverrideProjectSetting = false; - FParse::Bool(Cmd, TEXT("OverrideProjectSetting="), bOverrideProjectSetting); - - if (bOverrideProjectSetting) - { - AJA::FAJAVideoFormat VideoFormatIndex = AjaMediaOption::DefaultVideoFormat; - FParse::Value(Cmd, TEXT("VideoFormat="), VideoFormatIndex); - - AJA::AJAVideoFormats::VideoFormatDescriptor Descriptor = AJA::AJAVideoFormats::GetVideoFormat(VideoFormatIndex); - if (Descriptor.bValid) - { - const FAjaMediaMode MediaMode = AJAHelpers::FromVideoFormatDescriptor(TimecodeProvider->MediaPort.DeviceIndex, Descriptor); - TimecodeProvider->OverrideMediaMode(MediaMode); - } - } - - GEngine->SetTimecodeProvider(TimecodeProvider.Get()); - } - else if (FParse::Command(&Cmd, TEXT("Stop"))) - { - if (GEngine->GetTimecodeProvider() == TimecodeProvider.Get()) - { - GEngine->SetTimecodeProvider(nullptr); - } - TimecodeProvider.Reset(); - } - return true; - } - } - return false; - } }; IMPLEMENT_MODULE(FAjaMediaModule, AjaMedia); diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaPrivate.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaPrivate.h index b416c9b277df..d8c591e82db7 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaPrivate.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/AjaMediaPrivate.h @@ -20,8 +20,8 @@ DECLARE_LOG_CATEGORY_EXTERN(LogAjaMedia, Log, All); 
namespace AjaMediaOption { - static const FName FrameRateNumerator("FrameRateNumerator"); - static const FName FrameRateDenominator("FrameRateDenominator"); + static const FName DeviceIndex("DeviceIndex"); + static const FName PortIndex("PortIndex"); static const FName TimecodeFormat("TimecodeFormat"); static const FName LogDropFrame("LogDropFrame"); static const FName EncodeTimecodeInTexel("EncodeTimecodeInTexel"); @@ -38,8 +38,3 @@ namespace AjaMediaOption static const AJA::FAJAVideoFormat DefaultVideoFormat = 9; // 1080p3000 } - -namespace AJAHelpers -{ - FAjaMediaMode FromVideoFormatDescriptor(int32 InDeviceIndex, const AJA::AJAVideoFormats::VideoFormatDescriptor& InDescriptor); -} diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaCustomTimeStep.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaCustomTimeStep.cpp index e1fc5cdd1a92..22944a05c8fa 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaCustomTimeStep.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaCustomTimeStep.cpp @@ -64,7 +64,7 @@ bool UAjaCustomTimeStep::Initialize(UEngine* InEngine) if (!FAja::IsInitialized()) { State = ECustomTimeStepSynchronizationState::Error; - UE_LOG(LogAjaMedia, Warning, TEXT("The CustomTimeStep '%s' can't be initialized. Aja is not initialized on your machine."), *GetName()); + UE_LOG(LogAjaMedia, Error, TEXT("The CustomTimeStep '%s' can't be initialized. Aja is not initialized on your machine."), *GetName()); return false; } @@ -76,11 +76,21 @@ bool UAjaCustomTimeStep::Initialize(UEngine* InEngine) State = ECustomTimeStepSynchronizationState::Error; const bool bAddProjectSettingMessage = MediaPort.IsValid() && !bIsDefaultModeOverriden; - const FString OverrideString = bAddProjectSettingMessage ? TEXT("The project settings haven't been set for this port.") : TEXT(""); - UE_LOG(LogAjaMedia, Warning, TEXT("The CustomTimeStep '%s' is invalid. 
%s %s"), *GetName(), *FailureReason, *OverrideString); + const TCHAR* OverrideString = bAddProjectSettingMessage ? TEXT("The project settings haven't been set for this port.") : TEXT(""); + UE_LOG(LogAjaMedia, Error, TEXT("The CustomTimeStep '%s' is invalid. %s %s"), *GetName(), *FailureReason, OverrideString); return false; } + if (bUseReferenceIn && bWaitForFrameToBeReady) + { + UE_LOG(LogAjaMedia, Warning, TEXT("The CustomTimeStep '%s' use both the reference and wait for the frame to be ready. These options are not compatible."), *GetName()); + } + + if (bWaitForFrameToBeReady && CurrentMediaMode.bIsInterlacedStandard) + { + UE_LOG(LogAjaMedia, Warning, TEXT("The CustomTimeStep '%s' is waiting for the frame to be ready and interlaced picture is not supported."), *GetName()); + } + check(SyncCallback == nullptr); SyncCallback = new FAJACallback(this); @@ -91,6 +101,7 @@ bool UAjaCustomTimeStep::Initialize(UEngine* InEngine) Options.CallbackInterface = SyncCallback; Options.VideoFormatIndex = CurrentMediaMode.VideoFormatIndex; Options.bOutput = bUseReferenceIn; + Options.bWaitForFrameToBeReady = bWaitForFrameToBeReady && !bUseReferenceIn; Options.TimecodeFormat = AJA::ETimecodeFormat::TCF_None; if (!Options.bOutput) @@ -164,7 +175,7 @@ bool UAjaCustomTimeStep::UpdateTimeStep(UEngine* InEngine) // Use fixed delta time and update time. 
FApp::SetDeltaTime(GetFixedFrameRate().AsInterval()); - FApp::SetCurrentTime(FApp::GetCurrentTime() + FApp::GetDeltaTime()); + FApp::SetCurrentTime(FPlatformTime::Seconds()); bRunEngineTimeStep = false; bDidAValidUpdateTimeStep = true; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaMediaFinder.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaMediaFinder.cpp index 3719516039f6..2271701d8680 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaMediaFinder.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaMediaFinder.cpp @@ -6,55 +6,109 @@ #include "Aja.h" #include "AJALib.h" -#include "Templates/UniquePtr.h" +#include "Misc/FeedbackContext.h" +#include "UObject/PropertyPortFlags.h" -/* - * FAjaMediaSourceId interface - */ +#define LOCTEXT_NAMESPACE "AjaMediaFinder" -const TCHAR* FAjaMediaPort::Protocol = TEXT("aja"); + +/************************************************************************/ +// FAjaMediaSourceId interface +/************************************************************************/ namespace AjaMediaSourceId { const TCHAR* ProtocolPath = TEXT("aja://"); const TCHAR* Device = TEXT("device"); - const TCHAR* Port = TEXT("port"); + const TCHAR* Single = TEXT("single"); + const TCHAR* Dual = TEXT("dual"); + const TCHAR* QuadSquare = TEXT("quadSq"); + const TCHAR* QuadTsi = TEXT("quadTSI"); - const int32 ProtocolLength = FPlatformString::Strlen(AjaMediaSourceId::ProtocolPath); - const int32 DeviceLength = FPlatformString::Strlen(AjaMediaSourceId::Device); - const int32 InputLength = FPlatformString::Strlen(AjaMediaSourceId::Port); - - const int32 DeviceNameBufferSize = 64; - const int32 ModeNameBufferSize = 64; - - bool GetNumber(TCHAR InInput, int32& OutId) + const TCHAR* GetLinkText(EAjaLinkType InLinkType, EAjaQuadLinkType InQuadLinkType) { - OutId = 0; - if (InInput < '0' && InInput > '9') + const TCHAR* Result = AjaMediaSourceId::Single; + switch 
(InLinkType) + { + case EAjaLinkType::DualLink: Result = AjaMediaSourceId::Dual; break; + case EAjaLinkType::QuadLink: Result = (InQuadLinkType == EAjaQuadLinkType::Square ? AjaMediaSourceId::QuadSquare : AjaMediaSourceId::QuadTsi); break; + } + return Result; + } + + FAjaMediaMode ToMediaMode(const AJA::AJAVideoFormats::VideoFormatDescriptor& InDescriptor) + { + FAjaMediaMode MediaMode; + MediaMode.TargetSize = FIntPoint(InDescriptor.ResolutionWidth, InDescriptor.ResolutionHeight); + MediaMode.bIsProgressiveStandard = InDescriptor.bIsProgressiveStandard; + MediaMode.bIsInterlacedStandard = InDescriptor.bIsInterlacedStandard; + MediaMode.bIsPsfStandard = InDescriptor.bIsPsfStandard; + MediaMode.FrameRate = FFrameRate(InDescriptor.FrameRateNumerator, InDescriptor.FrameRateDenominator); + MediaMode.VideoFormatIndex = InDescriptor.VideoFormatIndex; + + if (InDescriptor.bIsInterlacedStandard) + { + MediaMode.FrameRate.Numerator *= 2; + } + + return MediaMode; + } + + bool IsVideoFormatValid(const AJA::AJAVideoFormats::VideoFormatDescriptor& InDescriptor) + { + if (!InDescriptor.bIsValid) + { + return false; + } + if (InDescriptor.bIsPsfStandard || InDescriptor.bIsVideoFormatB) + { + return false; + } + if (InDescriptor.bIsSD || InDescriptor.bIs2K || InDescriptor.bIs4K) { return false; } - OutId = InInput - '0'; return true; } } +/************************************************************************/ +// FAjaMediaDevice implementation +/************************************************************************/ + +FAjaMediaDevice::FAjaMediaDevice() + : DeviceIndex(INDEX_NONE) +{ } + +/************************************************************************/ +// FAjaMediaPort implementation +/************************************************************************/ + FAjaMediaPort::FAjaMediaPort() : DeviceIndex(INDEX_NONE) , PortIndex(INDEX_NONE) + , LinkType(EAjaLinkType::SingleLink) + , QuadLinkType(EAjaQuadLinkType::Square) { } -FAjaMediaPort::FAjaMediaPort(const 
FString& InDeviceName, int32 InDeviceIndex, int32 InPortIndex) +FAjaMediaPort::FAjaMediaPort(FName InDeviceName, int32 InDeviceIndex, int32 InPortIndex) : DeviceName(InDeviceName) , DeviceIndex(InDeviceIndex) , PortIndex(InPortIndex) + , LinkType(EAjaLinkType::SingleLink) + , QuadLinkType(EAjaQuadLinkType::Square) { } FString FAjaMediaPort::ToString() const { if (IsValid()) { - return FString::Printf(TEXT("%s [%s]"), *DeviceName, *ToUrl()); + return FString::Printf(TEXT("%s [%s%d/%s%d]") + , *DeviceName.ToString() + , AjaMediaSourceId::Device + , DeviceIndex + , AjaMediaSourceId::GetLinkText(LinkType, QuadLinkType) + , PortIndex); } return TEXT(""); } @@ -63,7 +117,12 @@ FString FAjaMediaPort::ToUrl() const { if (IsValid()) { - return FString::Printf(TEXT("aja://device%d/port%d"), DeviceIndex, (PortIndex)); + return FString::Printf(TEXT("%s%s%d/%s%d") + , AjaMediaSourceId::ProtocolPath + , AjaMediaSourceId::Device + , DeviceIndex + , AjaMediaSourceId::GetLinkText(LinkType, QuadLinkType) + , PortIndex); } return TEXT("aja://"); } @@ -73,86 +132,282 @@ bool FAjaMediaPort::IsValid() const return DeviceIndex != INDEX_NONE && PortIndex != INDEX_NONE; } -bool FAjaMediaPort::FromUrl(const FString& Url, bool bDiscoverDeviceName) -{ - const int32 NumberLength = 1; - const int32 SlashLength = 1; - const int32 ValidUrlLength = AjaMediaSourceId::ProtocolLength + AjaMediaSourceId::DeviceLength + NumberLength + SlashLength + AjaMediaSourceId::InputLength + NumberLength; - - DeviceName.Reset(); - DeviceIndex = INDEX_NONE; - PortIndex = INDEX_NONE; - - if (Url.Len() != ValidUrlLength || !Url.StartsWith(AjaMediaSourceId::ProtocolPath)) - { - return false; - } - - int32 TempDeviceIndex; - int32 TempPortIndex; - if(!AjaMediaSourceId::GetNumber(Url[AjaMediaSourceId::ProtocolLength + AjaMediaSourceId::DeviceLength], TempDeviceIndex) - || !AjaMediaSourceId::GetNumber(Url[AjaMediaSourceId::ProtocolLength + AjaMediaSourceId::DeviceLength + NumberLength + SlashLength + 
AjaMediaSourceId::InputLength], TempPortIndex)) - { - return false; - } - - DeviceIndex = TempDeviceIndex; - PortIndex = TempPortIndex; - - bool bResult = true; - if (bDiscoverDeviceName) - { - bResult = FAja::IsInitialized(); - if (bResult) - { - TUniquePtr DeviceScanner = MakeUnique(); - if (DeviceScanner) - { - int32 NumDevices = DeviceScanner->GetNumDevices(); - bResult = DeviceIndex < NumDevices; - if (bResult) - { - TCHAR DeviceNameBuffer[AJA::AJADeviceScanner::FormatedTextSize]; - bResult = DeviceScanner->GetDeviceTextId(DeviceIndex, DeviceNameBuffer); - if (bResult) - { - DeviceName = DeviceNameBuffer; - } - } - } - } - } - - return bResult; -} - -/* - * FAjaMediaMode interface - */ +/************************************************************************/ +// FAjaMediaMode implementation +/************************************************************************/ FAjaMediaMode::FAjaMediaMode() - : DeviceIndex(INDEX_NONE) - , VideoFormatIndex(INDEX_NONE) + : VideoFormatIndex(INDEX_NONE) { } -FString FAjaMediaMode::ToString() const -{ - if (IsValid()) - { - return FString::Printf(TEXT("%s"), *ModeName); - } - return TEXT(""); -} - bool FAjaMediaMode::IsValid() const { return VideoFormatIndex != INDEX_NONE; } -/* - * UAjaMediaFinder interface - */ +FString FAjaMediaMode::ToString() const +{ + const TCHAR* Standard = TEXT(""); + if (TargetSize.Y == 1080 || TargetSize.Y == 720) + { + if (bIsInterlacedStandard) + { + Standard = TEXT("i"); + } + else if (bIsProgressiveStandard) + { + Standard = TEXT("p"); + } + else if (bIsPsfStandard) + { + Standard = TEXT("psf"); + } + } + return FString::Printf(TEXT("%s%s%s"), *FAjaMediaFinder::ResolutionToPrettyText(TargetSize).ToString(), Standard, *FrameRate.ToPrettyText().ToString()); +} + +/************************************************************************/ +// FAjaMediaConfiguration implementation +/************************************************************************/ + 
+FAjaMediaConfiguration::FAjaMediaConfiguration() + : bInput(true) +{ +} + +bool FAjaMediaConfiguration::IsValid() const +{ + return MediaPort.IsValid() && MediaMode.IsValid(); +} + +FText FAjaMediaConfiguration::ToText() const +{ + if (IsValid()) + { + FNumberFormattingOptions Options; + Options.UseGrouping = false; + return FText::Format(LOCTEXT("MediaConfigurationInputText", "{0}[{1}]/{2}/{3}{4}/{5}") + , FText::FromName(MediaPort.DeviceName) + , FText::AsNumber(MediaPort.DeviceIndex) + , FAjaMediaFinder::LinkTypeToPrettyText(MediaPort.LinkType, MediaPort.PortIndex, true) + , FText::AsNumber(MediaMode.TargetSize.Y, &Options) + , (MediaMode.bIsInterlacedStandard ? FText::FromString(TEXT("i")) : (MediaMode.bIsPsfStandard ? FText::FromString(TEXT("psf")) : FText::FromString(TEXT("p")))) + , MediaMode.FrameRate.ToPrettyText()); + } + + return LOCTEXT("Invalid", ""); +} + +/************************************************************************/ +// FAjaMediaFinder interface +/************************************************************************/ + +FText FAjaMediaFinder::LinkTypeToPrettyText(EAjaLinkType InLinkType, int32 InChannel, bool bShortVersion) +{ + if (bShortVersion) + { + switch (InLinkType) + { + case EAjaLinkType::SingleLink: return FText::Format(LOCTEXT("ChannelShortLabel", "Single[{0}]"), FText::AsNumber(InChannel)); + case EAjaLinkType::DualLink: return FText::Format(LOCTEXT("DualLinkShortLabel", "Dual[{0}-{1}]"), FText::AsNumber(InChannel), FText::AsNumber(InChannel + 1)); + case EAjaLinkType::QuadLink: return FText::Format(LOCTEXT("QuadLinkShortLabel", "Quad[{0}-{1}]"), FText::AsNumber(InChannel), FText::AsNumber(InChannel + 3)); + } + } + else + { + switch (InLinkType) + { + case EAjaLinkType::SingleLink: return FText::Format(LOCTEXT("ChannelLabel", "Single Link {0}"), FText::AsNumber(InChannel)); + case EAjaLinkType::DualLink: return FText::Format(LOCTEXT("DualLinkLabel", "Dual Link {0}-{1}"), FText::AsNumber(InChannel), 
FText::AsNumber(InChannel + 1)); + case EAjaLinkType::QuadLink: return FText::Format(LOCTEXT("QuadLinkLabel", "Quad Link {0}-{1}"), FText::AsNumber(InChannel), FText::AsNumber(InChannel + 3)); + } + } + return FText::GetEmpty(); +} + + +FText FAjaMediaFinder::QuadLinkTypeToPrettyText(EAjaQuadLinkType InLinkType) +{ + switch (InLinkType) + { + case EAjaQuadLinkType::Square: return LOCTEXT("QuadLinkSquareLabel", "Square"); + case EAjaQuadLinkType::TSI: return LOCTEXT("QuadLink_TsiLabel", "TSI"); + } + return FText::GetEmpty(); +} + + +FText FAjaMediaFinder::ResolutionToPrettyText(FIntPoint InResolution) +{ + if (InResolution.X == 720 && InResolution.Y == 486) { return LOCTEXT("NTSC_Resolution", "NTSC SD"); } + if (InResolution.X == 720 && InResolution.Y == 576) { return LOCTEXT("PAL_Resolution", "PAL SD"); } + if (InResolution.X == 1280 && InResolution.Y == 720) { return LOCTEXT("HD720_Resolution", "HD 720"); } + if (InResolution.X == 1920 && InResolution.Y == 1080) { return LOCTEXT("HD1080_Resolution", "HD 1080"); } + if (InResolution.X == 2048 && InResolution.Y == 1080) { return LOCTEXT("2K_Resolution", "2K"); } + if (InResolution.X == 2048 && InResolution.Y == 1556) { return LOCTEXT("2KFull_Resolution", "2K Full"); } + if (InResolution.X == 3840 && InResolution.Y == 2160) { return LOCTEXT("UHD_Resolution", "UHD"); } + if (InResolution.X == 4096 && InResolution.Y == 2160) { return LOCTEXT("4K_Resolution", "4K"); } + return FText::Format(LOCTEXT("Undefined_Resolution", "{0}x{1}"), FText::AsNumber(InResolution.X), FText::AsNumber(InResolution.Y)); +} + + +namespace AjaMediaFinder +{ + bool GetConfigurations(TArray& OutConfigurations, bool bAllowInput, bool bAllowOutput) + { + const int32 MaxNumberOfChannel = 8; + + OutConfigurations.Reset(); + if (!FAja::IsInitialized()) + { + return false; + } + + AJA::AJADeviceScanner DeviceScanner; + int32 NumDevices = DeviceScanner.GetNumDevices(); + for (int32 DeviceIndex = 0; DeviceIndex < NumDevices; ++DeviceIndex) + { + TCHAR 
DeviceNameBuffer[AJA::AJADeviceScanner::FormatedTextSize]; + if (DeviceScanner.GetDeviceTextId(DeviceIndex, DeviceNameBuffer)) + { + AJA::AJADeviceScanner::DeviceInfo DeviceInfo; + if (DeviceScanner.GetDeviceInfo(DeviceIndex, DeviceInfo)) + { + int32 OutputCount; + int32 InputCount; + if (DeviceInfo.bIsSupported && DeviceScanner.GetNumberVideoChannels(DeviceIndex, InputCount, OutputCount)) + { + if (bAllowInput && !DeviceInfo.bCanDoCapture) + { + continue; + } + + if (bAllowOutput && !DeviceInfo.bCanDoPlayback) + { + continue; + } + + InputCount = FMath::Min(InputCount, MaxNumberOfChannel); + OutputCount = FMath::Min(OutputCount, MaxNumberOfChannel); + const int32 PortCount = (bAllowInput && bAllowOutput) ? FMath::Min(InputCount, OutputCount) : (bAllowOutput ? OutputCount : InputCount); + + AJA::AJAVideoFormats FrameFormats(DeviceIndex, false); + const int32 NumSupportedFormat = FrameFormats.GetNumSupportedFormat(); + + FAjaMediaConfiguration MediaConfiguration; + MediaConfiguration.MediaPort.DeviceIndex = DeviceIndex; + MediaConfiguration.MediaPort.DeviceName = FName(DeviceNameBuffer); + MediaConfiguration.bInput = true; + + for (int32 FormatIndex = 0; FormatIndex < NumSupportedFormat; ++FormatIndex) + { + AJA::AJAVideoFormats::VideoFormatDescriptor Descriptor = FrameFormats.GetSupportedFormat(FormatIndex); + if (!AjaMediaSourceId::IsVideoFormatValid(Descriptor)) + { + continue; + } + + MediaConfiguration.MediaMode = AjaMediaSourceId::ToMediaMode(Descriptor); + MediaConfiguration.MediaPort.QuadLinkType = EAjaQuadLinkType::Square; + + if (Descriptor.bIs2K) + { + if (DeviceInfo.bCanDoDualLink) + { + MediaConfiguration.MediaPort.LinkType = EAjaLinkType::DualLink; + for (int32 SourceIndex = 0; SourceIndex < PortCount; SourceIndex += 2) + { + MediaConfiguration.MediaPort.PortIndex = SourceIndex + 1; + OutConfigurations.Add(MediaConfiguration); + } + } + } + else if (Descriptor.bIs4K) + { + if (DeviceInfo.bCanDoDualLink) + { + MediaConfiguration.MediaPort.LinkType = 
EAjaLinkType::QuadLink; + for (int32 SourceIndex = 0; SourceIndex < PortCount; SourceIndex += 4) + { + MediaConfiguration.MediaPort.QuadLinkType = EAjaQuadLinkType::Square; + MediaConfiguration.MediaPort.PortIndex = SourceIndex + 1; + OutConfigurations.Add(MediaConfiguration); + + MediaConfiguration.MediaPort.QuadLinkType = EAjaQuadLinkType::TSI; + OutConfigurations.Add(MediaConfiguration); + } + } + } + else + { + MediaConfiguration.MediaPort.LinkType = EAjaLinkType::SingleLink; + for (int32 SourceIndex = 0; SourceIndex < PortCount; ++SourceIndex) + { + MediaConfiguration.MediaPort.PortIndex = SourceIndex + 1; + OutConfigurations.Add(MediaConfiguration); + } + } + } + } + } + } + } + + return OutConfigurations.Num() > 0; + } +} + + +bool FAjaMediaFinder::GetInputConfigurations(TArray& OutConfigurations) +{ + return AjaMediaFinder::GetConfigurations(OutConfigurations, true, false); +} + + +bool FAjaMediaFinder::GetOutputConfigurations(TArray& OutConfigurations) +{ + return AjaMediaFinder::GetConfigurations(OutConfigurations, false, true); +} + + +bool FAjaMediaFinder::GetDevices(TArray& OutDevices) +{ + OutDevices.Reset(); + if (!FAja::IsInitialized()) + { + return false; + } + + AJA::AJADeviceScanner DeviceScanner; + int32 NumDevices = DeviceScanner.GetNumDevices(); + for (int32 DeviceIndex = 0; DeviceIndex < NumDevices; ++DeviceIndex) + { + AJA::AJADeviceScanner::DeviceInfo DeviceInfo; + if (!DeviceScanner.GetDeviceInfo(DeviceIndex, DeviceInfo)) + { + continue; + } + + if (!DeviceInfo.bIsSupported) + { + continue; + } + + TCHAR DeviceNameBuffer[AJA::AJADeviceScanner::FormatedTextSize]; + if (!DeviceScanner.GetDeviceTextId(DeviceIndex, DeviceNameBuffer)) + { + continue; + } + + FAjaMediaDevice Device; + Device.DeviceIndex = DeviceIndex; + Device.DeviceName = DeviceNameBuffer; + OutDevices.Add(MoveTemp(Device)); + } + + return true; +} bool FAjaMediaFinder::GetSources(TArray& OutSources) { @@ -162,23 +417,31 @@ bool FAjaMediaFinder::GetSources(TArray& OutSources) 
return false; } - TUniquePtr DeviceScanner = MakeUnique(); - if (DeviceScanner) + AJA::AJADeviceScanner DeviceScanner; + const int32 NumDevices = DeviceScanner.GetNumDevices(); + for (int32 DeviceIndex = 0; DeviceIndex < NumDevices; ++DeviceIndex) { - int32 NumDevices = DeviceScanner->GetNumDevices(); - for (int32 SourceIndex = 0; SourceIndex < NumDevices; ++SourceIndex) + AJA::AJADeviceScanner::DeviceInfo DeviceInfo; + if (!DeviceScanner.GetDeviceInfo(DeviceIndex, DeviceInfo)) { - TCHAR DeviceNameBuffer[AJA::AJADeviceScanner::FormatedTextSize]; - if (DeviceScanner->GetDeviceTextId(SourceIndex, DeviceNameBuffer)) + continue; + } + + if (!DeviceInfo.bIsSupported) + { + continue; + } + + TCHAR DeviceNameBuffer[AJA::AJADeviceScanner::FormatedTextSize]; + if (DeviceScanner.GetDeviceTextId(DeviceIndex, DeviceNameBuffer)) + { + int32 OutputCount; + int32 InputCount; + if (DeviceScanner.GetNumberVideoChannels(DeviceIndex, InputCount, OutputCount)) { - int32 OutputCount; - int32 InputCount; - if (DeviceScanner->GetNumberVideoChannels(SourceIndex, InputCount, OutputCount)) + for (int32 Inputs = 0; Inputs < InputCount; ++Inputs) { - for (int32 Inputs = 0; Inputs < InputCount; ++Inputs) - { - OutSources.Add(FAjaMediaPort(DeviceNameBuffer, SourceIndex, Inputs + 1)); - } + OutSources.Add(FAjaMediaPort(DeviceNameBuffer, DeviceIndex, Inputs + 1)); } } } @@ -196,19 +459,40 @@ bool FAjaMediaFinder::GetModes(int32 DeviceIndex, bool bInOutput, TArrayGetDeviceInfo(MediaPort.DeviceIndex, DeviceInfo)) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't exist on this machine."), *GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't exist on this machine."), *GetName(), *MediaPort.DeviceName.ToString()); return false; } if (!DeviceInfo.bIsSupported) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that is not supported by the AJA SDK."), 
*GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that is not supported by the AJA SDK."), *GetName(), *MediaPort.DeviceName.ToString()); return false; } if (!DeviceInfo.bCanDoCapture) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that can't capture."), *GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that can't capture."), *GetName(), *MediaPort.DeviceName.ToString()); return false; } if (bCaptureAncillary && !DeviceInfo.bCanDoCustomAnc) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that can't capture Ancillary data."), *GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that can't capture Ancillary data."), *GetName(), *MediaPort.DeviceName.ToString()); + return false; + } + + if (bUseTimeSynchronization && TimecodeFormat == EAjaMediaTimecodeFormat::None) + { + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use time synchronization but doesn't enabled the timecode."), *GetName()); return false; } @@ -194,17 +238,12 @@ bool UAjaMediaSource::Validate() const { if (ColorFormat == EAjaMediaSourceColorFormat::BGRA && !DeviceInfo.bSupportPixelFormat8bitARGB) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't support the 8bit ARGB pixel format."), *GetName(), *MediaPort.DeviceName); - return false; - } - if (ColorFormat == EAjaMediaSourceColorFormat::UYVY && !DeviceInfo.bSupportPixelFormat8bitYCBCR) - { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't support the 8bit YCbCr pixel format."), *GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't support the 8bit ARGB pixel format."), *GetName(), *MediaPort.DeviceName.ToString()); return false; } if 
(ColorFormat == EAjaMediaSourceColorFormat::BGR10 && !DeviceInfo.bSupportPixelFormat10bitRGB) { - UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't support the 10bit ARGB pixel format."), *GetName(), *MediaPort.DeviceName); + UE_LOG(LogAjaMedia, Warning, TEXT("The MediaSource '%s' use the device '%s' that doesn't support the 10bit ARGB pixel format."), *GetName(), *MediaPort.DeviceName.ToString()); return false; } } @@ -223,7 +262,12 @@ bool UAjaMediaSource::CanEditChange(const UProperty* InProperty) const if (InProperty->GetFName() == GET_MEMBER_NAME_CHECKED(UAjaMediaSource, bEncodeTimecodeInTexel)) { return TimecodeFormat != EAjaMediaTimecodeFormat::None && bCaptureVideo; - } + } + + if (InProperty->GetFName() == GET_MEMBER_NAME_CHECKED(UTimeSynchronizableMediaSource, bUseTimeSynchronization)) + { + return TimecodeFormat != EAjaMediaTimecodeFormat::None; + } return true; } diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaTimecodeProvider.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaTimecodeProvider.cpp index 824f198c9776..620e2613b538 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaTimecodeProvider.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Assets/AjaTimecodeProvider.cpp @@ -48,14 +48,17 @@ FTimecode UAjaTimecodeProvider::GetTimecode() const { if (SyncChannel) { - AJA::FTimecode NewTimecode; - if (SyncChannel->GetTimecode(NewTimecode)) + if (State == ETimecodeProviderSynchronizationState::Synchronized) { - return FAja::ConvertAJATimecode2Timecode(NewTimecode, GetFrameRate()); - } - else if (State == ETimecodeProviderSynchronizationState::Synchronized) - { - const_cast(this)->State = ETimecodeProviderSynchronizationState::Error; + AJA::FTimecode NewTimecode; + if (SyncChannel->GetTimecode(NewTimecode)) + { + return FAja::ConvertAJATimecode2Timecode(NewTimecode, GetFrameRate()); + } + else + { + const_cast(this)->State = 
ETimecodeProviderSynchronizationState::Error; + } } } return FTimecode(); @@ -84,8 +87,8 @@ bool UAjaTimecodeProvider::Initialize(class UEngine* InEngine) State = ETimecodeProviderSynchronizationState::Error; const bool bAddProjectSettingMessage = MediaPort.IsValid() && !bIsDefaultModeOverriden; - const FString OverrideString = bAddProjectSettingMessage ? TEXT("The project settings haven't been set for this port.") : TEXT(""); - UE_LOG(LogAjaMedia, Warning, TEXT("The TimecodeProvider '%s' is invalid. %s %s"), *GetName(), *FailureReason, *OverrideString); + const TCHAR* OverrideString = bAddProjectSettingMessage ? TEXT("The project settings haven't been set for this port.") : TEXT(""); + UE_LOG(LogAjaMedia, Warning, TEXT("The TimecodeProvider '%s' is invalid. %s %s"), *GetName(), *FailureReason, OverrideString); return false; } @@ -98,6 +101,8 @@ bool UAjaTimecodeProvider::Initialize(class UEngine* InEngine) Options.CallbackInterface = SyncCallback; Options.VideoFormatIndex = CurrentMediaMode.VideoFormatIndex; Options.TimecodeFormat = AJA::ETimecodeFormat::TCF_None; + Options.bReadTimecodeFromReferenceIn = false; + Options.LTCSourceIndex = 1; switch(TimecodeFormat) { case EAjaMediaTimecodeFormat::None: diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaAudioSample.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaAudioSample.h index d46141733e29..415f998e712d 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaAudioSample.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaAudioSample.h @@ -11,32 +11,23 @@ class FAjaMediaAudioSample : public FMediaIOCoreAudioSampleBase { + using Super = FMediaIOCoreAudioSampleBase; + public: - bool Initialize(const AJA::AJAAudioFrameData& InAudioData, FTimespan InTime) + bool Initialize(const AJA::AJAAudioFrameData& InAudioData, FTimespan InTime, const TOptional& InTimecode) { - if (InAudioData.AudioBuffer) - { - 
Buffer.Reset(InAudioData.AudioBufferSize); - Buffer.Append(reinterpret_cast(InAudioData.AudioBuffer), InAudioData.AudioBufferSize/sizeof(int32)); - } - else - { - Buffer.Reset(); - return false; - } - - Channels = InAudioData.NumChannels; - SampleRate = InAudioData.AudioRate; - Time = InTime; - Duration = (InAudioData.AudioBufferSize * ETimespan::TicksPerSecond) / (Channels * SampleRate * sizeof(int32)); - - return true; + return Super::Initialize( + reinterpret_cast(InAudioData.AudioBuffer) + , InAudioData.AudioBufferSize / sizeof(int32) + , InAudioData.NumChannels + , InAudioData.AudioRate + , InTime + , InTimecode); } }; /* * Implements a pool for AJA audio sample objects. */ - class FAjaMediaAudioSamplePool : public TMediaObjectPool { }; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.cpp index b6707556383f..34d59bdd4b20 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.cpp @@ -40,7 +40,6 @@ FAjaMediaPlayer::FAjaMediaPlayer(IMediaEventSink& InEventSink) , MaxNumMetadataFrameBuffer(8) , MaxNumVideoFrameBuffer(8) , AjaThreadNewState(EMediaState::Closed) - , AjaThreadCurrentTime(FTimespan::Zero()) , EventSink(InEventSink) , AjaThreadAudioChannels(0) , AjaThreadAudioSampleRate(0) @@ -51,6 +50,7 @@ FAjaMediaPlayer::FAjaMediaPlayer(IMediaEventSink& InEventSink) , AjaThreadAutoCirculateMetadataFrameDropCount(0) , AjaThreadAutoCirculateVideoFrameDropCount(0) , bEncodeTimecodeInTexel(false) + , bUseFrameTimecode(false) , bUseAncillary(false) , bUseAudio(false) , bUseVideo(false) @@ -74,30 +74,17 @@ FAjaMediaPlayer::~FAjaMediaPlayer() *****************************************************************************/ bool FAjaMediaPlayer::Open(const FString& Url, const IMediaOptions* Options) { - Close(); - - if 
(!DeviceSource.FromUrl(Url, false)) + if (!Super::Open(Url, Options)) { return false; } - if (!ReadMediaOptions(Options)) - { - return false; - } - - AJA::AJADeviceOptions DeviceOptions(DeviceSource.DeviceIndex); + AJA::AJADeviceOptions DeviceOptions(Options->GetMediaOption(AjaMediaOption::DeviceIndex, (int64)0)); // Read options - AJA::AJAInputOutputChannelOptions AjaOptions(TEXT("MediaPlayer"), DeviceSource.PortIndex); + AJA::AJAInputOutputChannelOptions AjaOptions(TEXT("MediaPlayer"), Options->GetMediaOption(AjaMediaOption::PortIndex, (int64)0)); AjaOptions.CallbackInterface = this; AjaOptions.bOutput = false; - { - int32 Numerator, Denominator; - Numerator = Options->GetMediaOption(AjaMediaOption::FrameRateNumerator, (int64)30); - Denominator = Options->GetMediaOption(AjaMediaOption::FrameRateDenominator, (int64)1); - VideoFrameRate = FFrameRate(Numerator, Denominator); - } { EAjaMediaTimecodeFormat Timecode = (EAjaMediaTimecodeFormat)(Options->GetMediaOption(AjaMediaOption::TimecodeFormat, (int64)EAjaMediaTimecodeFormat::None)); bUseFrameTimecode = Timecode != EAjaMediaTimecodeFormat::None; @@ -130,10 +117,6 @@ bool FAjaMediaPlayer::Open(const FString& Url, const IMediaOptions* Options) EAjaMediaSourceColorFormat ColorFormat = (EAjaMediaSourceColorFormat)(Options->GetMediaOption(AjaMediaOption::ColorFormat, (int64)EMediaTextureSampleFormat::CharBGRA)); switch(ColorFormat) { - case EAjaMediaSourceColorFormat::UYVY: - VideoSampleFormat = EMediaTextureSampleFormat::CharUYVY; - AjaOptions.PixelFormat = AJA::EPixelFormat::PF_8BIT_YCBCR; - break; case EAjaMediaSourceColorFormat::BGR10: VideoSampleFormat = EMediaTextureSampleFormat::CharBGR10A2; AjaOptions.PixelFormat = AJA::EPixelFormat::PF_10BIT_RGB; @@ -174,15 +157,6 @@ bool FAjaMediaPlayer::Open(const FString& Url, const IMediaOptions* Options) AudioTrackFormat.SampleRate = 48000; AudioTrackFormat.TypeName = FString(TEXT("PCM")); - AudioTrackFormat.NumChannels = LastAudioChannels; - AudioTrackFormat.SampleRate = 
LastAudioSampleRate; - - AJA::AJAVideoFormats::VideoFormatDescriptor FrameDescriptor = AJA::AJAVideoFormats::GetVideoFormat(LastVideoFormatIndex); - VideoTrackFormat.Dim = FIntPoint(FrameDescriptor.Width, FrameDescriptor.Height); - VideoTrackFormat.FrameRate = VideoFrameRate.AsDecimal(); - VideoTrackFormat.FrameRates = TRange(VideoFrameRate.AsDecimal()); - VideoTrackFormat.TypeName = FrameDescriptor.FormatedText; - // finalize CurrentState = EMediaState::Preparing; AjaThreadNewState = EMediaState::Preparing; @@ -193,7 +167,6 @@ bool FAjaMediaPlayer::Open(const FString& Url, const IMediaOptions* Options) void FAjaMediaPlayer::Close() { - CurrentState = EMediaState::Closed; AjaThreadNewState = EMediaState::Closed; if (InputChannel) @@ -206,18 +179,8 @@ void FAjaMediaPlayer::Close() AudioSamplePool->Reset(); MetadataSamplePool->Reset(); TextureSamplePool->Reset(); - Samples->FlushSamples(); - CurrentTime = FTimespan::Zero(); - AjaThreadCurrentTime = FTimespan::Zero(); - - DeviceSource = FAjaMediaPort(); - LastVideoDim = FIntPoint::ZeroValue; - LastAudioChannels = 0; - LastAudioSampleRate = 0; - - EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged); - EventSink.ReceiveMediaEvent(EMediaEvent::MediaClosed); + Super::Close(); } @@ -232,8 +195,7 @@ FString FAjaMediaPlayer::GetStats() const { FString Stats; - Stats += TEXT("Aja settings\n"); - Stats += FString::Printf(TEXT(" Input port: %s\n"), *DeviceSource.ToString()); + Stats += FString::Printf(TEXT(" Input port: %s\n"), *GetUrl()); Stats += FString::Printf(TEXT(" Frame rate: %s\n"), *VideoFrameRate.ToPrettyText().ToString()); Stats += FString::Printf(TEXT(" Aja Mode: %s\n"), *VideoTrackFormat.TypeName); @@ -252,7 +214,7 @@ FString FAjaMediaPlayer::GetStats() const if (bUseVideo) { - Stats += FString::Printf(TEXT(" Buffered video frames: %d\n"), Samples->NumVideoSamples()); + Stats += FString::Printf(TEXT(" Buffered video frames: %d\n"), GetSamples().NumVideoSamples()); } else { @@ -261,7 +223,7 @@ FString 
FAjaMediaPlayer::GetStats() const if (bUseAudio) { - Stats += FString::Printf(TEXT(" Buffered audio frames: %d\n"), Samples->NumAudioSamples()); + Stats += FString::Printf(TEXT(" Buffered audio frames: %d\n"), GetSamples().NumAudioSamples()); } else { @@ -274,12 +236,6 @@ FString FAjaMediaPlayer::GetStats() const } -FString FAjaMediaPlayer::GetUrl() const -{ - return DeviceSource.ToUrl(); -} - - void FAjaMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan /*Timecode*/) { if (InputChannel && CurrentState == EMediaState::Playing) @@ -316,11 +272,7 @@ void FAjaMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan Timecode) return; } - if (TickTimeManagement() && !bUseFrameTimecode) -{ - // As default, use the App time - CurrentTime = FTimespan::FromSeconds(FApp::GetCurrentTime()); -} + TickTimeManagement(); } @@ -330,21 +282,9 @@ void FAjaMediaPlayer::ProcessFrame() { if (CurrentState == EMediaState::Playing) { - //If Asset is setup to use time synchronization, use it only if it can provide a valid time. - if (bUseFrameTimecode && !bUseTimeSynchronization) - { - // We want to atomically read the FTimespan.GetTick() - static_assert(sizeof(AjaThreadCurrentTime) == sizeof(int64), "The size of a FTimespan is not a int64"); - static_assert(sizeof(AjaThreadCurrentTime.GetTicks()) == sizeof(int64), "The size of a FTimespan is not a int64"); - static_assert(TIsSame::Value, "The type of CurrentTime is not the same as AjaThreadCurrentTime"); - - // Take the latest input as the CurrentTime - FPlatformAtomics::InterlockedExchange(reinterpret_cast(&CurrentTime), *reinterpret_cast(&AjaThreadCurrentTime)); - } - // No need to lock here. That info is only used for debug information. 
- LastAudioChannels = AjaThreadAudioChannels; - LastAudioSampleRate = AjaThreadAudioSampleRate; + AudioTrackFormat.NumChannels = AjaThreadAudioChannels; + AudioTrackFormat.SampleRate = AjaThreadAudioSampleRate; } } @@ -355,26 +295,26 @@ void FAjaMediaPlayer::VerifyFrameDropCount() uint32 FrameDropCount = AjaThreadFrameDropCount; if (FrameDropCount > LastFrameDropCount) { - UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d frames on Aja input %s. UE4 frame rate is too slow and the capture card was not able to send the frame(s) to UE4."), FrameDropCount - LastFrameDropCount, *DeviceSource.ToString()); + UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d frames on input %s. UE4 frame rate is too slow and the capture card was not able to send the frame(s) to UE4."), FrameDropCount - LastFrameDropCount, *GetUrl()); } LastFrameDropCount = FrameDropCount; FrameDropCount = FPlatformAtomics::InterlockedExchange(&AjaThreadAutoCirculateAudioFrameDropCount, 0); if (FrameDropCount > 0) { - UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d audio frames on Aja input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *DeviceSource.ToString()); + UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d audio frames on input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *GetUrl()); } FrameDropCount = FPlatformAtomics::InterlockedExchange(&AjaThreadAutoCirculateMetadataFrameDropCount, 0); if (FrameDropCount > 0) { - UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d metadata frames on Aja input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *DeviceSource.ToString()); + UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d metadata frames on input %s. 
Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *GetUrl()); } FrameDropCount = FPlatformAtomics::InterlockedExchange(&AjaThreadAutoCirculateVideoFrameDropCount, 0); if (FrameDropCount > 0) { - UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d video frames on Aja input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *DeviceSource.ToString()); + UE_LOG(LogAjaMedia, Warning, TEXT("Lost %d video frames on input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount, *GetUrl()); } } } @@ -401,37 +341,35 @@ void FAjaMediaPlayer::OnCompletion(bool bSucceed) bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInputFrame, const AJA::AJAAncillaryFrameData& InAncillaryFrame, const AJA::AJAAudioFrameData& InAudioFrame, const AJA::AJAVideoFrameData& InVideoFrame) { - if (AjaThreadNewState != EMediaState::Playing && CurrentState != EMediaState::Playing) + if (AjaThreadNewState != EMediaState::Playing) { return false; } AjaThreadFrameDropCount = InInputFrame.FramesDropped; - FTimespan DecodedTime = FTimespan::FromSeconds(FApp::GetCurrentTime()); - + FTimespan DecodedTime = FTimespan::FromSeconds(FPlatformTime::Seconds()); + TOptional DecodedTimecode; if (bUseFrameTimecode) { - DecodedTime = FAja::ConvertAJATimecode2Timespan(InInputFrame.Timecode, AjaThreadPreviousFrameTimecode, AjaThreadPreviousFrameTimespan, VideoFrameRate); + FTimespan TimecodeDecodedTime = FAja::ConvertAJATimecode2Timespan(InInputFrame.Timecode, AjaThreadPreviousFrameTimecode, AjaThreadPreviousFrameTimespan, VideoFrameRate); + DecodedTimecode = FAja::ConvertAJATimecode2Timecode(InInputFrame.Timecode, VideoFrameRate); + + if (bUseTimeSynchronization) + { + DecodedTime = TimecodeDecodedTime; + } //Previous frame Timecode and Timespan are used to cover the facts that AJAFrameTimecode FrameNumber is capped at 30 even for higher FPS. 
AjaThreadPreviousFrameTimecode = InInputFrame.Timecode; - AjaThreadPreviousFrameTimespan = DecodedTime; + AjaThreadPreviousFrameTimespan = TimecodeDecodedTime; if (bIsTimecodeLogEnable) { - UE_LOG(LogAjaMedia, Log, TEXT("Aja input port %s has timecode : %02d:%02d:%02d:%02d"), *DeviceSource.ToString(), AjaThreadPreviousFrameTimecode.Hours, AjaThreadPreviousFrameTimecode.Minutes, AjaThreadPreviousFrameTimecode.Seconds, AjaThreadPreviousFrameTimecode.Frames); + UE_LOG(LogAjaMedia, Log, TEXT("Input %s has timecode : %02d:%02d:%02d:%02d"), *GetUrl(), AjaThreadPreviousFrameTimecode.Hours, AjaThreadPreviousFrameTimecode.Minutes, AjaThreadPreviousFrameTimecode.Seconds, AjaThreadPreviousFrameTimecode.Frames); } } - // We want to atomically set AjaThreadCurrentTime - static_assert(sizeof(AjaThreadCurrentTime) == sizeof(int64), "The size of a FTimespan is not a int64"); - static_assert(sizeof(AjaThreadCurrentTime.GetTicks()) == sizeof(int64), "The size of a FTimespan is not a int64"); - static_assert(TIsSame::Value, "The type of DecodedTime is not the same as AjaThreadCurrentTime"); - - // Take the latest input as the CurrentTime - FPlatformAtomics::InterlockedExchange(reinterpret_cast(&AjaThreadCurrentTime), *reinterpret_cast(&DecodedTime)); - if (AjaThreadNewState == EMediaState::Playing) { if (bUseAncillary && InAncillaryFrame.AncBuffer) @@ -451,7 +389,7 @@ bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInput { auto MetaDataSample = MetadataSamplePool->AcquireShared(); - if (MetaDataSample->Initialize(InAncillaryFrame.AncBuffer, InAncillaryFrame.AncBufferSize, AjaThreadCurrentTime)) + if (MetaDataSample->Initialize(InAncillaryFrame.AncBuffer, InAncillaryFrame.AncBufferSize, DecodedTime, DecodedTimecode)) { Samples->AddMetadata(MetaDataSample); } @@ -459,9 +397,9 @@ bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInput if (bHaveField2) { - FTimespan CurrentOddTime = AjaThreadCurrentTime + 
FTimespan::FromSeconds(VideoFrameRate.AsInterval() / 2.0); + FTimespan CurrentOddTime = DecodedTime + FTimespan::FromSeconds(VideoFrameRate.AsInterval() / 2.0); auto MetaDataSample = MetadataSamplePool->AcquireShared(); - if (MetaDataSample->Initialize(InAncillaryFrame.AncF2Buffer, InAncillaryFrame.AncF2BufferSize, AjaThreadCurrentTime)) + if (MetaDataSample->Initialize(InAncillaryFrame.AncF2Buffer, InAncillaryFrame.AncF2BufferSize, DecodedTime, DecodedTimecode)) { Samples->AddMetadata(MetaDataSample); } @@ -477,7 +415,7 @@ bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInput } auto AudioSample = AudioSamplePool->AcquireShared(); - if (AudioSample->Initialize(InAudioFrame, AjaThreadCurrentTime)) + if (AudioSample->Initialize(InAudioFrame, DecodedTime, DecodedTimecode)) { Samples->AddAudio(AudioSample); @@ -510,7 +448,7 @@ bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInput EncodeTime.Render(0, 0, AjaThreadPreviousFrameTimecode.Hours, AjaThreadPreviousFrameTimecode.Minutes, AjaThreadPreviousFrameTimecode.Seconds, AjaThreadPreviousFrameTimecode.Frames); } - if (TextureSample->InitializeProgressive(InVideoFrame, VideoSampleFormat, AjaThreadCurrentTime)) + if (TextureSample->InitializeProgressive(InVideoFrame, VideoSampleFormat, DecodedTime, DecodedTimecode)) { Samples->AddVideo(TextureSample); bWasAdded = true; @@ -519,16 +457,16 @@ bool FAjaMediaPlayer::OnInputFrameReceived(const AJA::AJAInputFrameData& InInput else { bool bEven = true; - if (TextureSample->InitializeInterlaced_Halfed(InVideoFrame, VideoSampleFormat, AjaThreadCurrentTime, bEven)) + if (TextureSample->InitializeInterlaced_Halfed(InVideoFrame, VideoSampleFormat, DecodedTime, DecodedTimecode, bEven)) { Samples->AddVideo(TextureSample); bWasAdded = true; } auto TextureSampleOdd = TextureSamplePool->AcquireShared(); - FTimespan CurrentOddTime = AjaThreadCurrentTime + FTimespan::FromSeconds(VideoFrameRate.AsInterval() / 2.0); + FTimespan 
CurrentOddTime = DecodedTime + FTimespan::FromSeconds(VideoFrameRate.AsInterval() / 2.0); bEven = false; - if (TextureSampleOdd->InitializeInterlaced_Halfed(InVideoFrame, VideoSampleFormat, CurrentOddTime, bEven)) + if (TextureSampleOdd->InitializeInterlaced_Halfed(InVideoFrame, VideoSampleFormat, CurrentOddTime, DecodedTimecode, bEven)) { Samples->AddVideo(TextureSampleOdd); } @@ -554,7 +492,7 @@ bool FAjaMediaPlayer::OnOutputFrameCopied(const AJA::AJAOutputFrameData& InFrame bool FAjaMediaPlayer::IsHardwareReady() const { - return CurrentState == EMediaState::Playing ? true : false; + return AjaThreadNewState == EMediaState::Playing ? true : false; } #undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.h index 3505886caf37..61f96768b705 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaPlayer.h @@ -54,7 +54,6 @@ public: virtual void Close() override; virtual FName GetPlayerName() const override; - virtual FString GetUrl() const override; virtual bool Open(const FString& Url, const IMediaOptions* Options) override; @@ -102,9 +101,6 @@ private: /** Current state of the media player. */ EMediaState AjaThreadNewState; - /** Current playback time. */ - FTimespan AjaThreadCurrentTime; - /** The media event handler. */ IMediaEventSink& EventSink; @@ -129,6 +125,9 @@ private: /** Whether to use the time code embedded in Aja frames. */ bool bEncodeTimecodeInTexel; + /** Whether to use the timecode embedded in a frame. */ + bool bUseFrameTimecode; + /** Which field need to be capture. */ bool bUseAncillary; bool bUseAudio; @@ -137,9 +136,6 @@ private: /** The current video sample format. */ EMediaTextureSampleFormat VideoSampleFormat; - - /** The currently opened URL. 
*/ - FAjaMediaPort DeviceSource; /** Maps to the current input Device */ AJA::AJAInputChannel* InputChannel; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaTextureSample.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaTextureSample.h index 939f4d940706..84e8188fce09 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaTextureSample.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Private/Player/AjaMediaTextureSample.h @@ -10,12 +10,7 @@ class FAjaMediaTextureSample : public FMediaIOCoreTextureSampleBase { -public: - /** Virtual destructor. */ - virtual ~FAjaMediaTextureSample() - { - FreeSample(); - } + using Super = FMediaIOCoreTextureSampleBase; public: @@ -26,25 +21,16 @@ public: * @param InSampleFormat The sample format. * @param InTime The sample time (in the player's own clock). */ - bool InitializeProgressive(const AJA::AJAVideoFrameData& InVideoData, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime) + bool InitializeProgressive(const AJA::AJAVideoFrameData& InVideoData, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime, const TOptional& InTimecode) { - FreeSample(); - - if ((InVideoData.VideoBuffer == nullptr) || (InSampleFormat == EMediaTextureSampleFormat::Undefined)) - { - return false; - } - - Buffer.Reset(InVideoData.VideoBufferSize); - Buffer.Append(InVideoData.VideoBuffer, InVideoData.VideoBufferSize); - Stride = InVideoData.Stride; - Width = InVideoData.Width; - Height = InVideoData.Height; - SampleFormat = InSampleFormat; - Time = InTime; - PixelBuffer = Buffer.GetData(); - - return true; + return Super::Initialize(InVideoData.VideoBuffer + , InVideoData.VideoBufferSize + , InVideoData.Stride + , InVideoData.Width + , InVideoData.Height + , InSampleFormat + , InTime + , InTimecode); } /** @@ -55,7 +41,7 @@ public: * @param InTime The sample time (in the player's own clock). * @param bEven Only take the even frame from the image. 
*/ - bool InitializeInterlaced_Halfed(const AJA::AJAVideoFrameData& InVideoData, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime, bool bInEven) + bool InitializeInterlaced_Halfed(const AJA::AJAVideoFrameData& InVideoData, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime, const TOptional& InTimecode, bool bInEven) { FreeSample(); @@ -69,6 +55,7 @@ public: Width = InVideoData.Width; Height = InVideoData.Height / 2; SampleFormat = InSampleFormat; + Timecode = InTimecode; Time = InTime; for (uint32 IndexY = (bInEven ? 0 : 1); IndexY < InVideoData.Height; IndexY += 2) @@ -77,25 +64,11 @@ public: Buffer.Append(Source, Stride); } - PixelBuffer = Buffer.GetData(); return true; } - -protected: - - /** Free the video frame data. */ - virtual void FreeSample() override - { - Buffer.Reset(); - } - -private: - /** Image buffer */ - TArray Buffer; }; /* * Implements a pool for AJA texture sample objects. */ - class FAjaMediaTextureSamplePool : public TMediaObjectPool { }; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaCustomTimeStep.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaCustomTimeStep.h index 7d89ca1a0921..71b6cbe479aa 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaCustomTimeStep.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaCustomTimeStep.h @@ -75,6 +75,16 @@ public: UPROPERTY(EditAnywhere, Category = "Genlock options") bool bUseReferenceIn; + /** + * If true, the Engine will wait for the frame to be read. + * This will introduce random latency (the time it takes to read a frame). + * Use this option when you want to synchronize the engine with the incoming frame and discard the buffered frames. + * @note If false, there is no guarantee that the incoming frame will be ready since it takes some time to read a frame. + * @note This will not work as intended with interlaced transport because both fields are processed at the same time. 
+ */ + UPROPERTY(EditAnywhere, Category = "Genlock options", meta=(EditCondition="!bUseReferenceIn")) + bool bWaitForFrameToBeReady; + /** The type of Timecode to read from SDI stream. */ UPROPERTY(EditAnywhere, Category="Genlock options", meta=(EditCondition="!bUseReferenceIn")) EAjaMediaTimecodeFormat TimecodeFormat; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaFinder.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaFinder.h index 81212fdd8239..03bcfe263d5c 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaFinder.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaFinder.h @@ -3,14 +3,33 @@ #pragma once #include "CoreMinimal.h" - #include "Misc/FrameRate.h" - +#include "UObject/ObjectMacros.h" #include "AjaMediaFinder.generated.h" +/** + * Available type of link the AJA support. + */ +UENUM() +enum class EAjaLinkType +{ + SingleLink = 0, + DualLink, + QuadLink, +}; /** - * Available timecode formats for Aja sources. + * Available quad link the AJA support. + */ +UENUM() +enum class EAjaQuadLinkType +{ + Square, + TSI, +}; + +/** + * Available timecode formats that AJA support. */ UENUM() enum class EAjaMediaTimecodeFormat : uint8 @@ -21,7 +40,29 @@ enum class EAjaMediaTimecodeFormat : uint8 }; /** - * Identifies an Aja media source. +* Identifies an AJA media source. +*/ +USTRUCT(BlueprintType) +struct AJAMEDIA_API FAjaMediaDevice +{ + GENERATED_BODY() + +public: + + /** Default constructor. */ + FAjaMediaDevice(); + + /** The retail name of the AJA Device, i.e. "IoExpress". */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA) + FString DeviceName; + + /** The index of the AJA Device */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA, meta=(ClampMin="0")) + int32 DeviceIndex; +}; + +/** + * Identifies an AJA media source. 
*/ USTRUCT(BlueprintType) struct AJAMEDIA_API FAjaMediaPort @@ -29,7 +70,6 @@ struct AJAMEDIA_API FAjaMediaPort GENERATED_BODY() public: - static const TCHAR* Protocol; /** Default constructor. */ FAjaMediaPort(); @@ -37,45 +77,50 @@ public: /** * Create and initialize a new instance. */ - FAjaMediaPort(const FString& InDeviceName, int32 InDeviceIndex, int32 InPortIndex); + FAjaMediaPort(FName InDeviceName, int32 InDeviceIndex, int32 InPortIndex); - /** The retail name of the Aja Device, i.e. "IoExpress". */ + /** The retail name of the AJA Device, i.e. "IoExpress". */ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA) - FString DeviceName; + FName DeviceName; - /** The index of the Aja Device */ + /** The index of the AJA Device */ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA, meta=(ClampMin="0")) int32 DeviceIndex; - /** The index of the video input/ouput port on that Device. */ + /** The type of cable link used for that configuration */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA) + EAjaLinkType LinkType; + + /** The type of cable link used for that configuration */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = AJA) + EAjaQuadLinkType QuadLinkType; + + /** The index of the video channel on the Device. */ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=AJA, meta=(ClampMin="0")) int32 PortIndex; public: - bool operator==(const FAjaMediaPort& Other) const { return Other.DeviceIndex == DeviceIndex && Other.PortIndex == PortIndex; } + bool operator==(const FAjaMediaPort& Other) const + { + return Other.DeviceIndex == DeviceIndex + && Other.LinkType == LinkType + && Other.PortIndex == PortIndex; + } /** * Get a string representation of this source. - * @return String representation, i.e. "IoExpress [device0/port1]". + * @return String representation, i.e. "IoExpress [device0/single1]". 
*/ FString ToString() const; /** * Get a url used by the Media framework - * @return Url representation, "aja://device0/port1" + * @return Url representation, "aja://device0/single1" */ FString ToUrl() const; /** Return true if the device & port index have been set properly */ bool IsValid() const; - - /** - * Build a AjaMediaSourceId from a Url representation. - * @param Url A Url representation, i.e. "IoExpress [device0/port1]". - * @param bDiscoverDeviceName Ask the AjaDevice the name of the Device. If false, the name will be empty. - * @return true on success - */ - bool FromUrl(const FString& Url, bool bDiscoverDeviceName); }; /** @@ -91,40 +136,91 @@ public: FAjaMediaMode(); public: - /** The index of the Aja Device */ - UPROPERTY() - int32 DeviceIndex; - - /** The name of the mode, i.e. "1080p 60". */ - UPROPERTY() - FString ModeName; - /** The frame rate of the mode */ - UPROPERTY() + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA) FFrameRate FrameRate; - /** The target size of the mode */ - UPROPERTY() + /** The image resolution of the mode */ + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA, meta=(DisplayName="Resolution")) FIntPoint TargetSize; + /** If that configuration is in progressive transport */ + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA) + bool bIsProgressiveStandard; //NTV2_IS_PROGRESSIVE_STANDARD + + /** If that configuration is in PSF */ + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA) + bool bIsInterlacedStandard; + + /** If that configuration is in PSF */ + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA) + bool bIsPsfStandard; + /** The video format index for AJA */ - UPROPERTY() + UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category=AJA) int32 VideoFormatIndex; public: bool operator==(const FAjaMediaMode& Other) const { - return Other.DeviceIndex == DeviceIndex && Other.FrameRate == FrameRate && Other.VideoFormatIndex == VideoFormatIndex; + return Other.VideoFormatIndex == 
VideoFormatIndex; } - /** - * Get a string representation of this mode. - * @return i.e. "1080p 60". - */ - FString ToString() const; - /** Return true if the MediaMode has been set properly */ bool IsValid() const; + + /** + * Get a string representation of this source. + * @return String representation, i.e. "IoExpress [device0/single1]". + */ + FString ToString() const; +}; + +/** + * Configuration of an AJA input/output. + */ +USTRUCT() +struct AJAMEDIA_API FAjaMediaConfiguration +{ + GENERATED_BODY() + + FAjaMediaConfiguration(); + +public: + /** Configured as an input or output. */ + UPROPERTY() + bool bInput; + + /** The device and port and link type of the configuration. */ + UPROPERTY() + FAjaMediaPort MediaPort; + + /** The video format of the configuration. */ + UPROPERTY() + FAjaMediaMode MediaMode; + +public: + /** Return true if the device & port index have been set properly */ + bool IsValid() const; + + bool operator== (const FAjaMediaConfiguration& Other) const + { + return MediaPort == Other.MediaPort + && MediaMode == Other.MediaMode + && bInput == Other.bInput; + } + + bool operator!= (const FAjaMediaConfiguration& Other) const + { + return !(operator==(Other)); + } + +public: + /** + * Get a string representation of this source. + * @return String representation, i.e. "IoExpress [SingleLink1][1080p60]". + */ + FText ToText() const; }; /* @@ -133,8 +229,21 @@ public: class AJAMEDIA_API FAjaMediaFinder { public: + /** Link in a text format. */ + static FText LinkTypeToPrettyText(EAjaLinkType InLinkType, int32 InChannel, bool bShortVersion); + /** Quad link in a text format. */ + static FText QuadLinkTypeToPrettyText(EAjaQuadLinkType InLinkType); + /** Resolution in a text format */ + static FText ResolutionToPrettyText(FIntPoint InResolution); - /** Get the list of AJA device installed in the machine. */ + /** Get the list of AJA different configuration permutations available for that machine. 
*/ + static bool GetInputConfigurations(TArray& OutConfigurations); + static bool GetOutputConfigurations(TArray& OutConfigurations); + + /** Get the list of AJA device available for that machine. */ + static bool GetDevices(TArray& OutDevices); + + /** Get the list of AJA available sources available for that machine. */ static bool GetSources(TArray& OutSources); /** Get the list of Supported AJA video modes. */ diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaSource.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaSource.h index 59e88e3f5400..3df23f6567fd 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaSource.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMedia/Public/AjaMediaSource.h @@ -17,7 +17,6 @@ enum class EAjaMediaSourceColorFormat : uint8 { BGRA UMETA(DisplayName = "8bit RGBA"), BGR10 UMETA(DisplayName = "10bit RGB"), - UYVY UMETA(DisplayName = "8bit YUV 4:2:2"), }; /** @@ -33,7 +32,7 @@ enum class EAjaMediaAudioChannel : uint8 /** * Media source for Aja streams. */ -UCLASS(BlueprintType) +UCLASS(BlueprintType, hideCategories=(Platforms,Object)) class AJAMEDIA_API UAjaMediaSource : public UTimeSynchronizableMediaSource { GENERATED_BODY() @@ -46,59 +45,48 @@ public: * The input name of the AJA source to be played". * This combines the device ID, and the input. */ - UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="AJA", AssetRegistrySearchable) + UPROPERTY() FAjaMediaPort MediaPort; -private: /** Override project setting's media mode. */ UPROPERTY() bool bIsDefaultModeOverriden; /** The expected input signal format from the MediaPort. Uses project settings by default. */ - UPROPERTY(EditAnywhere, Category="AJA", meta=(EditCondition="bIsDefaultModeOverriden", MediaPort="MediaPort")) + UPROPERTY() FAjaMediaMode MediaMode; -public: - /** Use the time code embedded in the input stream. 
*/ UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="AJA") EAjaMediaTimecodeFormat TimecodeFormat; -public: /** - * The capture of the Audio, Ancillary and/or video will be perform at the same time. - * This may decrease transfer performance but the data will be sync in relation with each other. + * Use a ring buffer to capture and transfer data. + * This may decrease transfer latency but increase stability. */ - UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Capture") + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="AJA") bool bCaptureWithAutoCirculating; +public: /** * Capture Ancillary from the AJA source. * It will decrease performance */ - UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Capture") + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Ancillary") bool bCaptureAncillary; - /** - * Capture Audio from the AJA source. - * It will decrease performance - */ - UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Capture") - bool bCaptureAudio; - - /** - * Capture Video from the AJA source. - * It will decrease performance - */ - UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Capture") - bool bCaptureVideo; - -public: /** Maximum number of ancillary data frames to buffer. */ UPROPERTY(BlueprintReadWrite, EditAnywhere, AdvancedDisplay, Category="Ancillary", meta=(EditCondition="bCaptureAncillary", ClampMin="1", ClampMax="32")) int32 MaxNumAncillaryFrameBuffer; public: + /** + * Capture Audio from the AJA source. + * It will decrease performance + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Audio") + bool bCaptureAudio; + /** Desired number of audio channel to capture. */ UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Audio", meta=(EditCondition="bCaptureAudio")) EAjaMediaAudioChannel AudioChannel; @@ -108,6 +96,13 @@ public: int32 MaxNumAudioFrameBuffer; public: + /** + * Capture Video from the AJA source. 
+ * It will decrease performance + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Video") + bool bCaptureVideo; + /** Desired color format of input video frames (default = BGRA). */ UPROPERTY(BlueprintReadWrite, EditAnywhere, Category="Video", meta=(EditCondition="bCaptureVideo")) EAjaMediaSourceColorFormat ColorFormat; @@ -129,24 +124,17 @@ public: bool bEncodeTimecodeInTexel; public: - /** Sets desired MediaMode on this Source to override project setting. */ - UFUNCTION(BlueprintCallable, Category = "AJA") - void OverrideMediaMode(const FAjaMediaMode& InMediaMode); - - /** Disables MediaMode override for this source to use project setting. */ - UFUNCTION(BlueprintCallable, Category = "AJA") - void DisableMediaModeOverride() { bIsDefaultModeOverriden = false; } + FAjaMediaMode GetMediaMode() const; + FAjaMediaConfiguration GetMediaConfiguration() const; public: //~ IMediaOptions interface virtual bool GetMediaOption(const FName& Key, bool DefaultValue) const override; virtual int64 GetMediaOption(const FName& Key, int64 DefaultValue) const override; + virtual FString GetMediaOption(const FName& Key, const FString& DefaultValue) const override; virtual bool HasMediaOption(const FName& Key) const override; -public: - FAjaMediaMode GetMediaMode() const; - public: //~ UMediaSource interface diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/AjaMediaEditor.Build.cs b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/AjaMediaEditor.Build.cs index 82e22234c0c1..90f4c7dc1d87 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/AjaMediaEditor.Build.cs +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/AjaMediaEditor.Build.cs @@ -12,12 +12,15 @@ namespace UnrealBuildTool.Rules "AjaMediaOutput", "Core", "CoreUObject", + "EditorStyle", "MediaAssets", + "MediaPlayerEditor", "Projects", "PropertyEditor", "Settings", "Slate", "SlateCore", + "TimeManagement", "UnrealEd", }); diff --git 
a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/AjaMediaEditorModule.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/AjaMediaEditorModule.cpp index f6eddfc75973..868fd2df79c0 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/AjaMediaEditorModule.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/AjaMediaEditorModule.cpp @@ -7,6 +7,7 @@ #include "Customizations/AjaMediaPortCustomization.h" #include "Customizations/AjaMediaModeCustomization.h" +#include "Customizations/AjaMediaSourceDetailCustomization.h" #include "Brushes/SlateImageBrush.h" #include "Interfaces/IPluginManager.h" @@ -58,12 +59,14 @@ private: FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); PropertyModule.RegisterCustomPropertyTypeLayout(FAjaMediaPort::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FAjaMediaPortCustomization::MakeInstance)); PropertyModule.RegisterCustomPropertyTypeLayout(FAjaMediaMode::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FAjaMediaModeCustomization::MakeInstance)); + PropertyModule.RegisterCustomClassLayout(UAjaMediaSource::StaticClass()->GetFName(), FOnGetDetailCustomizationInstance::CreateStatic(&FAjaMediaSourceDetailCustomization::MakeInstance)); } /** Unregister details view customizations. 
*/ void UnregisterCustomizations() { FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); + PropertyModule.UnregisterCustomClassLayout(UAjaMediaSource::StaticClass()->GetFName()); PropertyModule.UnregisterCustomPropertyTypeLayout(FAjaMediaPort::StaticStruct()->GetFName()); PropertyModule.UnregisterCustomPropertyTypeLayout(FAjaMediaMode::StaticStruct()->GetFName()); } diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.cpp new file mode 100644 index 000000000000..25972592e44d --- /dev/null +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.cpp @@ -0,0 +1,170 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "AjaMediaSourceDetailCustomization.h" + +#include "DetailWidgetRow.h" +#include "DetailCategoryBuilder.h" +#include "DetailLayoutBuilder.h" +#include "Framework/Application/SlateApplication.h" +#include "ObjectEditorUtils.h" +#include "Widgets/Input/SComboButton.h" +#include "Widgets/MediaPermutationsSelectorBuilder.h" +#include "Widgets/Text/STextBlock.h" +#include "Widgets/SBoxPanel.h" +#include "Widgets/SMediaPermutationsSelector.h" +#include "Widgets/SNullWidget.h" +#include "Widgets/SWindow.h" + +#define LOCTEXT_NAMESPACE "AjaMediaSourceDetailCustomization" + + +void FAjaMediaSourceDetailCustomization::CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) +{ + TArray> Objects; + DetailBuilder.GetObjectsBeingCustomized(Objects); + + for (const TWeakObjectPtr& Obj : Objects) + { + if (UAjaMediaSource* Source = Cast(Obj.Get())) + { + MediaSources.Add(Source); + } + } + + if (MediaSources.Num() == 0) + { + return; + } + + IDetailCategoryBuilder& SourceCategory = DetailBuilder.EditCategory("Source"); + FDetailWidgetRow& UpdateRevisionRow = 
SourceCategory.AddCustomRow(LOCTEXT("Configuration", "Configuration")) + .NameContent() + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .Padding(FMargin(0, 1, 0, 1)) + .FillWidth(1.0f) + [ + SNew(STextBlock) + .Text(LOCTEXT("ConfigurationLabel", "Configuration")) + .Font(DetailBuilder.GetDetailFont()) + ] + ] + .ValueContent() + .MaxDesiredWidth(512) + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .FillWidth(1.0f) + .VAlign(VAlign_Center) + [ + SNew(STextBlock) + .Text(MakeAttributeLambda([this] + { + FText Result; + if (MediaSources[0].IsValid()) + { + FAjaMediaConfiguration Configuration = MediaSources[0]->GetMediaConfiguration(); + Result = Configuration.ToText(); + for (int32 Index = 1; Index < MediaSources.Num(); ++Index) + { + if (MediaSources[Index]->GetMediaConfiguration() != Configuration) + { + Result = LOCTEXT("MultipleValues", "Multiple Values"); + break; + } + } + } + return Result; + })) + ] + + SHorizontalBox::Slot() + .AutoWidth() + .Padding(FMargin(4.0f, 0.0f, 0.0f, 0.0f)) + .VAlign(VAlign_Center) + [ + SNew(SComboButton) + .OnGetMenuContent(this, &FAjaMediaSourceDetailCustomization::HandleSourceComboButtonMenuContent) + .ContentPadding(FMargin(4.0, 2.0)) + ] + ]; +} + +TSharedRef FAjaMediaSourceDetailCustomization::HandleSourceComboButtonMenuContent() +{ + PermutationSelector.Reset(); + if (MediaSources.Num() == 0) + { + return SNullWidget::NullWidget; + } + + TArray MediaConfiguration; + bool bHasInputConfiguration = FAjaMediaFinder::GetInputConfigurations(MediaConfiguration); + if (!bHasInputConfiguration || MediaConfiguration.Num() == 0) + { + return SNullWidget::NullWidget; + } + + if (MediaSources[0].IsValid()) + { + SelectedConfiguration = MediaSources[0]->GetMediaConfiguration(); + } + + auto QuadTypeVisible = [](FName ColumnName, const TArray& UniquePermutationsForThisColumn) + { + if (UniquePermutationsForThisColumn.Num() > 0) + { + return UniquePermutationsForThisColumn[0].MediaPort.LinkType == EAjaLinkType::QuadLink; + } 
+ return false; + }; + + using TSelection = SMediaPermutationsSelector; + TSharedRef Selector = SNew(TSelection) + .PermutationsSource(MoveTemp(MediaConfiguration)) + .SelectedPermutation(SelectedConfiguration) + .OnSelectionChanged(this, &FAjaMediaSourceDetailCustomization::OnSelectionChanged) + .OnButtonClicked(this, &FAjaMediaSourceDetailCustomization::OnButtonClicked) + +TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_DeviceIndex) + .Label(LOCTEXT("DeviceLabel", "Device")) + + TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_SourceType) + .Label(LOCTEXT("SourceTypeLabel", "Source")) + + TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_QuadType) + .Label(LOCTEXT("QuadTypeLabel", "Quad")) + .IsColumnVisible_Lambda(QuadTypeVisible) + + TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_Resolution) + .Label(LOCTEXT("ResolutionLabel", "Resolution")) + + TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_Standard) + .Label(LOCTEXT("StandardLabel", "Standard")) + + TSelection::Column(FMediaPermutationsSelectorBuilder::NAME_FrameRate) + .Label(LOCTEXT("FrameRateLabel", "Frame Rate")); + PermutationSelector = Selector; + + return Selector; +} + +void FAjaMediaSourceDetailCustomization::OnSelectionChanged(FAjaMediaConfiguration SelectedItem) +{ + SelectedConfiguration = SelectedItem; +} + +FReply FAjaMediaSourceDetailCustomization::OnButtonClicked() const +{ + for (const TWeakObjectPtr& Obj : MediaSources) + { + FObjectEditorUtils::SetPropertyValue(Obj.Get(), GET_MEMBER_NAME_CHECKED(UAjaMediaSource, MediaPort), SelectedConfiguration.MediaPort); + FObjectEditorUtils::SetPropertyValue(Obj.Get(), GET_MEMBER_NAME_CHECKED(UAjaMediaSource, bIsDefaultModeOverriden), true); + FObjectEditorUtils::SetPropertyValue(Obj.Get(), GET_MEMBER_NAME_CHECKED(UAjaMediaSource, MediaMode), SelectedConfiguration.MediaMode); + } + + TSharedPtr SharedPermutationSelector = PermutationSelector.Pin(); + if (SharedPermutationSelector.IsValid()) + { + 
TSharedRef ParentContextMenuWindow = FSlateApplication::Get().FindWidgetWindow(SharedPermutationSelector.ToSharedRef()).ToSharedRef(); + FSlateApplication::Get().RequestDestroyWindow(ParentContextMenuWindow); + } + + return FReply::Handled(); +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.h new file mode 100644 index 000000000000..a9f739a5c9a0 --- /dev/null +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Customizations/AjaMediaSourceDetailCustomization.h @@ -0,0 +1,29 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "IDetailCustomization.h" + +#include "AjaMediaFinder.h" +#include "AjaMediaSource.h" +#include "Input/Reply.h" +#include "UObject/WeakObjectPtr.h" +#include "Widgets/SWidget.h" + +/** AJA Media Source detail customization */ +class FAjaMediaSourceDetailCustomization : public IDetailCustomization +{ +public: + static TSharedRef MakeInstance() { return MakeShareable(new FAjaMediaSourceDetailCustomization()); } + + virtual void CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) override; + +private: + TSharedRef HandleSourceComboButtonMenuContent(); + void OnSelectionChanged(FAjaMediaConfiguration SelectedItem); + FReply OnButtonClicked() const; + + TArray> MediaSources; + TWeakPtr PermutationSelector; + FAjaMediaConfiguration SelectedConfiguration; +}; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.cpp new file mode 100644 index 000000000000..a585bced91d4 --- /dev/null +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.cpp @@ -0,0 +1,109 @@ +// 
Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "Widgets/MediaPermutationsSelectorBuilder.h" + + +#define LOCTEXT_NAMESPACE "AjaMediaPermutationsSelectorBuilder" + +const FName FMediaPermutationsSelectorBuilder::NAME_DeviceIndex = "DeviceIndex"; +const FName FMediaPermutationsSelectorBuilder::NAME_SourceType = "SourceType"; +const FName FMediaPermutationsSelectorBuilder::NAME_QuadType = "QuadType"; +const FName FMediaPermutationsSelectorBuilder::NAME_Resolution = "Resolution"; +const FName FMediaPermutationsSelectorBuilder::NAME_Standard = "Standard"; +const FName FMediaPermutationsSelectorBuilder::NAME_FrameRate = "FrameRate"; + +bool FMediaPermutationsSelectorBuilder::IdenticalProperty(FName ColumnName, const FAjaMediaConfiguration& Left, const FAjaMediaConfiguration& Right) +{ + if (ColumnName == NAME_DeviceIndex) return Left.MediaPort.DeviceIndex == Right.MediaPort.DeviceIndex; + if (ColumnName == NAME_SourceType) return Left.MediaPort.LinkType == Right.MediaPort.LinkType && Left.MediaPort.PortIndex == Right.MediaPort.PortIndex; + if (ColumnName == NAME_QuadType) return Left.MediaPort.LinkType == EAjaLinkType::QuadLink ? 
Left.MediaPort.QuadLinkType == Right.MediaPort.QuadLinkType : true; + if (ColumnName == NAME_Resolution) return Left.MediaMode.TargetSize == Right.MediaMode.TargetSize; + if (ColumnName == NAME_Standard) return Left.MediaMode.bIsProgressiveStandard == Right.MediaMode.bIsProgressiveStandard && Left.MediaMode.bIsPsfStandard == Right.MediaMode.bIsPsfStandard; + if (ColumnName == NAME_FrameRate) return Left.MediaMode.FrameRate == Right.MediaMode.FrameRate; + check(false); + return false; +} + +bool FMediaPermutationsSelectorBuilder::Less(FName ColumnName, const FAjaMediaConfiguration& Left, const FAjaMediaConfiguration& Right) +{ + if (ColumnName == NAME_DeviceIndex) + { + return Left.MediaPort.DeviceIndex < Right.MediaPort.DeviceIndex; + } + if (ColumnName == NAME_SourceType) + { + if (Left.MediaPort.LinkType == Right.MediaPort.LinkType) + { + return Left.MediaPort.PortIndex < Right.MediaPort.PortIndex; + } + return (int32)Left.MediaPort.LinkType < (int32)Right.MediaPort.LinkType; + } + + if (ColumnName == NAME_Resolution) + { + if (Left.MediaMode.TargetSize.X == Right.MediaMode.TargetSize.X) + { + return Left.MediaMode.TargetSize.Y < Right.MediaMode.TargetSize.Y; + } + return Left.MediaMode.TargetSize.X < Right.MediaMode.TargetSize.X; + } + + if (ColumnName == NAME_QuadType) + { + if (Left.MediaPort.LinkType == EAjaLinkType::QuadLink) + { + return (int32)Left.MediaPort.QuadLinkType < (int32)Right.MediaPort.QuadLinkType; + } + return true; + } + + if (ColumnName == NAME_Standard) + { + if (Left.MediaMode.bIsProgressiveStandard == Right.MediaMode.bIsProgressiveStandard) + { + return Left.MediaMode.bIsPsfStandard; + } + return Left.MediaMode.bIsProgressiveStandard; + } + + if (ColumnName == NAME_FrameRate) + { + return Left.MediaMode.FrameRate.AsDecimal() < Right.MediaMode.FrameRate.AsDecimal(); + } + + check(false); + return false; +} + +FText FMediaPermutationsSelectorBuilder::GetLabel(FName ColumnName, const FAjaMediaConfiguration& Item) +{ + if (ColumnName == 
NAME_DeviceIndex) return FText::FromName(Item.MediaPort.DeviceName); + if (ColumnName == NAME_SourceType) return FAjaMediaFinder::LinkTypeToPrettyText(Item.MediaPort.LinkType, Item.MediaPort.PortIndex, false); + if (ColumnName == NAME_QuadType) return FAjaMediaFinder::QuadLinkTypeToPrettyText(Item.MediaPort.QuadLinkType); + if (ColumnName == NAME_Resolution) return FAjaMediaFinder::ResolutionToPrettyText(Item.MediaMode.TargetSize); + if (ColumnName == NAME_Standard) return Item.MediaMode.bIsProgressiveStandard ? LOCTEXT("Progressive", "Progressive") : (Item.MediaMode.bIsPsfStandard ? LOCTEXT("psf", "psf") : LOCTEXT("Interlaced", "Interlaced")); + if (ColumnName == NAME_FrameRate) return Item.MediaMode.FrameRate.ToPrettyText(); + check(false); + return FText::GetEmpty(); +} + +FText FMediaPermutationsSelectorBuilder::GetTooltip(FName ColumnName, const FAjaMediaConfiguration& Item) +{ + if (ColumnName == NAME_DeviceIndex) return FText::FromString(FString::Printf(TEXT("%s as index: %d"), *Item.MediaPort.DeviceName.ToString(), Item.MediaPort.DeviceIndex)); + if (ColumnName == NAME_SourceType) return FText::GetEmpty(); + if (ColumnName == NAME_QuadType) return FText::GetEmpty(); + if (ColumnName == NAME_Resolution) return FText::FromString(FString::Printf(TEXT("%dx%d"), Item.MediaMode.TargetSize.X, Item.MediaMode.TargetSize.Y)); + if (ColumnName == NAME_Standard) return FText::GetEmpty(); + if (ColumnName == NAME_FrameRate) + { + if (const FCommonFrameRateInfo* Found = FCommonFrameRates::Find(Item.MediaMode.FrameRate)) + { + return Found->Description; + } + return Item.MediaMode.FrameRate.ToPrettyText(); + } + check(false); + return FText::GetEmpty(); +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.h b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.h new file mode 100644 index 000000000000..0905e659d3ef --- /dev/null +++ 
b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaEditor/Private/Widgets/MediaPermutationsSelectorBuilder.h @@ -0,0 +1,25 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +#include "AjaMediaFinder.h" +#include "CommonFrameRates.h" +#include "Misc/FrameRate.h" + + +struct FMediaPermutationsSelectorBuilder +{ + static const FName NAME_DeviceIndex; + static const FName NAME_SourceType; + static const FName NAME_QuadType; + static const FName NAME_Resolution; + static const FName NAME_Standard; + static const FName NAME_FrameRate; + + static bool IdenticalProperty(FName ColumnName, const FAjaMediaConfiguration& Left, const FAjaMediaConfiguration& Right); + static bool Less(FName ColumnName, const FAjaMediaConfiguration& Left, const FAjaMediaConfiguration& Right); + static FText GetLabel(FName ColumnName, const FAjaMediaConfiguration& Item); + static FText GetTooltip(FName ColumnName, const FAjaMediaConfiguration& Item); +}; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaCapture.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaCapture.cpp index 7e83b52224cd..7dc9f2e20fa6 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaCapture.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaCapture.cpp @@ -89,19 +89,7 @@ bool UAjaMediaCapture::CaptureSceneViewportImpl(TSharedPtr& InSc bool bResult = InitAJA(AjaMediaSource); if (bResult) { - TSharedPtr Widget(InSceneViewport->GetViewportWidget().Pin()); - if (Widget.IsValid()) - { - bSavedIgnoreTextureAlpha = Widget->GetIgnoreTextureAlpha(); - if (AjaMediaSource->OutputType == EAjaMediaOutputType::FillAndKey) - { - if (bSavedIgnoreTextureAlpha) - { - bIgnoreTextureAlphaChanged = true; - Widget->SetIgnoreTextureAlpha(false); - } - } - } + ApplyViewportTextureAlpha(InSceneViewport); } return bResult; } @@ -113,6 +101,19 @@ bool 
UAjaMediaCapture::CaptureRenderTargetImpl(UTextureRenderTarget2D* InRenderT return bResult; } +bool UAjaMediaCapture::UpdateSceneViewportImpl(TSharedPtr& InSceneViewport) +{ + RestoreViewportTextureAlpha(GetCapturingSceneViewport()); + ApplyViewportTextureAlpha(InSceneViewport); + return true; +} + +bool UAjaMediaCapture::UpdateRenderTargetImpl(UTextureRenderTarget2D* InRenderTarget) +{ + RestoreViewportTextureAlpha(GetCapturingSceneViewport()); + return true; +} + void UAjaMediaCapture::StopCaptureImpl(bool bAllowPendingFrameToBeProcess) { if (!bAllowPendingFrameToBeProcess) @@ -138,23 +139,49 @@ void UAjaMediaCapture::StopCaptureImpl(bool bAllowPendingFrameToBeProcess) } } - // restore the ignore texture alpha state - if (bIgnoreTextureAlphaChanged) + RestoreViewportTextureAlpha(GetCapturingSceneViewport()); + } +} + +void UAjaMediaCapture::ApplyViewportTextureAlpha(TSharedPtr InSceneViewport) +{ + if (InSceneViewport.IsValid()) + { + TSharedPtr Widget(InSceneViewport->GetViewportWidget().Pin()); + if (Widget.IsValid()) { - TSharedPtr Viewport = GetCapturingSceneViewport(); - if (Viewport.IsValid()) + bSavedIgnoreTextureAlpha = Widget->GetIgnoreTextureAlpha(); + + UAjaMediaOutput* AjaMediaSource = CastChecked(MediaOutput); + if (AjaMediaSource->OutputType == EAjaMediaOutputType::FillAndKey) { - TSharedPtr Widget(Viewport->GetViewportWidget().Pin()); - if (Widget.IsValid()) + if (bSavedIgnoreTextureAlpha) { - Widget->SetIgnoreTextureAlpha(bSavedIgnoreTextureAlpha); + bIgnoreTextureAlphaChanged = true; + Widget->SetIgnoreTextureAlpha(false); } } - bIgnoreTextureAlphaChanged = false; } } } +void UAjaMediaCapture::RestoreViewportTextureAlpha(TSharedPtr InSceneViewport) +{ + // restore the ignore texture alpha state + if (bIgnoreTextureAlphaChanged) + { + if (InSceneViewport.IsValid()) + { + TSharedPtr Widget(InSceneViewport->GetViewportWidget().Pin()); + if (Widget.IsValid()) + { + Widget->SetIgnoreTextureAlpha(bSavedIgnoreTextureAlpha); + } + } + 
bIgnoreTextureAlphaChanged = false; + } +} + bool UAjaMediaCapture::HasFinishedProcessing() const { return Super::HasFinishedProcessing() || OutputChannel == nullptr; diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaOutput.cpp b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaOutput.cpp index 1a69806398a5..c3504b28379b 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaOutput.cpp +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Private/AjaMediaOutput.cpp @@ -71,52 +71,46 @@ bool UAjaMediaOutput::Validate(FString& OutFailureReason) const return false; } - if (CurrentMode.DeviceIndex != FillPort.DeviceIndex) - { - OutFailureReason = FString::Printf(TEXT("The MediaMode & FillPort of '%s' are not on the same device."), *GetName()); - return false; - } - TUniquePtr Scanner = MakeUnique(); AJA::AJADeviceScanner::DeviceInfo DeviceInfo; if (!Scanner->GetDeviceInfo(FillPort.DeviceIndex, DeviceInfo)) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't exist on this machine."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't exist on this machine."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (!DeviceInfo.bIsSupported) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that is not supported by the AJA SDK."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that is not supported by the AJA SDK."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (!DeviceInfo.bCanDoPlayback) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do playback."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do 
playback."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (FillPort.PortIndex == 1 && !DeviceInfo.bCanFrameStore1DoPlayback) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do playback on port 1."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do playback on port 1."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (OutputType == EAjaMediaOutputType::FillAndKey && KeyPort.PortIndex == 1 && !DeviceInfo.bCanFrameStore1DoPlayback) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do playback on port 1."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that can't do playback on port 1."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (PixelFormat == EAjaMediaOutputPixelFormat::PF_8BIT_ARGB && !DeviceInfo.bSupportPixelFormat8bitARGB) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't support the 8bit ARGB pixel format."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't support the 8bit ARGB pixel format."), *GetName(), *FillPort.DeviceName.ToString()); return false; } if (PixelFormat == EAjaMediaOutputPixelFormat::PF_10BIT_RGB && !DeviceInfo.bSupportPixelFormat10bitRGB) { - OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't support the 10bit RGB pixel format."), *GetName(), *FillPort.DeviceName); + OutFailureReason = FString::Printf(TEXT("The MediaOutput '%s' use the device '%s' that doesn't support the 10bit RGB pixel format."), *GetName(), *FillPort.DeviceName.ToString()); return false; } diff --git a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Public/AjaMediaCapture.h 
b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Public/AjaMediaCapture.h index af717f6536c4..ad70978ab1d5 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Public/AjaMediaCapture.h +++ b/Engine/Plugins/Media/AjaMedia/Source/AjaMediaOutput/Public/AjaMediaCapture.h @@ -28,6 +28,8 @@ protected: virtual bool ValidateMediaOutput() const override; virtual bool CaptureSceneViewportImpl(TSharedPtr& InSceneViewport) override; virtual bool CaptureRenderTargetImpl(UTextureRenderTarget2D* InRenderTarget) override; + virtual bool UpdateSceneViewportImpl(TSharedPtr& InSceneViewport) override; + virtual bool UpdateRenderTargetImpl(UTextureRenderTarget2D* InRenderTarget) override; virtual void StopCaptureImpl(bool bAllowPendingFrameToBeProcess) override; virtual void OnFrameCaptured_RenderingThread(const FCaptureBaseData& InBaseData, TSharedPtr InUserData, void* InBuffer, int32 Width, int32 Height) override; @@ -39,6 +41,8 @@ private: private: bool InitAJA(UAjaMediaOutput* InMediaOutput); void WaitForSync_RenderingThread() const; + void ApplyViewportTextureAlpha(TSharedPtr InSceneViewport); + void RestoreViewportTextureAlpha(TSharedPtr InSceneViewport); private: /** Aja Port for outputting */ diff --git a/Engine/Plugins/Media/AjaMedia/Source/ThirdParty/Build/include/AJALib.h b/Engine/Plugins/Media/AjaMedia/Source/ThirdParty/Build/include/AJALib.h index 3d98ddcc1d9f..4380bfdf7fcc 100644 --- a/Engine/Plugins/Media/AjaMedia/Source/ThirdParty/Build/include/AJALib.h +++ b/Engine/Plugins/Media/AjaMedia/Source/ThirdParty/Build/include/AJALib.h @@ -89,11 +89,13 @@ namespace AJA bool bCanDoCapture; bool bCanDoPlayback; bool bCanFrameStore1DoPlayback; + bool bCanDoDualLink; bool bCanDo2K; bool bCanDo4K; bool bCanDoMultiFormat; bool bCanDoAlpha; bool bCanDoCustomAnc; + bool bCanDoLtcInRefPort; bool bSupportPixelFormat8bitYCBCR; bool bSupportPixelFormat8bitARGB; bool bSupportPixelFormat10bitRGB; @@ -118,21 +120,26 @@ namespace AJA 
*****************************************************************************/ struct AJA_API AJAVideoFormats { - const static int32_t FormatedTextSize = 64; struct AJA_API VideoFormatDescriptor { VideoFormatDescriptor(); FAJAVideoFormat VideoFormatIndex; - TCHAR FormatedText[FormatedTextSize]; uint32_t FrameRateNumerator; uint32_t FrameRateDenominator; - uint32_t Width; - uint32_t Height; - bool bIsProgressive; - bool bValid; + uint32_t ResolutionWidth; + uint32_t ResolutionHeight; + bool bIsProgressiveStandard; + bool bIsInterlacedStandard; + bool bIsPsfStandard; + bool bIsVideoFormatA; + bool bIsVideoFormatB; + bool bIsSD; + bool bIsHD; + bool bIs2K; + bool bIs4K; - bool operator<(const VideoFormatDescriptor& Other) const; + bool bIsValid; }; AJAVideoFormats(int32_t InDeviceId, bool bForOutput); @@ -182,6 +189,10 @@ namespace AJA FAJAVideoFormat VideoFormatIndex; ETimecodeFormat TimecodeFormat; bool bOutput; // port is output + bool bWaitForFrameToBeReady; // port is input and we want to wait for the image to be sent to UE4 before ticking + + bool bReadTimecodeFromReferenceIn; + uint32_t LTCSourceIndex; //[1...x] }; class AJA_API AJASyncChannel diff --git a/Engine/Plugins/Media/BlackmagicMedia/BlackmagicMedia.uplugin b/Engine/Plugins/Media/BlackmagicMedia/BlackmagicMedia.uplugin new file mode 100644 index 000000000000..e9a8e6b555a5 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/BlackmagicMedia.uplugin @@ -0,0 +1,42 @@ +{ + "FileVersion" : 3, + + "FriendlyName" : "Blackmagic Media Player", + "Version" : 1, + "VersionName" : "1.0", + "CreatedBy" : "Epic Games Inc", + "CreatedByURL" : "http://epicgames.com", + "Description" : "Implements input and output using Blackmagic Capture cards.", + "Category" : "Media Players", + "CanContainContent" : false, + "EnabledByDefault" : false, + "IsBetaVersion": false, + + "Modules" : + [ + { + "Name" : "BlackmagicMedia", + "Type" : "RuntimeNoCommandlet", + "LoadingPhase" : "PreLoadingScreen", + "WhitelistPlatforms" : [ 
"Win64" ] + }, + { + "Name": "BlackmagicMediaOutput", + "Type" : "RuntimeNoCommandlet", + "LoadingPhase" : "PreLoadingScreen", + "WhitelistPlatforms" : [ "Win64" ] + }, + { + "Name" : "BlackmagicMediaFactory", + "Type": "RuntimeNoCommandlet", + "LoadingPhase" : "PostEngineInit", + "WhitelistPlatforms" : [ "Win64" ] + }, + { + "Name" : "BlackmagicMediaEditor", + "Type" : "Editor", + "LoadingPhase" : "PostEngineInit", + "WhitelistPlatforms" : [ "Win64" ] + } + ] +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/BlackmagicMedia.Build.cs b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/BlackmagicMedia.Build.cs new file mode 100644 index 000000000000..95cb385c5334 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/BlackmagicMedia.Build.cs @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +using System.IO; + +namespace UnrealBuildTool.Rules +{ + using System.IO; + + public class BlackmagicMedia : ModuleRules + { + public BlackmagicMedia(ReadOnlyTargetRules Target) : base(Target) + { + PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs; + + DynamicallyLoadedModuleNames.AddRange( + new string[] { + "Media", + }); + + PrivateDependencyModuleNames.AddRange( + new string[] { + "Blackmagic", + "Core", + "CoreUObject", + "Engine", + "MediaIOCore", + "MediaUtils", + "Projects", + "TimeManagement", + }); + + PrivateIncludePathModuleNames.AddRange( + new string[] { + "Media", + }); + + PrivateIncludePaths.AddRange( + new string[] { + "BlackmagicMedia/Private", + "BlackmagicMedia/Private/Blackmagic", + "BlackmagicMedia/Private/Assets", + "BlackmagicMedia/Private/Player", + "BlackmagicMedia/Private/Shared", + }); + + PublicDependencyModuleNames.AddRange( + new string[] { + "MediaAssets", + }); + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicCustomTimeStep.cpp 
b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicCustomTimeStep.cpp new file mode 100644 index 000000000000..eec8c03a6613 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicCustomTimeStep.cpp @@ -0,0 +1,144 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicCustomTimeStep.h" +#include "BlackmagicMediaPrivate.h" +#include "BlackmagicHardwareSync.h" + +#include "Misc/App.h" + +UBlackmagicCustomTimeStep::UBlackmagicCustomTimeStep(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) + , FixedFPS(30) + , bEnableOverrunDetection(false) + , AudioChannels(EBlackmagicMediaAudioChannel::Stereo2) + , Device(nullptr) + , Port(nullptr) + , State(ECustomTimeStepSynchronizationState::Closed) +{ +} + +bool UBlackmagicCustomTimeStep::Initialize(class UEngine* InEngine) +{ + State = ECustomTimeStepSynchronizationState::Closed; + + if (!MediaPort.IsValid()) + { + State = ECustomTimeStepSynchronizationState::Error; + UE_LOG(LogBlackmagicMedia, Warning, TEXT("The Source of '%s' is not valid."), *GetName()); + return false; + } + + Device = BlackmagicDevice::VideoIOCreateDevice(MediaPort.DeviceIndex); + if (!Device) + { + State = ECustomTimeStepSynchronizationState::Error; + UE_LOG(LogBlackmagicMedia, Warning, TEXT("The Blackmagic Device for '%s' could not be created."), *GetName()); + return false; + } + + const uint32_t PortIndex = MediaPort.PortIndex; + + BlackmagicDevice::FFrameDesc FrameDesc; + // Blackmagic requires YUV for input + FrameDesc.PixelFormat = BlackmagicDevice::EPixelFormat::PF_UYVY; + + BlackmagicDevice::FPortOptions Options = {}; + Options.bUseTimecode = true; + + if (AudioChannels == EBlackmagicMediaAudioChannel::Surround8) + { + Options.AudioChannels = 8; + } + else + { + Options.AudioChannels = 2; + } + + Port = BlackmagicDevice::VideoIODeviceOpenSharedPort(Device, PortIndex, FrameDesc, Options); + + if (!Port) + { + 
State = ECustomTimeStepSynchronizationState::Error; + UE_LOG(LogBlackmagicMedia, Warning, TEXT("The Blackmagic port for '%s' could not be opened."), *GetName()); + BlackmagicDevice::VideoIOReleaseDevice(Device); + Device = nullptr; + return false; + } + + if (bEnableOverrunDetection) + { + TSharedPtr HardwareSync = MakeShared(Port); + VSyncThread = MakeUnique(HardwareSync); + VSyncRunnableThread.Reset(FRunnableThread::Create(VSyncThread.Get(), TEXT("UBlackmagicCustomTimeStep::FBlackmagicMediaWaitVSyncThread"), TPri_AboveNormal)); + } + + State = ECustomTimeStepSynchronizationState::Synchronizing; + return true; +} + +void UBlackmagicCustomTimeStep::Shutdown(class UEngine* InEngine) +{ + State = ECustomTimeStepSynchronizationState::Closed; + + if (VSyncRunnableThread.IsValid()) + { + check(VSyncThread.IsValid()); + VSyncThread->Stop(); + VSyncRunnableThread->WaitForCompletion(); // Wait for the thread to return. + VSyncRunnableThread.Reset(); + VSyncThread.Reset(); + } + + if (Port) + { + Port->Release(); + Port = nullptr; + } + + if (Device) + { + BlackmagicDevice::VideoIOReleaseDevice(Device); + Device = nullptr; + } +} + +bool UBlackmagicCustomTimeStep::UpdateTimeStep(class UEngine* InEngine) +{ + bool bRunEngineTimeStep = true; + if (Port && (State == ECustomTimeStepSynchronizationState::Synchronized || State == ECustomTimeStepSynchronizationState::Synchronizing)) + { + WaitForVSync(); + + // Updates logical last time to match logical current time from last tick + FApp::UpdateLastTime(); + + // Use fixed delta time and update time. 
+ const float FrameRate = 1.f / FixedFPS; + FApp::SetDeltaTime(FrameRate); + FApp::SetCurrentTime(FPlatformTime::Seconds()); + bRunEngineTimeStep = false; + + State = ECustomTimeStepSynchronizationState::Synchronized; + } + + return bRunEngineTimeStep; +} + +ECustomTimeStepSynchronizationState UBlackmagicCustomTimeStep::GetSynchronizationState() const +{ + return State; +} + +void UBlackmagicCustomTimeStep::WaitForVSync() const +{ + if (VSyncThread.IsValid()) + { + VSyncThread->Wait_GameOrRenderThread(); + } + else + { + Port->WaitVSync(); + } +} + + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaFinder.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaFinder.cpp new file mode 100644 index 000000000000..a78b4cb0c67b --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaFinder.cpp @@ -0,0 +1,215 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaFinder.h" + +#include "Blackmagic.h" +#include "BlackmagicLib.h" + +/* + * FBlackmagicMediaSourceId interface + */ + +const TCHAR* FBlackmagicMediaPort::Protocol = TEXT("blackmagic"); + +namespace BlackmagicMediaSourceId +{ + const TCHAR* ProtocolPath = TEXT("blackmagic://"); + const TCHAR* Device = TEXT("device"); + const TCHAR* Port = TEXT("port"); + + const int32 ProtocolLength = FPlatformString::Strlen(BlackmagicMediaSourceId::ProtocolPath); + const int32 DeviceLength = FPlatformString::Strlen(BlackmagicMediaSourceId::Device); + const int32 InputLength = FPlatformString::Strlen(BlackmagicMediaSourceId::Port); + + const int32 DeviceNameBufferSize = 64; + const int32 ModeNameBufferSize = 64; + + bool GetNumber(TCHAR InInput, int32& OutId) + { + OutId = 0; + if (InInput < '0' && InInput > '9') + { + return false; + } + OutId = InInput - '0'; + return true; + } +} + +FBlackmagicMediaPort::FBlackmagicMediaPort() + : DeviceIndex(INDEX_NONE) + , PortIndex(INDEX_NONE) +{ } + +FBlackmagicMediaPort::FBlackmagicMediaPort(const FString& InDeviceName, int32 InDeviceIndex, int32 InPortIndex) + : DeviceName(InDeviceName) + , DeviceIndex(InDeviceIndex) + , PortIndex(InPortIndex) +{ } + +FString FBlackmagicMediaPort::ToString() const +{ + return FString::Printf(TEXT("%s [%s]"), *DeviceName, *ToUrl()); +} + +FString FBlackmagicMediaPort::ToUrl() const +{ + return FString::Printf(TEXT("blackmagic://device%d/port%d"), DeviceIndex, (PortIndex)); +} + +bool FBlackmagicMediaPort::IsValid() const +{ + return DeviceIndex != INDEX_NONE && PortIndex != INDEX_NONE; +} + +bool FBlackmagicMediaPort::FromUrl(const FString& Url, bool bDiscoverDeviceName) +{ + const int32 NumberLength = 1; + const int32 SlashLength = 1; + const int32 ValidUrlLength = BlackmagicMediaSourceId::ProtocolLength + BlackmagicMediaSourceId::DeviceLength + NumberLength + SlashLength + BlackmagicMediaSourceId::InputLength + NumberLength; + + DeviceName.Reset(); + DeviceIndex = INDEX_NONE; + 
PortIndex = INDEX_NONE; + + if (Url.Len() != ValidUrlLength || !Url.StartsWith(BlackmagicMediaSourceId::ProtocolPath)) + { + return false; + } + + int32 TempDeviceIndex; + int32 TempPortIndex; + if(!BlackmagicMediaSourceId::GetNumber(Url[BlackmagicMediaSourceId::ProtocolLength + BlackmagicMediaSourceId::DeviceLength], TempDeviceIndex) + || !BlackmagicMediaSourceId::GetNumber(Url[BlackmagicMediaSourceId::ProtocolLength + BlackmagicMediaSourceId::DeviceLength + NumberLength + SlashLength + BlackmagicMediaSourceId::InputLength], TempPortIndex)) + { + return false; + } + + DeviceIndex = TempDeviceIndex; + PortIndex = TempPortIndex; + + bool bResult = true; + if (bDiscoverDeviceName) + { + DeviceName.Reset(); + + bResult = FBlackmagic::IsInitialized(); + if (bResult) + { + BlackmagicDevice::FDeviceScanner DeviceScanner = BlackmagicDevice::VideoIOCreateDeviceScanner(); + if (DeviceScanner) + { + BlackmagicDevice::VideoIODeviceScannerScanHardware(DeviceScanner); + bResult = DeviceScanner != nullptr; + if (bResult) + { + uint32 NumDevices = BlackmagicDevice::VideoIODeviceScannerGetNumDevices(DeviceScanner); + bResult = (uint32)DeviceIndex < NumDevices; + if (bResult) + { + BlackmagicDevice::FDeviceInfo DeviceInfo = BlackmagicDevice::VideoIODeviceScannerGetDeviceInfo(DeviceScanner, PortIndex); + TCHAR DeviceNameBuffer[BlackmagicMediaSourceId::DeviceNameBufferSize]; + BlackmagicDevice::VideoIODeviceInfoGetDeviceId(DeviceInfo, DeviceNameBuffer, BlackmagicMediaSourceId::DeviceNameBufferSize); + BlackmagicDevice::VideoIOReleaseDeviceInfo(DeviceInfo); + DeviceName = DeviceNameBuffer; + } + + BlackmagicDevice::VideoIOReleaseDeviceScanner(DeviceScanner); + } + } + } + } + + return bResult; +} + +/* + * UBlackmagicMediaMode interface + */ + +FBlackmagicMediaMode::FBlackmagicMediaMode() + : Mode(INDEX_NONE) +{ +} + +FBlackmagicMediaMode::FBlackmagicMediaMode(const FString& InModeName, int32 InMode) + : ModeName(InModeName) + , Mode(InMode) +{ +} + +FString 
FBlackmagicMediaMode::ToString() const +{ + return FString::Printf(TEXT("%s [%d]"), *ModeName, Mode); +} + +FString FBlackmagicMediaMode::ToUrl() const +{ + return ToString(); +} + +bool FBlackmagicMediaMode::IsValid() const +{ + return Mode != INDEX_NONE; // && PortIndex != INDEX_NONE; +} + +/* + * UBlackmagicMediaFinder interface + */ + +bool UBlackmagicMediaFinder::GetSources(TArray& OutSources) +{ + OutSources.Reset(); + if (!FBlackmagic::IsInitialized()) + { + return false; + } + + BlackmagicDevice::FDeviceScanner DeviceScanner = BlackmagicDevice::VideoIOCreateDeviceScanner(); + if (DeviceScanner) + { + BlackmagicDevice::VideoIODeviceScannerScanHardware(DeviceScanner); + + uint32 NumDevices = BlackmagicDevice::VideoIODeviceScannerGetNumDevices(DeviceScanner); + for (uint32 SourceIndex = 0; SourceIndex < NumDevices; ++SourceIndex) + { + BlackmagicDevice::FDeviceInfo DeviceInfo = BlackmagicDevice::VideoIODeviceScannerGetDeviceInfo(DeviceScanner, SourceIndex); + TCHAR DeviceName[BlackmagicMediaSourceId::DeviceNameBufferSize]; + BlackmagicDevice::VideoIODeviceInfoGetDeviceId(DeviceInfo, DeviceName, BlackmagicMediaSourceId::DeviceNameBufferSize); + + uint32 InputCount = BlackmagicDevice::VideoIODeviceInfoGetVidInputs(DeviceInfo); + for (uint32 Inputs = 0; Inputs < InputCount; ++Inputs) + { + OutSources.Add(FBlackmagicMediaPort(DeviceName, SourceIndex, Inputs+1)); + } + BlackmagicDevice::VideoIOReleaseDeviceInfo(DeviceInfo); + } + BlackmagicDevice::VideoIOReleaseDeviceScanner(DeviceScanner); + } + + return true; +} + +bool UBlackmagicMediaFinder::GetModes(TArray& OutModes, bool bInOutput) +{ + OutModes.Reset(); + if (!FBlackmagic::IsInitialized()) + { + return false; + } + + uint32 NumModes = BlackmagicDevice::VideoIOModeCount(); + TCHAR ModeName[BlackmagicMediaSourceId::ModeNameBufferSize]; + for (uint32 Mode = 0; Mode < NumModes; ++Mode) + { + if (BlackmagicDevice::VideoIOModeNames(Mode, + bInOutput? 
BlackmagicDevice::EModeFilter::MF_OUTPUT : BlackmagicDevice::EModeFilter::MF_INPUT, + ModeName, + BlackmagicMediaSourceId::ModeNameBufferSize)) + { + OutModes.Add(FBlackmagicMediaMode(ModeName, Mode)); + } + } + + return true; +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaOutput.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaOutput.cpp new file mode 100644 index 000000000000..06af764bac17 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaOutput.cpp @@ -0,0 +1,21 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicMediaOutput.h" +#include "BlackmagicMediaPrivate.h" + + +/* UBlackmagicMediaOutput +*****************************************************************************/ + +UBlackmagicMediaOutput::UBlackmagicMediaOutput() + : OutputType(EBlackmagicMediaOutputType::FillOnly) + , bClearBuffer(false) + , ClearBufferColor(FColor::Green) + , bOutputTimecode(true) + , bCopyOnRenderThread(true) + , bWaitForOutputFrame(true) + , bWaitForVSyncEvent(true) + , bVSyncEventOnAnotherThread(false) + , bEncodeTimecodeInTexel(false) +{ +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaSource.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaSource.cpp new file mode 100644 index 000000000000..c0bf2a57b499 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicMediaSource.cpp @@ -0,0 +1,96 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaSource.h" +#include "BlackmagicMediaPrivate.h" + +UBlackmagicMediaSource::UBlackmagicMediaSource() + : UseTimecode(false) + , CaptureStyle(EBlackmagicMediaCaptureStyle::AudioVideo) + , AudioChannels(EBlackmagicMediaAudioChannel::Stereo2) + , bEncodeTimecodeInTexel(false) + , UseStreamBuffer(false) + , NumberFrameBuffers(8) +{ } + +/* + * IMediaOptions interface + */ + +int64 ConveryAudioEnumToChannels(EBlackmagicMediaAudioChannel InAudioEnum) +{ + switch (InAudioEnum) + { + case EBlackmagicMediaAudioChannel::Surround8: + return 8; + } + return 2; +} + +bool UBlackmagicMediaSource::GetMediaOption(const FName& Key, bool DefaultValue) const +{ + if (Key == BlackmagicMedia::UseTimecodeOption) + { + return UseTimecode; + } + else if (Key == BlackmagicMedia::UseStreamBufferOption) + { + return UseStreamBuffer; + } + else if (Key == BlackmagicMedia::EncodeTimecodeInTexel) + { + return bEncodeTimecodeInTexel; + } + + return Super::GetMediaOption(Key, DefaultValue); +} + +int64 UBlackmagicMediaSource::GetMediaOption(const FName& Key, int64 DefaultValue) const +{ + if (Key == BlackmagicMedia::AudioChannelOption) + { + return (int64)ConveryAudioEnumToChannels(AudioChannels); + } + else if (Key == BlackmagicMedia::CaptureStyleOption) + { + return (int64)CaptureStyle; + } + else if (Key == BlackmagicMedia::MediaModeOption) + { + return (int64)MediaMode.Mode; + } + else if (Key == BlackmagicMedia::NumFrameBufferOption) + { + return (int64)NumberFrameBuffers; + } + + return Super::GetMediaOption(Key, DefaultValue); +} + +bool UBlackmagicMediaSource::HasMediaOption(const FName& Key) const +{ + if ((Key == BlackmagicMedia::AudioChannelOption) || + (Key == BlackmagicMedia::CaptureStyleOption) || + (Key == BlackmagicMedia::MediaModeOption) || + (Key == BlackmagicMedia::NumFrameBufferOption) || + (Key == BlackmagicMedia::UseStreamBufferOption) || + (Key == BlackmagicMedia::UseTimecodeOption)) + { + return true; + } + + return Super::HasMediaOption(Key); +} 
+ +/* + * UMediaSource interface + */ + +FString UBlackmagicMediaSource::GetUrl() const +{ + return MediaPort.ToUrl(); +} + +bool UBlackmagicMediaSource::Validate() const +{ + return MediaPort.IsValid(); +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicTimecodeProvider.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicTimecodeProvider.cpp new file mode 100644 index 000000000000..485e3a85010b --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Assets/BlackmagicTimecodeProvider.cpp @@ -0,0 +1,167 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicTimecodeProvider.h" +#include "BlackmagicMediaPrivate.h" +#include "Blackmagic.h" + + +//~ BlackMagicDevice::IPortCallbackInterface implementation +//-------------------------------------------------------------------- +// Those are called from the Blackmagic thread. There's a lock inside the Blackmagic layer +// to prevent this object from dying while in this thread. + +struct UBlackmagicTimecodeProvider::FCallbackHandler : public BlackmagicDevice::IPortCallback +{ + FCallbackHandler(UBlackmagicTimecodeProvider* InOwner) + : Owner(InOwner) + {} + + //~ BlackmagicDevice::IPortCallback interface + virtual void OnInitializationCompleted(bool bSucceed) override + { + Owner->State = bSucceed ? ETimecodeProviderSynchronizationState::Synchronized : ETimecodeProviderSynchronizationState::Error; + if (!bSucceed) + { + UE_LOG(LogBlackmagicMedia, Error, TEXT("The initialization of '%s' failed. 
The TimecodeProvider won't be synchronized."), *Owner->GetName()); + } + } + virtual bool OnFrameArrived(BlackmagicDevice::FFrame InFrame) + { + return false; + } + +protected: + UBlackmagicTimecodeProvider* Owner; +}; + + +//~ UBlackmagicTimecodeProvider implementation +//-------------------------------------------------------------------- +UBlackmagicTimecodeProvider::UBlackmagicTimecodeProvider(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) + , AudioChannels(EBlackmagicMediaAudioChannel::Stereo2) + , Device(nullptr) + , Port(nullptr) + , CallbackHandler(nullptr) + , bIsRunning(false) + , State(ETimecodeProviderSynchronizationState::Closed) +{ +} + +FTimecode UBlackmagicTimecodeProvider::GetTimecode() const +{ + if (Port) + { + BlackmagicDevice::FTimecode Timecode; + if (Port->GetTimecode(Timecode)) + { + return FTimecode(Timecode.Hours, Timecode.Minutes, Timecode.Seconds, Timecode.Frames, Timecode.bIsDropFrame); + } + else if (State == ETimecodeProviderSynchronizationState::Synchronized) + { + const_cast(this)->State = ETimecodeProviderSynchronizationState::Error; + } + } + return FTimecode(); +} + +bool UBlackmagicTimecodeProvider::Initialize(class UEngine* InEngine) +{ + State = ETimecodeProviderSynchronizationState::Closed; + + if (!MediaPort.IsValid()) + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("The Source of '%s' is not valid."), *GetName()); + State = ETimecodeProviderSynchronizationState::Error; + return false; + } + + // create the device + Device = BlackmagicDevice::VideoIOCreateDevice(MediaPort.DeviceIndex); + + if (Device == nullptr) + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("Can't aquire the Blackmagic device.")); + return false; + } + + BlackmagicDevice::FPortOptions Options; + FMemory::Memset(&Options, 0, sizeof(Options)); + // to enable the OnInitializationCompleted callback + Options.bUseSync = true; + Options.bUseTimecode = true; + + // TODO: configure audio + if (AudioChannels == 
EBlackmagicMediaAudioChannel::Surround8) + { + Options.AudioChannels = 8; + } + else + { + Options.AudioChannels = 2; + } + + BlackmagicDevice::FFrameDesc FrameDesc; + + // Blackmagic requires YUV for input + FrameDesc.PixelFormat = BlackmagicDevice::EPixelFormat::PF_UYVY; + Port = BlackmagicDevice::VideoIODeviceOpenSharedPort(Device, MediaPort.PortIndex, FrameDesc, Options); + + if (Port == nullptr) + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("Can't aquire the Blackmagic port.")); + ReleaseResources(); + return false; + } + + bIsRunning = true; + + check(CallbackHandler == nullptr); + CallbackHandler = new FCallbackHandler(this); + Port->SetCallback(CallbackHandler); + + return true; +} + +void UBlackmagicTimecodeProvider::Shutdown(class UEngine* InEngine) +{ + State = ETimecodeProviderSynchronizationState::Closed; + ReleaseResources(); +} + +void UBlackmagicTimecodeProvider::BeginDestroy() +{ + ReleaseResources(); + Super::BeginDestroy(); +} + +void UBlackmagicTimecodeProvider::ReleaseResources() +{ + // Stop if we are running + if (bIsRunning && Port) + { + bIsRunning = false; + } + // cleanup the callback handler + if (CallbackHandler && Port) + { + if (Port->SetCallback(nullptr)) + { + delete CallbackHandler; + CallbackHandler = nullptr; + } + } + // close the port + if (Port) + { + Port->Release(); + delete Port; + Port = nullptr; + } + // close the device + if (Device) + { + BlackmagicDevice::VideoIOReleaseDevice(Device); + Device = nullptr; + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.cpp new file mode 100644 index 000000000000..27ae04ca07fe --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.cpp @@ -0,0 +1,114 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Blackmagic.h" +#include "BlackmagicMediaPrivate.h" + +#include "Interfaces/IPluginManager.h" +#include "HAL/PlatformProcess.h" +#include "Misc/Paths.h" + +/* + * Static initialization + */ + +void* FBlackmagic::LibHandle = nullptr; + +bool FBlackmagic::Initialize() +{ +#if BLACKMAGICMEDIA_DLL_PLATFORM + check(LibHandle == nullptr); + +#if BLACKMAGICMEDIA_DLL_DEBUG + const FString VideoIODll = TEXT("BlackmagicLibd.dll"); +#else + const FString VideoIODll = TEXT("BlackmagicLib.dll"); +#endif // BLACKMAGICMEDIA_DLL_DEBUG + + // determine directory paths + FString DllPath = FPaths::Combine(IPluginManager::Get().FindPlugin(TEXT("BlackmagicMedia"))->GetBaseDir(), TEXT("/Binaries/ThirdParty/Win64")); + FPlatformProcess::PushDllDirectory(*DllPath); + DllPath = FPaths::Combine(DllPath, VideoIODll); + + if (!FPaths::FileExists(DllPath)) + { + UE_LOG(LogBlackmagicMedia, Error, TEXT("Failed to find the binary folder for the dll. Plug-in will not be functional.")); + return false; + } + + LibHandle = FPlatformProcess::GetDllHandle(*DllPath); + if (LibHandle == nullptr) + { + UE_LOG(LogBlackmagicMedia, Error, TEXT("Failed to load required library %s. Plug-in will not be functional."), *VideoIODll); + return false; + } + +#if !NO_LOGGING + BlackmagicDevice::VideoIOSetLoggingCallbacks(&LogInfo, &LogWarning, &LogError); +#endif // !NO_LOGGING + + return true; +#else + return false; +#endif // BLACKMAGICMEDIA_DLL_PLATFORM +} + +bool FBlackmagic::IsInitialized() +{ + return (LibHandle != nullptr); +} + +void FBlackmagic::Shutdown() +{ +#if BLACKMAGICMEDIA_DLL_PLATFORM + if (LibHandle != nullptr) + { +#if !NO_LOGGING + BlackmagicDevice::VideoIOSetLoggingCallbacks(nullptr, nullptr, nullptr); +#endif // !NO_LOGGING + FPlatformProcess::FreeDllHandle(LibHandle); + LibHandle = nullptr; + } +#endif // BLACKMAGICMEDIA_DLL_PLATFORM +} + +void FBlackmagic::LogInfo(const TCHAR* InFormat, ...) 
+{ +#if !NO_LOGGING + TCHAR TempString[1024]; + va_list Args; + + va_start(Args, InFormat ); + FCString::GetVarArgs(TempString, ARRAY_COUNT(TempString), ARRAY_COUNT(TempString) - 1, InFormat, Args); + va_end(Args); + + UE_LOG(LogBlackmagicMedia, Log, TempString); +#endif // !NO_LOGGIN +} + +void FBlackmagic::LogWarning(const TCHAR* InFormat, ...) +{ +#if !NO_LOGGING + TCHAR TempString[1024]; + va_list Args; + + va_start(Args, InFormat ); + FCString::GetVarArgs(TempString, ARRAY_COUNT(TempString), ARRAY_COUNT(TempString) - 1, InFormat, Args); + va_end(Args); + + UE_LOG(LogBlackmagicMedia, Warning, TempString); +#endif // !NO_LOGGIN +} + +void FBlackmagic::LogError(const TCHAR* InFormat, ...) +{ +#if !NO_LOGGING + TCHAR TempString[1024]; + va_list Args; + + va_start(Args, InFormat ); + FCString::GetVarArgs(TempString, ARRAY_COUNT(TempString), ARRAY_COUNT(TempString) - 1, InFormat, Args); + va_end(Args); + + UE_LOG(LogBlackmagicMedia, Error, TempString); +#endif // !NO_LOGGING +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.h new file mode 100644 index 000000000000..af999650fe55 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Blackmagic/Blackmagic.h @@ -0,0 +1,21 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreTypes.h" + +class FBlackmagic +{ +public: + static bool Initialize(); + static bool IsInitialized(); + static void Shutdown(); + +private: + static void LogInfo(const TCHAR* InFormat, ...); + static void LogWarning(const TCHAR* InFormat, ...); + static void LogError(const TCHAR* InFormat, ...); + +private: + static void* LibHandle; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaModule.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaModule.cpp new file mode 100644 index 000000000000..9cbef81f21c8 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaModule.cpp @@ -0,0 +1,148 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "IBlackmagicMediaModule.h" + +#include "Blackmagic/Blackmagic.h" +#include "BlackmagicMediaPlayer.h" +#include "BlackmagicCustomTimeStep.h" +#include "BlackmagicTimecodeProvider.h" + +#include "Engine/Engine.h" + +#include "ITimeManagementModule.h" +#include "Modules/ModuleManager.h" +#include "UObject/StrongObjectPtr.h" + + +DEFINE_LOG_CATEGORY(LogBlackmagicMedia); + +#define LOCTEXT_NAMESPACE "BlackmagicMediaModule" + +/** + * Implements the BlackmagicMedia module. 
+ */ +class FBlackmagicMediaModule : public IBlackmagicMediaModule, public FSelfRegisteringExec +{ +public: + + //~ IBlackmagicMediaModule interface + virtual TSharedPtr<IMediaPlayer, ESPMode::ThreadSafe> CreatePlayer(IMediaEventSink& EventSink) override + { + if (!FBlackmagic::IsInitialized()) + { + return nullptr; + } + + return MakeShared<FBlackmagicMediaPlayer, ESPMode::ThreadSafe>(EventSink); + } + + virtual bool IsInitialized() const override { return FBlackmagic::IsInitialized(); } + +public: + + //~ IModuleInterface interface + virtual void StartupModule() override + { + // initialize + if (!FBlackmagic::Initialize()) + { + UE_LOG(LogBlackmagicMedia, Error, TEXT("Failed to initialize Blackmagic")); + return; + } + } + + virtual void ShutdownModule() override + { + FBlackmagic::Shutdown(); + } + + TStrongObjectPtr<UBlackmagicCustomTimeStep> CustomTimeStep; + TStrongObjectPtr<UBlackmagicTimecodeProvider> TimecodeProvider; + + virtual bool Exec(UWorld* Inworld, const TCHAR* Cmd, FOutputDevice& Ar) override + { + if (FParse::Command(&Cmd, TEXT("Blackmagic"))) + { + if (FParse::Command(&Cmd, TEXT("CustomTimeStep"))) + { + if (FParse::Command(&Cmd, TEXT("Start"))) + { + CustomTimeStep.Reset(NewObject<UBlackmagicCustomTimeStep>()); + + CustomTimeStep->MediaPort.PortIndex = 1; + CustomTimeStep->MediaPort.DeviceIndex = 0; + FParse::Value(Cmd, TEXT("Port="), CustomTimeStep->MediaPort.PortIndex); + FParse::Value(Cmd, TEXT("Device="), CustomTimeStep->MediaPort.DeviceIndex); + FParse::Bool(Cmd, TEXT("EnableOverrunDetection="), CustomTimeStep->bEnableOverrunDetection); + + int32 AudioChannels = 2; + EBlackmagicMediaAudioChannel MediaAudioChannel = EBlackmagicMediaAudioChannel::Stereo2; + if (FParse::Value(Cmd, TEXT("AudioChannels="), AudioChannels)) + { + if (AudioChannels == 8) + { + MediaAudioChannel = EBlackmagicMediaAudioChannel::Surround8; + } + } + CustomTimeStep->AudioChannels = MediaAudioChannel; + + { + GEngine->SetCustomTimeStep(CustomTimeStep.Get()); + } + } + else if (FParse::Command(&Cmd, TEXT("Stop"))) + { + if (GEngine->GetCustomTimeStep() == CustomTimeStep.Get()) + { + GEngine->SetCustomTimeStep(nullptr); + } + 
CustomTimeStep.Reset(); + } + return true; + } + + if (FParse::Command(&Cmd, TEXT("TimecodeProvider"))) + { + if (FParse::Command(&Cmd, TEXT("Start"))) + { + TimecodeProvider.Reset(NewObject<UBlackmagicTimecodeProvider>()); + + // ports are numbered from 1 + TimecodeProvider->MediaPort.PortIndex = 1; + TimecodeProvider->MediaPort.DeviceIndex = 0; + FParse::Value(Cmd, TEXT("Port="), TimecodeProvider->MediaPort.PortIndex); + FParse::Value(Cmd, TEXT("Device="), TimecodeProvider->MediaPort.DeviceIndex); + FParse::Value(Cmd, TEXT("Numerator="), TimecodeProvider->FrameRate.Numerator); + FParse::Value(Cmd, TEXT("Denominator="), TimecodeProvider->FrameRate.Denominator); + + int32 AudioChannels = 2; + EBlackmagicMediaAudioChannel MediaAudioChannel = EBlackmagicMediaAudioChannel::Stereo2; + + if (FParse::Value(Cmd, TEXT("AudioChannels="), AudioChannels)) + { + if (AudioChannels == 8) + { + MediaAudioChannel = EBlackmagicMediaAudioChannel::Surround8; + } + } + TimecodeProvider->AudioChannels = MediaAudioChannel; + + GEngine->SetTimecodeProvider(TimecodeProvider.Get()); + } + else if (FParse::Command(&Cmd, TEXT("Stop"))) + { + if (GEngine->GetTimecodeProvider() == TimecodeProvider.Get()) + { + GEngine->SetTimecodeProvider(nullptr); + } + TimecodeProvider.Reset(); + } + return true; + } + } + return false; + } +}; + +IMPLEMENT_MODULE(FBlackmagicMediaModule, BlackmagicMedia); + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaPrivate.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaPrivate.h new file mode 100644 index 000000000000..4668279e8bab --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/BlackmagicMediaPrivate.h @@ -0,0 +1,42 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#if PLATFORM_WINDOWS +#include "Windows/WindowsHWrapper.h" +#include "Windows/AllowWindowsPlatformTypes.h" +#include + +#include "BlackmagicLib.h" + +#include "Windows/HideWindowsPlatformTypes.h" +#endif + +#include "CoreMinimal.h" +#include "BlackmagicMediaSettings.h" + +DECLARE_LOG_CATEGORY_EXTERN(LogBlackmagicMedia, Log, All); + +namespace BlackmagicMedia +{ + /** Name of the UseTimecode media option. */ + static const FName UseTimecodeOption("UseTimecode"); + + /** Name of the UseStreamBuffer media option. */ + static const FName UseStreamBufferOption("UseStreamBuffer"); + + /** Debug feature: to encode the timecode into small square on frame */ + static const FName EncodeTimecodeInTexel("EncodeTimecodeInTexel"); + + /** Name of the CaptureStyle media option. */ + static const FName CaptureStyleOption("CaptureStyle"); + + /** Name of the MediaMode media option. */ + static const FName MediaModeOption("MediaMode"); + + /** Name of the NumFrameBuffer media option. */ + static const FName NumFrameBufferOption("NumFrameBuffer"); + + /** Enum for number of audio channels to capture. */ + static const FName AudioChannelOption("AudioChannel"); +} \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaAudioSample.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaAudioSample.h new file mode 100644 index 000000000000..c27e873da3d9 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaAudioSample.h @@ -0,0 +1,59 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "MediaIOCoreAudioSampleBase.h" +#include "BlackmagicMediaPrivate.h" + +/* + * Implements a media audio sample. + */ +class FBlackmagicMediaAudioSample : public FMediaIOCoreAudioSampleBase +{ +public: + + /** Default constructor. 
 */ + FBlackmagicMediaAudioSample() + { } + +public: + + /** + * Initialize the sample. + * + * @param InFrame The audio frame data. + * @param InTime The sample time (in the player's own clock). + * @result true on success, false otherwise. + */ + bool Initialize(const BlackmagicDevice::FFrame InFrame, FTimespan InTime) + { + uint32_t TmpSize, TmpNumberOfChannels, TmpAudioRate, TmpNumSamples; + int32_t* TmpAudioBuffer = BlackmagicDevice::VideoIOFrameAudioBuffer(InFrame, TmpSize, TmpNumberOfChannels, TmpAudioRate, TmpNumSamples); + + if (TmpAudioBuffer) + { + Channels = TmpNumberOfChannels; + SampleRate = TmpAudioRate; + Time = InTime; + Duration = (TmpSize * ETimespan::TicksPerSecond) / (Channels * SampleRate * sizeof(int32)); + + Buffer.Reset(TmpSize); + Buffer.Append(TmpAudioBuffer, TmpSize); + return true; + } + else + { + Channels = SampleRate = 0; + Buffer.Reset(); + } + return false; + } +}; + +/* + * Implements a pool for Blackmagic audio sample objects. + */ + +class FBlackmagicMediaAudioSamplePool : public TMediaObjectPool<FBlackmagicMediaAudioSample> +{ +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.cpp new file mode 100644 index 000000000000..bc7499ce75ef --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.cpp @@ -0,0 +1,400 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaPlayer.h" +#include "BlackmagicMediaPrivate.h" + +#include "HAL/PlatformProcess.h" +#include "IMediaEventSink.h" +#include "IMediaOptions.h" + +#include "MediaIOCoreSamples.h" +#include "MediaIOCoreEncodeTime.h" + +#include "Misc/ScopeLock.h" + +#include "Engine/GameEngine.h" +#include "Slate/SceneViewport.h" + +#include "BlackmagicMediaAudioSample.h" +#include "BlackmagicMediaSettings.h" +#include "BlackmagicMediaSource.h" +#include "BlackmagicMediaTextureSample.h" + +#include "BlackmagicMediaAllowPlatformTypes.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaPlayer" + +namespace BlackMagicMediaPlayerHelpers +{ + FTimespan ConvertTimecode2Timespan(const BlackmagicDevice::FTimecode& InTimecode, const BlackmagicDevice::FTimecode& PreviousTimecode, const FTimespan& PreviousTimespan, const FFrameRate& InFPS) + { + check(InFPS.IsValid()); + + //With FrameRate faster than 30FPS, max frame number will still be small than 30 + //Get by how much we need to divide the actual count. + const float FrameRate = InFPS.AsDecimal(); + const float DividedFrameRate = FrameRate > 30.0f ? (FrameRate * 30.0f) / FrameRate : FrameRate; + + FTimespan NewTimespan; + if (PreviousTimecode == InTimecode) + { + NewTimespan = PreviousTimespan + FTimespan::FromSeconds(InFPS.AsInterval()); + } + else + { + NewTimespan = FTimespan(0, InTimecode.Hours, InTimecode.Minutes, InTimecode.Seconds, static_cast((ETimespan::TicksPerSecond * InTimecode.Frames) / DividedFrameRate) * ETimespan::NanosecondsPerTick); + } + return NewTimespan; + } +} + +//~ BlackMagicDevice::IPortCallbackInterface implementation +//-------------------------------------------------------------------- +// Those are called from the Blackmagic thread. There's a lock inside the Blackmagic layer +// to prevent this object from dying while in this thread. 
+ +struct FBlackmagicMediaPlayer::FCallbackHandler : public BlackmagicDevice::IPortCallback +{ + FCallbackHandler(FBlackmagicMediaPlayer* InOwner) + : Owner(InOwner) + { + } + + //~ BlackmagicDevice::IPortCallback interface + virtual void OnInitializationCompleted(bool bSucceed) override + { + } + virtual bool OnFrameArrived(BlackmagicDevice::FFrame InFrame) override + { + return Owner->OnFrameArrived(InFrame); + } + +protected: + FBlackmagicMediaPlayer* Owner; +}; + +/* FBlackmagicMediaPlayer structors +*****************************************************************************/ + +FBlackmagicMediaPlayer::FBlackmagicMediaPlayer(IMediaEventSink& InEventSink) + : Super(InEventSink) + , bEncodeTimecodeInTexel(false) + , bUseFrameTimecode(false) + , bIsOpen(false) + , AudioSamplePool(new FBlackmagicMediaAudioSamplePool) + , CaptureStyle(EBlackmagicMediaCaptureStyle::AudioVideo) + , BmThread_AudioSampleRate(0) + , BmThread_AudioChannels(0) + , Device(nullptr) + , Port(nullptr), CallbackHandler(nullptr) +{ +} + +FBlackmagicMediaPlayer::~FBlackmagicMediaPlayer() +{ + Close(); + delete AudioSamplePool; + AudioSamplePool = nullptr; +} + +/* IMediaPlayer interface +*****************************************************************************/ + +void FBlackmagicMediaPlayer::Close() +{ + bIsOpen = false; + + if (Port && Device) + { + FScopeLock Lock(&CriticalSection); + + if (Port) + { + if (CallbackHandler) + { + Port->SetCallback(nullptr); + delete CallbackHandler; + CallbackHandler = nullptr; + } + + Port->Release(); + Port = nullptr; + } + + if (Device != nullptr) + { + BlackmagicDevice::VideoIOReleaseDevice(Device); + Device = nullptr; + } + } + + AudioSamplePool->Reset(); + DeviceSource = FBlackmagicMediaPort(); + + Super::Close(); +} + +FName FBlackmagicMediaPlayer::GetPlayerName() const +{ + static FName PlayerName(TEXT("BlackmagicMedia")); + return PlayerName; +} + + +FString FBlackmagicMediaPlayer::GetUrl() const +{ + return DeviceSource.ToUrl(); +} + + +bool FBlackmagicMediaPlayer::Open(const 
FString& Url, const IMediaOptions* Options) +{ + if (!Super::Open(Url, Options)) + { + return false; + } + + if (!DeviceSource.FromUrl(Url, false)) + { + return false; + } + + Device = BlackmagicDevice::VideoIOCreateDevice(DeviceSource.DeviceIndex); + + if (Device == nullptr) + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("Can't aquire the Blackmagic device.")); + return false; + } + + // Read options + bUseFrameTimecode = Options->GetMediaOption(BlackmagicMedia::UseTimecodeOption, false); + bEncodeTimecodeInTexel = bUseFrameTimecode && Options->GetMediaOption(BlackmagicMedia::EncodeTimecodeInTexel, false); + + CaptureStyle = EBlackmagicMediaCaptureStyle(Options->GetMediaOption(BlackmagicMedia::CaptureStyleOption, (int64)EBlackmagicMediaCaptureStyle::AudioVideo)); + + BlackmagicDevice::FPortOptions PortOptions; + FMemory::Memset(&PortOptions, 0, sizeof(PortOptions)); + + if (bUseFrameTimecode) + { + PortOptions.bUseTimecode = true; + } + + if (CaptureStyle == EBlackmagicMediaCaptureStyle::AudioVideo) + { + PortOptions.bUseAudio = true; + PortOptions.AudioChannels = Options->GetMediaOption(BlackmagicMedia::AudioChannelOption, (int64)2); + } + PortOptions.bUseVideo = true; + PortOptions.bUseCallback = !Options->GetMediaOption(BlackmagicMedia::UseStreamBufferOption, false); + + + int32 NumFrameBufferOptions = Options->GetMediaOption(BlackmagicMedia::NumFrameBufferOption, (int64)8); + NumFrameBufferOptions = FMath::Clamp(NumFrameBufferOptions, 2, 16); + PortOptions.FrameBuffers = NumFrameBufferOptions; + + // Open Device port + int32 PortIndex = DeviceSource.PortIndex; + + BlackmagicDevice::FUInt MediaMode = Options->GetMediaOption(BlackmagicMedia::MediaModeOption, (int64)0); + + if (!BlackmagicDevice::VideoIOModeFrameDesc(MediaMode, FrameDesc)) + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("bad mode (%d), default to default."), MediaMode); + } + + VideoSampleFormat = (FrameDesc.PixelFormat == BlackmagicDevice::EPixelFormat::PF_ARGB) ? 
EMediaTextureSampleFormat::CharBGRA : EMediaTextureSampleFormat::CharUYVY; + + Port = BlackmagicDevice::VideoIODeviceOpenSharedPort(Device, PortIndex, FrameDesc, PortOptions); + + // match, so we will update when the actual mode arrives + LastFrameDesc = FrameDesc; + + // Configure the audio supported + AudioTrackFormat.BitsPerSample = 32; + AudioTrackFormat.NumChannels = 0; + AudioTrackFormat.SampleRate = 48000; + AudioTrackFormat.TypeName = TEXT("PCM"); + + // Configure the video supported + VideoTrackFormat.Dim = FIntPoint(FrameInfo.Width, FrameInfo.Height); + VideoTrackFormat.FrameRate = VideoFrameRate.AsDecimal(); + VideoTrackFormat.FrameRates = TRange(VideoFrameRate.AsDecimal()); + VideoTrackFormat.TypeName = FString(); + + if (Port) + { + BlackmagicDevice::VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + VideoFrameRate = FFrameRate(FrameInfo.TimeScale, FrameInfo.TimeValue); + } + else + { + UE_LOG(LogBlackmagicMedia, Warning, TEXT("The port couldn't be opened.")); + return false; + } + + LastFrameDropCount = Port->FrameDropCount(); + + EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged); + EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpened); + + CallbackHandler = new FCallbackHandler(this); + Port->SetCallback(CallbackHandler); + + bIsOpen = true; + return true; +} + +void FBlackmagicMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan Timecode) +{ + // update player state + const EMediaState State = IsHardwareReady() ? EMediaState::Playing : EMediaState::Preparing; + + if (State != CurrentState) + { + CurrentState = State; + EventSink.ReceiveMediaEvent(State == EMediaState::Playing ? 
EMediaEvent::PlaybackResumed : EMediaEvent::PlaybackSuspended); + } + + if (CurrentState != EMediaState::Playing) + { + return; + } + + // Don't update unless changed + // (operator != is not defined for Blackmagic::FrameDesc) + if (!(FrameDesc == LastFrameDesc)) + { + FrameDesc = LastFrameDesc; + // update the capture format + BlackmagicDevice::VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + VideoFrameRate = FFrameRate(FrameInfo.TimeScale, FrameInfo.TimeValue); + VideoTrackFormat.Dim = FIntPoint(FrameInfo.Width, FrameInfo.Height); + VideoTrackFormat.FrameRate = VideoFrameRate.AsDecimal(); + VideoTrackFormat.FrameRates = TRange(VideoFrameRate.AsDecimal()); + + static const int ModeNameLength = 64; + TCHAR ModeName[ModeNameLength]; + BlackmagicDevice::VideoIOFrameDesc2Name(FrameDesc, ModeName, ModeNameLength); + VideoTrackFormat.TypeName = FString(ModeName); + } + + AudioTrackFormat.NumChannels = BmThread_AudioChannels; + AudioTrackFormat.SampleRate = BmThread_AudioSampleRate; + + TickTimeManagement(); +} + +void FBlackmagicMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan /*Timecode*/) +{ + if (IsHardwareReady()) + { + ProcessFrame(); + VerifyFrameDropCount(); + } +} + +/* FBlackmagicMediaPlayer implementation +*****************************************************************************/ + +bool FBlackmagicMediaPlayer::DeliverFrame(BlackmagicDevice::FFrame InFrame) +{ + bool bReturn = false; + + if (InFrame) + { + if (CurrentState == EMediaState::Playing) + { + FTimespan DecodedTime = FTimespan::FromSeconds(FPlatformTime::Seconds()); + if (bUseFrameTimecode) + { + BlackmagicDevice::FTimecode Timecode; + BlackmagicDevice::VideoIOFrameTimecode(InFrame, Timecode); + DecodedTime = BlackMagicMediaPlayerHelpers::ConvertTimecode2Timespan(Timecode, PreviousFrameTimecode, PreviousFrameTimespan, VideoFrameRate); + + //Previous frame Timecode and Timespan are used to cover the facts that FrameTimecode FrameNumber is capped at 30 even for higher FPS. 
+ PreviousFrameTimecode = Timecode; + PreviousFrameTimespan = DecodedTime; + } + { + BlackmagicDevice::VideoIOFrameDesc(InFrame, LastFrameDesc); + } + + if (bUseFrameTimecode && !bUseTimeSynchronization) + { + CurrentTime = DecodedTime; + } + + if (CaptureStyle == EBlackmagicMediaCaptureStyle::AudioVideo) + { + auto AudioSample = AudioSamplePool->AcquireShared(); + if (AudioSample->Initialize(InFrame, DecodedTime)) + { + BmThread_AudioChannels = AudioSample->GetChannels(); + BmThread_AudioSampleRate = AudioSample->GetSampleRate(); + Samples->AddAudio(AudioSample); + } + } + + auto TextureSample = MakeShared(); + + if (TextureSample->Initialize(InFrame, VideoSampleFormat, DecodedTime)) + { + LastVideoDim = TextureSample->GetDim(); + + if (bEncodeTimecodeInTexel && bUseFrameTimecode) + { + void* PixelBuffer = const_cast(TextureSample->GetBuffer()); + EMediaIOCoreEncodePixelFormat EncodePixelFormat = (VideoSampleFormat == EMediaTextureSampleFormat::CharBGRA) ? EMediaIOCoreEncodePixelFormat::CharBGRA : EMediaIOCoreEncodePixelFormat::CharUYVY; + FMediaIOCoreEncodeTime EncodeTime(EncodePixelFormat, PixelBuffer, LastVideoDim.X, LastVideoDim.Y); + EncodeTime.Render(0, 0, PreviousFrameTimecode.Hours, PreviousFrameTimecode.Minutes, PreviousFrameTimecode.Seconds, PreviousFrameTimecode.Frames); + } + + Samples->AddVideo(TextureSample); + bReturn = true; + } + } + } + return bReturn; +} + +void FBlackmagicMediaPlayer::ProcessFrame() +{ + while (IsHardwareReady() && Port->PeekFrame()) + { + BlackmagicDevice::FFrame Frame = Port->WaitFrame(); + + if (!DeliverFrame(Frame)) + { + BlackmagicDevice::VideoIOReleaseFrame(Frame); + } + } +} + +bool FBlackmagicMediaPlayer::OnFrameArrived(BlackmagicDevice::FFrame InFrame) +{ + return DeliverFrame(InFrame); +} + +void FBlackmagicMediaPlayer::VerifyFrameDropCount() +{ + if (IsHardwareReady()) + { + const uint32 FrameDropCount = Port->FrameDropCount(); + if (FrameDropCount > LastFrameDropCount) + { + UE_LOG(LogBlackmagicMedia, Warning, 
TEXT("Lost %d frames on input %s. Frame rate is either too slow or buffering capacity is too small."), FrameDropCount - LastFrameDropCount, *DeviceSource.ToString()); + } + LastFrameDropCount = FrameDropCount; + } +} + +bool FBlackmagicMediaPlayer::IsHardwareReady() const +{ + return (Device && Port && bIsOpen); +} + +#undef LOCTEXT_NAMESPACE + +#include "BlackmagicMediaHidePlatformTypes.h" diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.h new file mode 100644 index 000000000000..7e0c553d5600 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaPlayer.h @@ -0,0 +1,112 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "MediaIOCorePlayerBase.h" + +#include "BlackmagicMediaFinder.h" +#include "BlackmagicMediaPrivate.h" +#include "BlackmagicMediaSource.h" + +class FBlackmagicMediaAudioSamplePool; +class FBlackmagicMediaTextureSamplePool; +class IMediaEventSink; + +enum class EMediaTextureSampleFormat; + +/** +* Implements a media player for Blackmagic. +* +* The processing of metadata and video frames is delayed until the fetch stage +* (TickFetch) in order to increase the window of opportunity for receiving +* frames for the current render frame time code. +* +* Depending on whether the media source enables time code synchronization, +* the player's current play time (CurrentTime) is derived either from the +* time codes embedded in frames or from the Engine's global time code. +*/ +class FBlackmagicMediaPlayer : public FMediaIOCorePlayerBase +{ + using Super = FMediaIOCorePlayerBase; +public: + + /** + * Create and initialize a new instance. + * + * @param InEventSink The object that receives media events from this player. 
+ */ + FBlackmagicMediaPlayer(IMediaEventSink& InEventSink); + + /** Virtual destructor. */ + virtual ~FBlackmagicMediaPlayer(); + +public: + + //~ IMediaPlayer interface + + virtual void Close() override; + virtual FName GetPlayerName() const override; + virtual FString GetUrl() const override; + + virtual bool Open(const FString& Url, const IMediaOptions* Options) override; + + virtual void TickInput(FTimespan DeltaTime, FTimespan Timecode) override; + virtual void TickFetch(FTimespan DeltaTime, FTimespan Timecode) override; + + /** Process pending audio and video frames, and forward them to the sinks. */ + void ProcessFrame(); + + /** Verify if we lost some frames since last Tick. */ + void VerifyFrameDropCount(); + + /** Is Hardware initialized */ + virtual bool IsHardwareReady() const override; + +protected: + bool OnFrameArrived(BlackmagicDevice::FFrame InFrame); + bool DeliverFrame(BlackmagicDevice::FFrame InFrame); + +private: + + /** Encode the time into video frame */ + bool bEncodeTimecodeInTexel; + + /** Whether to use the timecode embedded in a frame. */ + bool bUseFrameTimecode; + + /** Open has finished */ + bool bIsOpen; + + /** Audio sample object pool. */ + FBlackmagicMediaAudioSamplePool* AudioSamplePool; + + /** The currently opened URL. */ + FBlackmagicMediaPort DeviceSource; + + /** Which feature do we captured. 
Audio/Video */ + EBlackmagicMediaCaptureStyle CaptureStyle; + + /** Current Frame Description */ + BlackmagicDevice::FFrameDesc LastFrameDesc; + BlackmagicDevice::FFrameDesc FrameDesc; + int32 BmThread_AudioSampleRate; + int32 BmThread_AudioChannels; + + /** Current Frame Description Info */ + BlackmagicDevice::FFrameInfo FrameInfo; + + /** Currently active capture Device */ + BlackmagicDevice::FDevice Device; + + /** Maps to the current input Device */ + BlackmagicDevice::FPort Port; + + /** Previous frame timecode */ + BlackmagicDevice::FTimecode PreviousFrameTimecode; + + //* get notifications for frames arriving */ + struct FCallbackHandler; + friend FCallbackHandler; + FCallbackHandler* CallbackHandler; + +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaTextureSample.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaTextureSample.h new file mode 100644 index 000000000000..bf564e4e6570 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Player/BlackmagicMediaTextureSample.h @@ -0,0 +1,71 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "MediaIOCoreTextureSampleBase.h" +#include "BlackmagicMediaPrivate.h" + +/** + * Implements a media texture sample for Blackmagic. + */ +class FBlackmagicMediaTextureSample : public FMediaIOCoreTextureSampleBase +{ + +public: + /** Default constructor. */ + FBlackmagicMediaTextureSample() + : Frame(nullptr) + { + } + + /** Default destructor. */ + virtual ~FBlackmagicMediaTextureSample() + { + FreeSample(); + } + +public: + /** + * Initialize the sample. + * + * @param InReceiverInstance The receiver instance that generated the sample. + * @param InFrame The video frame data. + * @param InSampleFormat The sample format. + * @param InTime The sample time (in the player's own clock). 
+ */ + bool Initialize(BlackmagicDevice::FFrame InFrame, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime) + { + FreeSample(); + + if ((InFrame == nullptr) || (InSampleFormat == EMediaTextureSampleFormat::Undefined)) + { + return false; + } + + Frame = InFrame; + Stride = BlackmagicDevice::VideoIOFrameDimensions(Frame, Width, Height); + + uint32_t Size; + PixelBuffer = BlackmagicDevice::VideoIOFrameVideoBuffer(Frame, Size); + + Duration = FTimespan(0); + SampleFormat = InSampleFormat; + Time = InTime; + return true; + } + +protected: + /** Free the video frame data. */ + virtual void FreeSample() override + { + if (Frame) + { + PixelBuffer = nullptr; + BlackmagicDevice::VideoIOReleaseFrame(Frame); + Frame = nullptr; + } + } + +protected: + BlackmagicDevice::FFrame Frame; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaAllowPlatformTypes.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaAllowPlatformTypes.h new file mode 100644 index 000000000000..57397fcae0bd --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaAllowPlatformTypes.h @@ -0,0 +1,22 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#ifndef BLACKMAGIC_PLATFORM_TYPES_GUARD + #define BLACKMAGIC_PLATFORM_TYPES_GUARD +#else + #error Nesting BlackmagicAllowPlatformTypes.h is not allowed! 
+#endif + +#if !PLATFORM_WINDOWS + #include "Processing.Blackmagic.compat.h" +#endif + +#define DWORD ::DWORD +#define FLOAT ::FLOAT + +#ifndef TRUE + #define TRUE 1 +#endif + +#ifndef FALSE + #define FALSE 0 +#endif diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaHidePlatformTypes.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaHidePlatformTypes.h new file mode 100644 index 000000000000..d579783d8962 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Private/Shared/BlackmagicMediaHidePlatformTypes.h @@ -0,0 +1,18 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#ifdef BLACKMAGIC_PLATFORM_TYPES_GUARD + #undef BLACKMAGIC_PLATFORM_TYPES_GUARD +#else + #error Mismatched BlackmagicHidePlatformTypes.h detected. +#endif + +#undef DWORD +#undef FLOAT + +#ifdef TRUE + #undef TRUE +#endif + +#ifdef FALSE + #undef FALSE +#endif diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicCustomTimeStep.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicCustomTimeStep.h new file mode 100644 index 000000000000..2b9244a933a3 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicCustomTimeStep.h @@ -0,0 +1,70 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "Engine/EngineCustomTimeStep.h" + +#include "BlackmagicLib.h" +#include "BlackmagicMediaFinder.h" +#include "BlackmagicMediaSource.h" + +#include "HAL/RunnableThread.h" +#include "MediaIOCoreWaitVSyncThread.h" + +#include "BlackmagicCustomTimeStep.generated.h" + +/** + * Class to control the Engine TimeStep via the Blackmagic card + */ +UCLASS(editinlinenew, meta=(DisplayName="Blackmagic SDI Input")) +class BLACKMAGICMEDIA_API UBlackmagicCustomTimeStep : public UEngineCustomTimeStep +{ + GENERATED_UCLASS_BODY() + +public: + + //~ UEngineCustomTimeStep interface + virtual bool Initialize(class UEngine* InEngine) override; + virtual void Shutdown(class UEngine* InEngine) override; + virtual bool UpdateTimeStep(class UEngine* InEngine) override; + virtual ECustomTimeStepSynchronizationState GetSynchronizationState() const override; + +private: + + void WaitForVSync() const; + +public: + /** + * The Blackmagic source from where the Genlock signal will be coming from. + */ + UPROPERTY(EditAnywhere, Category="Genlock options", AssetRegistrySearchable, meta=(DisplayName="Source")) + FBlackmagicMediaPort MediaPort; + + /** Fixed tick rate */ + UPROPERTY(EditAnywhere, Category="Genlock options", Meta= (ClampMin = 1)) + float FixedFPS; + + /** Enable mechanism to detect Engine loop overrunning the source */ + UPROPERTY(EditAnywhere, Category="Genlock options", meta=(DisplayName="Display Dropped Frames Warning")) + bool bEnableOverrunDetection; + + /** Desired number of audio channel to capture. 
*/ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Audio) + EBlackmagicMediaAudioChannel AudioChannels; + +private: + /** Blackmagic Device to capture the Sync */ + BlackmagicDevice::FDevice Device; + + /** Blackmagic Port to capture the Sync */ + BlackmagicDevice::FPort Port; + + /** WaitForVSync task Runnable */ + TUniquePtr VSyncThread; + + /** WaitForVSync thread */ + TUniquePtr VSyncRunnableThread; + + /** The current SynchronizationState of the CustomTimeStep */ + ECustomTimeStepSynchronizationState State; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicHardwareSync.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicHardwareSync.h new file mode 100644 index 000000000000..ddd7c388bac9 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicHardwareSync.h @@ -0,0 +1,33 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "IMediaIOCoreHardwareSync.h" +#include "BlackmagicLib.h" + +/** + * Implementation of HardwareSync for Blackmagic. + */ + +class FBlackmagicHardwareSync : public IMediaIOCoreHardwareSync +{ +public: + FBlackmagicHardwareSync(BlackmagicDevice::IPortShared* InPort) + : Port(InPort) + { + } + virtual bool IsValid() const override + { + return Port != nullptr; + } + virtual bool WaitVSync() override + { + if (Port) + { + return Port->WaitVSync(); + } + return false; + } +protected: + BlackmagicDevice::IPortShared* Port; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaFinder.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaFinder.h new file mode 100644 index 000000000000..c4db1d85dfbc --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaFinder.h @@ -0,0 +1,144 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" + +#include "BlackmagicMediaFinder.generated.h" + + +/** + * Identifies an media source. + */ +USTRUCT(BlueprintType) +struct BLACKMAGICMEDIA_API FBlackmagicMediaPort +{ + GENERATED_BODY() + +public: + static const TCHAR* Protocol; + + /** Default constructor. */ + FBlackmagicMediaPort(); + + /** + * Create and initialize a new instance. + */ + FBlackmagicMediaPort(const FString& InDeviceName, int32 InDeviceIndex, int32 InPortIndex); + + /** The retail name of the Device, i.e. "IoExpress". */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=BLACKMAGIC) + FString DeviceName; + + /** The index of the Device */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=BLACKMAGIC, meta=(ClampMin="0")) + int32 DeviceIndex; + + /** The index of the video input/ouput port on that Device. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=BLACKMAGIC, meta=(ClampMin="0")) + int32 PortIndex; + +public: + + /** + * Get a string representation of this source. + * @return String representation, i.e. "IoExpress [device0/port1]". + */ + FString ToString() const; + + /** + * Get a url used by the Media framework + * @return Url representation, "blackmagic://device0/port1" + */ + FString ToUrl() const; + + /** Return true if the device & port index have been set properly */ + bool IsValid() const; + + /** + * Build a BlackmagicMediaSourceId from a Url representation. + * @param Url A Url representation, i.e. "IoExpress [device0/port1]". + * @param bDiscoverDeviceName Ask the BlackmagicDevice the name of the Device. If false, the name will be empty. + * @return true on success + */ + bool FromUrl(const FString& Url, bool bDiscoverDeviceName); +}; + +/** + * Identifies a media mode. + */ +USTRUCT(BlueprintType) +struct BLACKMAGICMEDIA_API FBlackmagicMediaMode +{ + GENERATED_BODY() + +public: + /** Default constructor. */ + FBlackmagicMediaMode(); + + /** + * Create and initialize a new instance. 
+ */ + FBlackmagicMediaMode(const FString& InModeName, int32 inMode); + + /** The retail name of the Device, i.e. "IoExpress". */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=BLACKMAGIC) + FString ModeName; + + /** The index of the Device */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category=BLACKMAGIC, meta=(ClampMin="0")) + int32 Mode; + +public: + + /** + * Get a string representation of this source. + * @return String representation, i.e. "IoExpress [device0/port1]". + */ + FString ToString() const; + + /** + * Get a url used by the Media framework + * @return Url representation, "blackmagic://device0/port1" + */ + FString ToUrl() const; + + /** Return true if the device & port index have been set properly */ + bool IsValid() const; +}; + +/** Used to manage input modes. */ +USTRUCT(BlueprintType) +struct BLACKMAGICMEDIA_API FBlackmagicMediaModeInput : public FBlackmagicMediaMode +{ + GENERATED_BODY() +}; + +/** Used to manage output modes. */ +USTRUCT(BlueprintType) +struct BLACKMAGICMEDIA_API FBlackmagicMediaModeOutput : public FBlackmagicMediaMode +{ + GENERATED_BODY() +}; + +/* + * Find all of the Inputs + */ +UCLASS() +class BLACKMAGICMEDIA_API UBlackmagicMediaFinder : public UObject +{ + GENERATED_BODY() + +public: + + /** + * Get the list of media sources installed in the machine. + * @param OutSources Will contain the collection of found NDI source names and their URLs. + * @return true on success, false if the finder wasn't initialized. 
+ */ + UFUNCTION(BlueprintCallable, Category=Blackmagic) + static bool GetSources(TArray& OutSources); + + UFUNCTION(BlueprintCallable, Category=Blackmagic) + static bool GetModes(TArray& OutSources, bool bInOutput); +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaOutput.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaOutput.h new file mode 100644 index 000000000000..7188a487c646 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaOutput.h @@ -0,0 +1,107 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "BaseMediaSource.h" + +#include "BlackmagicMediaFinder.h" + +#include "BlackmagicMediaOutput.generated.h" + +/** + * Option for output formats. + */ +UENUM(BlueprintType) +enum class EBlackmagicMediaOutputType : uint8 +{ + FillOnly UMETA(Tooltip="Fill will be on the provided FillPort"), + FillAndKey UMETA(Tooltip="Fill will be on provided FillPort pin and Key will be on FillPort + 1"), +}; + +/** + * Output Media for a stream. + * The output format is ARGB8. + */ +UCLASS(BlueprintType) +class BLACKMAGICMEDIA_API UBlackmagicMediaOutput : public UObject +{ + GENERATED_BODY() + + UBlackmagicMediaOutput(); + +public: + /** + * The Device and port to output to". + * This combines the device ID, and the output port. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Blackmagic, AssetRegistrySearchable) + FBlackmagicMediaPort FillPort; + + /** Which mode to use for Output */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Blackmagic, AssetRegistrySearchable) + FBlackmagicMediaModeOutput MediaMode; + + /** + * Whether to output the fill or the fill and key. + * If the key is selected, the pin need to be FillPort.Port + 1. 
+ */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Blackmagic) + EBlackmagicMediaOutputType OutputType; + + /** + * Clear the buffer before filling the output + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Buffer) + bool bClearBuffer; + + /** + * Color to fill when clearing the buffer + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Buffer, meta = (EditCondition = bClearBuffer)) + FColor ClearBufferColor; + + /** Whether to embed the timecode to the output frame (if enabled by the Engine). */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Synchronization) + bool bOutputTimecode; + + /* + * Copy of the "game" frame buffer on the Render Thread or the Game Thread. + * The copy may take some time and can lock the thread. + * If the copy is on the Render Thread, it will guarantee that the output will be available. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Synchronization) + bool bCopyOnRenderThread; + + /** + * Wait for an Output Frame to be available on the card. + * The card output at a "Genlock" rate. + * If you do not wait and the Output Frame is not available, the "Game" frame will be discarded. + * If you wait and the Output Frame is not available, the thread is wait (freeze). This can be used as a "Genlock" solution. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Synchronization) + bool bWaitForOutputFrame; + + /** + * Try to maintain a the engine "Genlock" with the VSync signal. + * This is not necessary if you are waiting for the Output frame. You will be "Genlock" once the card output buffer are filled. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Synchronization) + bool bWaitForVSyncEvent; + + /* + * The Engine will try to detect when it took to much time and missed the VSync signal. + * To do so, it creates another thread. + * If false and you missed the VSync opportunity, the engine will stall for 1 VSync. 
+ */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Synchronization, meta = (EditCondition = bWaitForVSyncEvent)) + bool bVSyncEventOnAnotherThread; + + /** + * Encode Timecode in the output + * Current value will be white. The format will be encoded in hh:mm::ss::ff. Each value, will be on a different line. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Debug", meta = (EditCondition = "IN_CPP")) + bool bEncodeTimecodeInTexel; + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSettings.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSettings.h new file mode 100644 index 000000000000..6974fbb1abf6 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSettings.h @@ -0,0 +1,17 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +#include "BlackmagicMediaSettings.generated.h" + + +/** + * Settings for the Media plug-in. + */ +UCLASS(config=Engine) +class BLACKMAGICMEDIA_API UBlackmagicMediaSettings : public UObject +{ + GENERATED_BODY() +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSource.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSource.h new file mode 100644 index 000000000000..a526ea93f241 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicMediaSource.h @@ -0,0 +1,95 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "TimeSynchronizableMediaSource.h" + +#include "BlackmagicMediaFinder.h" +#include "Misc/FrameRate.h" + +#include "BlackmagicMediaSource.generated.h" + +/** + * Available capture style for sources. 
+ */ +UENUM(BlueprintType) +enum class EBlackmagicMediaCaptureStyle : uint8 +{ + Video, + AudioVideo, +}; + +/** + * Available number of audio channel supported by UE4 & Capture card. + */ +UENUM(BlueprintType) +enum class EBlackmagicMediaAudioChannel : uint8 +{ + Stereo2, + Surround8, +}; + +/** + * Media source description for Blackmagic. + */ +UCLASS(BlueprintType) +class BLACKMAGICMEDIA_API UBlackmagicMediaSource : public UBaseMediaSource +{ + GENERATED_BODY() + +public: + /** + * The input name of the source to be played". + * This combines the device ID, and the input. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=Blackmagic, AssetRegistrySearchable) + FBlackmagicMediaPort MediaPort; + + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=Blackmagic, AssetRegistrySearchable) + FBlackmagicMediaModeInput MediaMode; + + /** Whether to use the time code embedded in the input stream when time code locking is enabled in the Engine. */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=Synchronization) + bool UseTimecode; + + /** Video or Video+Audio */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=VideoFormat) + EBlackmagicMediaCaptureStyle CaptureStyle; + + /** Desired number of audio channel to capture. */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=Audio) + EBlackmagicMediaAudioChannel AudioChannels; + +public: + /** + * Encode Timecode in the output + * Current value will be white. The format will be encoded in hh:mm::ss::ff. Each value, will be on a different line. + */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Debug", meta = (EditCondition = "UseTimecode")) + bool bEncodeTimecodeInTexel; + + /** Use the low level buffering, and polling read. */ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = "Debug") + bool UseStreamBuffer; + + /** number of frames to buffer. 
*/ + UPROPERTY(BlueprintReadWrite, EditAnywhere, AdvancedDisplay, Category = Debug, meta = (ClampMin = "2", ClampMax = "16")) + int32 NumberFrameBuffers; + +public: + /** Default constructor. */ + UBlackmagicMediaSource(); + +public: + //~ IMediaOptions interface + + virtual bool GetMediaOption(const FName& Key, bool DefaultValue) const override; + virtual int64 GetMediaOption(const FName& Key, int64 DefaultValue) const override; + virtual bool HasMediaOption(const FName& Key) const override; + +public: + //~ UMediaSource interface + + virtual FString GetUrl() const override; + virtual bool Validate() const override; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicTimecodeProvider.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicTimecodeProvider.h new file mode 100644 index 000000000000..ccd965b52c6c --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/BlackmagicTimecodeProvider.h @@ -0,0 +1,73 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "Engine/TimecodeProvider.h" + +#include "BlackmagicMediaFinder.h" +#include "BlackmagicMediaSource.h" + +#include "BlackmagicTimecodeProvider.generated.h" + +namespace BlackmagicDevice +{ + struct IPortShared; + typedef void* FDevice; + typedef IPortShared* FPort; +} + +/** + * Class to fetch a timecode via an AJA card + */ +UCLASS(editinlinenew, meta=(DisplayName="Blackmagic SDI Input")) +class BLACKMAGICMEDIA_API UBlackmagicTimecodeProvider : public UTimecodeProvider +{ + GENERATED_UCLASS_BODY() + +public: + //~ UTimecodeProvider interface + virtual FTimecode GetTimecode() const override; + virtual FFrameRate GetFrameRate() const override { return FrameRate; } + virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override { return State; } + virtual bool Initialize(class UEngine* InEngine) override; + virtual void Shutdown(class UEngine* InEngine) override; + + //~ UObject interface + virtual void BeginDestroy() override; + +private: + + void ReleaseResources(); + +public: + /** The AJA source from where the Timecode signal will be coming from. */ + UPROPERTY(EditAnywhere, Category="Timecode options", AssetRegistrySearchable, meta=(DisplayName="Source")) + FBlackmagicMediaPort MediaPort; + + /** Frame rate expected from the SDI stream. */ + UPROPERTY(EditAnywhere, Category="Timecode options") + FFrameRate FrameRate; + + /** Desired number of audio channel to capture. 
*/ + UPROPERTY(BlueprintReadWrite, EditAnywhere, Category = Audio) + EBlackmagicMediaAudioChannel AudioChannels; + +private: + + /** Hardware device handle */ + BlackmagicDevice::FDevice Device; + + /** Hardware port handle */ + BlackmagicDevice::FPort Port; + + //* get notifications for InitializationCompleted */ + struct FCallbackHandler; + friend FCallbackHandler; + FCallbackHandler* CallbackHandler; + + /** Input is running */ + bool bIsRunning; + + /** The current SynchronizationState of the TimecodeProvider*/ + ETimecodeProviderSynchronizationState State; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/IBlackmagicMediaModule.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/IBlackmagicMediaModule.h new file mode 100644 index 000000000000..071ef240eb91 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMedia/Public/IBlackmagicMediaModule.h @@ -0,0 +1,28 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Modules/ModuleInterface.h" +#include "Templates/SharedPointer.h" + +class IMediaEventSink; +class IMediaPlayer; + +/** + * Interface for the Media module. + */ +class IBlackmagicMediaModule : public IModuleInterface +{ +public: + + /** + * Create an Blackmagic based media player. + * @param EventSink The object that receives media events from the player. + * @return A new media player, or nullptr if a player couldn't be created. 
+ */ + virtual TSharedPtr CreatePlayer(IMediaEventSink& EventSink) = 0; + + /** @return true if the Blackmagic module and VideoIO.dll could be loaded */ + virtual bool IsInitialized() const = 0; +}; + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/BlackmagicMediaEditor.Build.cs b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/BlackmagicMediaEditor.Build.cs new file mode 100644 index 000000000000..aee4ea0d4f83 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/BlackmagicMediaEditor.Build.cs @@ -0,0 +1,32 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +namespace UnrealBuildTool.Rules +{ + public class BlackmagicMediaEditor : ModuleRules + { + public BlackmagicMediaEditor(ReadOnlyTargetRules Target) : base(Target) + { + PrivateDependencyModuleNames.AddRange( + new string[] { + "BlackmagicMedia", + "BlackmagicMediaOutput", + "Core", + "CoreUObject", + "MediaAssets", + "Projects", + "PropertyEditor", + "Settings", + "Slate", + "SlateCore", + "UnrealEd", + }); + + PrivateIncludePathModuleNames.AddRange( + new string[] { + "AssetTools", + }); + + PrivateIncludePaths.Add("BlackmagicMediaEditor/Private"); + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/BlackmagicMediaEditorModule.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/BlackmagicMediaEditorModule.cpp new file mode 100644 index 000000000000..d9ec626398e0 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/BlackmagicMediaEditorModule.cpp @@ -0,0 +1,135 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaFinder.h" + +#include "BlackmagicMediaSettings.h" +#include "BlackmagicMediaFinder.h" + +#include "Customizations/BlackmagicMediaPortCustomization.h" +#include "Customizations/BlackmagicMediaModeCustomization.h" + +#include "Brushes/SlateImageBrush.h" +#include "Interfaces/IPluginManager.h" +#include "ISettingsModule.h" +#include "ISettingsSection.h" +#include "Modules/ModuleInterface.h" +#include "Modules/ModuleManager.h" +#include "PropertyEditorModule.h" +#include "Styling/SlateStyle.h" +#include "Styling/SlateStyleRegistry.h" +#include "Templates/UniquePtr.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaEditor" + +/** + * Implements the MediaEditor module. + */ +class FBlackmagicMediaEditorModule : public IModuleInterface +{ +public: + + //~ IModuleInterface interface + + virtual void StartupModule() override + { + RegisterCustomizations(); + RegisterSettings(); + RegisterStyle(); + } + + virtual void ShutdownModule() override + { + if (!UObjectInitialized() && !GIsRequestingExit) + { + UnregisterStyle(); + UnregisterSettings(); + UnregisterCustomizations(); + } + } + +private: + TUniquePtr StyleInstance; + +private: + + /** Register details view customizations. 
*/ + void RegisterCustomizations() + { + FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); + PropertyModule.RegisterCustomPropertyTypeLayout(FBlackmagicMediaPort::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FBlackmagicMediaPortCustomization::MakeInstance)); + + PropertyModule.RegisterCustomPropertyTypeLayout(FBlackmagicMediaModeInput::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FBlackmagicMediaModeCustomization::MakeInputInstance)); + PropertyModule.RegisterCustomPropertyTypeLayout(FBlackmagicMediaModeOutput::StaticStruct()->GetFName(), FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FBlackmagicMediaModeCustomization::MakeOutputInstance)); + } + + /** Unregister details view customizations. */ + void UnregisterCustomizations() + { + FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked("PropertyEditor"); + PropertyModule.UnregisterCustomPropertyTypeLayout(FBlackmagicMediaPort::StaticStruct()->GetFName()); + + PropertyModule.UnregisterCustomPropertyTypeLayout(FBlackmagicMediaModeInput::StaticStruct()->GetFName()); + PropertyModule.UnregisterCustomPropertyTypeLayout(FBlackmagicMediaModeOutput::StaticStruct()->GetFName()); + } + + void RegisterSettings() + { + // register settings + ISettingsModule* SettingsModule = FModuleManager::GetModulePtr("Settings"); + if (SettingsModule != nullptr) + { + ISettingsSectionPtr SettingsSection = SettingsModule->RegisterSettings("Project", "Plugins", "BlackmagicMedia", + LOCTEXT("BlackmagicMediaSettingsName", "Blackmagic Media"), + LOCTEXT("BlackmagicMediaSettingsDescription", "Configure the Blackmagic Media plug-in."), + GetMutableDefault() + ); + } + } + + void UnregisterSettings() + { + // unregister settings + ISettingsModule* SettingsModule = FModuleManager::GetModulePtr("Settings"); + if (SettingsModule != nullptr) + { + SettingsModule->UnregisterSettings("Project", "Plugins", 
"BlackmagicMedia"); + } + } + + void RegisterStyle() + { +#define IMAGE_BRUSH(RelativePath, ...) FSlateImageBrush(StyleInstance->RootToContentDir(RelativePath, TEXT(".png")), __VA_ARGS__) + + StyleInstance = MakeUnique("BlackmagicStyle"); + + TSharedPtr Plugin = IPluginManager::Get().FindPlugin(TEXT("BlackmagicMedia")); + if (Plugin.IsValid()) + { + StyleInstance->SetContentRoot(FPaths::Combine(Plugin->GetContentDir(), TEXT("Editor/Icons"))); + } + + const FVector2D Icon20x20(20.0f, 20.0f); + const FVector2D Icon64x64(64.0f, 64.0f); + + StyleInstance->Set("ClassThumbnail.BlackmagicMediaSource", new IMAGE_BRUSH("BlackmagicMediaSource_64x", Icon64x64)); + StyleInstance->Set("ClassIcon.BlackmagicMediaSource", new IMAGE_BRUSH("BlackmagicMediaSource_20x", Icon20x20)); + StyleInstance->Set("ClassThumbnail.BlackmagicMediaOutput", new IMAGE_BRUSH("BlackmagicMediaOutput_64x", Icon64x64)); + StyleInstance->Set("ClassIcon.BlackmagicMediaOutput", new IMAGE_BRUSH("BlackmagicMediaOutput_20x", Icon20x20)); + + FSlateStyleRegistry::RegisterSlateStyle(*StyleInstance.Get()); + +#undef IMAGE_BRUSH + } + + void UnregisterStyle() + { + FSlateStyleRegistry::UnRegisterSlateStyle(*StyleInstance.Get()); + StyleInstance.Reset(); + } +}; + + +IMPLEMENT_MODULE(FBlackmagicMediaEditorModule, IModuleInterface); + +#undef LOCTEXT_NAMESPACE + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.cpp new file mode 100644 index 000000000000..a5f296a108ea --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.cpp @@ -0,0 +1,134 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaModeCustomization.h" + +#include "BlackmagicMediaFinder.h" +#include "DetailWidgetRow.h" +#include "Framework/MultiBox/MultiBoxBuilder.h" +#include "Widgets/Input/SComboButton.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaPortCustomization" + +FBlackmagicMediaModeCustomization::FBlackmagicMediaModeCustomization(bool InOutput) + : bOutput(InOutput) +{ +} + + +void FBlackmagicMediaModeCustomization::CustomizeHeader(TSharedRef InPropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) +{ + MediaModeProperty = InPropertyHandle; + if (MediaModeProperty->GetNumPerObjectValues() == 1 && MediaModeProperty->IsValidHandle()) + { + UProperty* Property = MediaModeProperty->GetProperty(); + + check(Property && Cast(Property) && Cast(Property)->Struct && + (Cast(Property)->Struct->IsChildOf(FBlackmagicMediaModeInput::StaticStruct()) + || Cast(Property)->Struct->IsChildOf(FBlackmagicMediaModeOutput::StaticStruct()))); + + TArray RawData; + MediaModeProperty->AccessRawData(RawData); + + check(RawData.Num() == 1); + FBlackmagicMediaMode* MediaModeValue = reinterpret_cast(RawData[0]); + + check(MediaModeValue); + + HeaderRow + .NameContent() + [ + InPropertyHandle->CreatePropertyNameWidget() + ] + .ValueContent() + .MaxDesiredWidth(512) + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .FillWidth(1.0f) + .VAlign(VAlign_Center) + [ + SNew(STextBlock) + .Text(TAttribute::Create(TAttribute::FGetter::CreateLambda([=] { return FText::FromString(MediaModeValue->ToUrl()); }))) + ] + + SHorizontalBox::Slot() + .AutoWidth() + .Padding(FMargin(4.0f, 0.0f, 0.0f, 0.0f)) + .VAlign(VAlign_Center) + [ + SNew(SComboButton) + .OnGetMenuContent(this, &FBlackmagicMediaModeCustomization::HandleSourceComboButtonMenuContent) + .ContentPadding(FMargin(4.0, 2.0)) + ] + ]; + } +} + +void FBlackmagicMediaModeCustomization::CustomizeChildren(TSharedRef InStructPropertyHandle, IDetailChildrenBuilder& StructBuilder, 
IPropertyTypeCustomizationUtils& StructCustomizationUtils) +{ +} + +TSharedRef FBlackmagicMediaModeCustomization::HandleSourceComboButtonMenuContent() const +{ + // fetch found sources + TArray OutModes; + if (!UBlackmagicMediaFinder::GetModes(OutModes, bOutput)) + { + return SNullWidget::NullWidget; + } + + // generate menu + FMenuBuilder MenuBuilder(true, nullptr); + + const ANSICHAR* SectionName = bOutput ? "AllOutputModes" : "AllInputModes"; + TAttribute HeaderText = bOutput ? LOCTEXT("AllOutputModesSection", "Output Modes") : LOCTEXT("AllInputModesSection", "Input Modes"); + + MenuBuilder.BeginSection(SectionName, HeaderText); + { + bool ModeAdded = false; + + for (const FBlackmagicMediaMode& Mode : OutModes) + { + const TSharedPtr ValueProperty = MediaModeProperty; + const FString Url = Mode.ToUrl(); + + MenuBuilder.AddMenuEntry( + FText::FromString(Mode.ToString()), + FText::FromString(Url), + FSlateIcon(), + FUIAction( + FExecuteAction::CreateLambda([=] { + + TArray RawData; + MediaModeProperty->AccessRawData(RawData); + + check(RawData.Num() == 1); + MediaModeProperty->NotifyPreChange(); + FBlackmagicMediaMode* MediaModeValue = reinterpret_cast(RawData[0]); + *MediaModeValue = Mode; + MediaModeProperty->NotifyPostChange(); + MediaModeProperty->NotifyFinishedChangingProperties(); + }), + FCanExecuteAction(), + FIsActionChecked::CreateLambda([=] { + FString CurrentValue; + return ((ValueProperty->GetValue(CurrentValue) == FPropertyAccess::Success) && CurrentValue == Url); + }) + ), + NAME_None, + EUserInterfaceActionType::RadioButton + ); + + ModeAdded = true; + } + + if (!ModeAdded) + { + MenuBuilder.AddWidget(SNullWidget::NullWidget, LOCTEXT("NoModesFound", "No display mode found"), false, false); + } + } + MenuBuilder.EndSection(); + + return MenuBuilder.MakeWidget(); +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.h 
b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.h new file mode 100644 index 000000000000..cd718f9dcc65 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaModeCustomization.h @@ -0,0 +1,39 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "IPropertyTypeCustomization.h" +#include "Widgets/SWidget.h" + +/** + * Implements a details view customization for the MediaPort + */ +class FBlackmagicMediaModeCustomization : public IPropertyTypeCustomization +{ +public: + FBlackmagicMediaModeCustomization(bool InOutput = false); + + static TSharedRef MakeInputInstance() + { + return MakeShareable(new FBlackmagicMediaModeCustomization()); + } + + static TSharedRef MakeOutputInstance() + { + return MakeShareable(new FBlackmagicMediaModeCustomization(true)); + } + + /** IPropertyTypeCustomization interface */ + virtual void CustomizeHeader(TSharedRef InPropertyHandle, class FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& PropertyTypeCustomizationUtils) override; + virtual void CustomizeChildren(TSharedRef InPropertyHandle, class IDetailChildrenBuilder& StructBuilder, IPropertyTypeCustomizationUtils& PropertyTypeCustomizationUtils) override; + +private: + TSharedRef HandleSourceComboButtonMenuContent() const; + + /** Direction filter */ + bool bOutput; + + /** Pointer to the MediaPort property handle. 
*/ + TSharedPtr MediaModeProperty; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.cpp new file mode 100644 index 000000000000..f8dfb2806258 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.cpp @@ -0,0 +1,122 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicMediaPortCustomization.h" + +#include "BlackmagicMediaFinder.h" +#include "DetailWidgetRow.h" +#include "Framework/MultiBox/MultiBoxBuilder.h" +#include "Widgets/Input/SComboButton.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaPortCustomization" + +void FBlackmagicMediaPortCustomization::CustomizeHeader(TSharedRef InPropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) +{ + MediaPortProperty = InPropertyHandle; + if (MediaPortProperty->GetNumPerObjectValues() == 1 && MediaPortProperty->IsValidHandle()) + { + UProperty* Property = MediaPortProperty->GetProperty(); + check(Property && Cast(Property) && Cast(Property)->Struct && Cast(Property)->Struct->IsChildOf(FBlackmagicMediaPort::StaticStruct())); + + TArray RawData; + MediaPortProperty->AccessRawData(RawData); + + check(RawData.Num() == 1); + FBlackmagicMediaPort* MediaPortValue = reinterpret_cast(RawData[0]); + + check(MediaPortValue); + + HeaderRow + .NameContent() + [ + InPropertyHandle->CreatePropertyNameWidget() + ] + .ValueContent() + .MaxDesiredWidth(512) + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .FillWidth(1.0f) + .VAlign(VAlign_Center) + [ + SNew(STextBlock) + .Text(TAttribute::Create(TAttribute::FGetter::CreateLambda([=] { return FText::FromString(MediaPortValue->ToUrl()); }))) + ] + + SHorizontalBox::Slot() + .AutoWidth() + .Padding(FMargin(4.0f, 
0.0f, 0.0f, 0.0f)) + .VAlign(VAlign_Center) + [ + SNew(SComboButton) + .OnGetMenuContent(this, &FBlackmagicMediaPortCustomization::HandleSourceComboButtonMenuContent) + .ContentPadding(FMargin(4.0, 2.0)) + ] + ]; + } +} + +void FBlackmagicMediaPortCustomization::CustomizeChildren(TSharedRef InStructPropertyHandle, IDetailChildrenBuilder& StructBuilder, IPropertyTypeCustomizationUtils& StructCustomizationUtils) +{ +} + +TSharedRef FBlackmagicMediaPortCustomization::HandleSourceComboButtonMenuContent() const +{ + // fetch found sources + TArray OutSources; + if (!UBlackmagicMediaFinder::GetSources(OutSources)) + { + return SNullWidget::NullWidget; + } + + // generate menu + FMenuBuilder MenuBuilder(true, nullptr); + + MenuBuilder.BeginSection("AllSources", LOCTEXT("AllSourcesSection", "All Sources")); + { + bool SourceAdded = false; + + for (const FBlackmagicMediaPort& Source : OutSources) + { + const TSharedPtr ValueProperty = MediaPortProperty; + const FString Url = Source.ToUrl(); + + MenuBuilder.AddMenuEntry( + FText::FromString(Source.ToString()), + FText::FromString(Url), + FSlateIcon(), + FUIAction( + FExecuteAction::CreateLambda([=] { + + TArray RawData; + MediaPortProperty->AccessRawData(RawData); + + check(RawData.Num() == 1); + MediaPortProperty->NotifyPreChange(); + FBlackmagicMediaPort* MediaPortValue = reinterpret_cast(RawData[0]); + *MediaPortValue = Source; + MediaPortProperty->NotifyPostChange(); + MediaPortProperty->NotifyFinishedChangingProperties(); + }), + FCanExecuteAction(), + FIsActionChecked::CreateLambda([=] { + FString CurrentValue; + return ((ValueProperty->GetValue(CurrentValue) == FPropertyAccess::Success) && CurrentValue == Url); + }) + ), + NAME_None, + EUserInterfaceActionType::RadioButton + ); + + SourceAdded = true; + } + + if (!SourceAdded) + { + MenuBuilder.AddWidget(SNullWidget::NullWidget, LOCTEXT("NoSourcesFound", "No sources found"), false, false); + } + } + MenuBuilder.EndSection(); + + return MenuBuilder.MakeWidget(); +} + 
+#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.h new file mode 100644 index 000000000000..47f04eaff05d --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Customizations/BlackmagicMediaPortCustomization.h @@ -0,0 +1,29 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "IPropertyTypeCustomization.h" +#include "Widgets/SWidget.h" + +/** + * Implements a details view customization for the MediaPort + */ +class FBlackmagicMediaPortCustomization : public IPropertyTypeCustomization +{ +public: + static TSharedRef MakeInstance() + { + return MakeShareable(new FBlackmagicMediaPortCustomization); + } + + /** IPropertyTypeCustomization interface */ + virtual void CustomizeHeader(TSharedRef InPropertyHandle, class FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& PropertyTypeCustomizationUtils) override; + virtual void CustomizeChildren(TSharedRef InPropertyHandle, class IDetailChildrenBuilder& StructBuilder, IPropertyTypeCustomizationUtils& PropertyTypeCustomizationUtils) override; + +private: + TSharedRef HandleSourceComboButtonMenuContent() const; + + /** Pointer to the MediaPort property handle. 
*/ + TSharedPtr MediaPortProperty; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.cpp new file mode 100644 index 000000000000..9c6d52b71db3 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.cpp @@ -0,0 +1,39 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicMediaOutputFactoryNew.h" + +#include "AssetTypeCategories.h" +#include "BlackmagicMediaOutput.h" + + +/* UBlackmagicMediaOutputFactoryNew structors + *****************************************************************************/ + +UBlackmagicMediaOutputFactoryNew::UBlackmagicMediaOutputFactoryNew(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) +{ + SupportedClass = UBlackmagicMediaOutput::StaticClass(); + bCreateNew = true; + bEditAfterNew = true; +} + + +/* UFactory overrides + *****************************************************************************/ + +UObject* UBlackmagicMediaOutputFactoryNew::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, InClass, InName, Flags); +} + + +uint32 UBlackmagicMediaOutputFactoryNew::GetMenuCategories() const +{ + return EAssetTypeCategories::Media; +} + + +bool UBlackmagicMediaOutputFactoryNew::ShouldShowInNewMenu() const +{ + return true; +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.h new file mode 100644 index 000000000000..bb0d70e6ad7c --- /dev/null +++ 
b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaOutputFactoryNew.h @@ -0,0 +1,22 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Factories/Factory.h" +#include "BlackmagicMediaOutputFactoryNew.generated.h" + + +/** + * Implements a factory for objects. + */ +UCLASS(hidecategories=Object) +class UBlackmagicMediaOutputFactoryNew : public UFactory +{ + GENERATED_UCLASS_BODY() + +public: + //~ UFactory Interface + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual uint32 GetMenuCategories() const override; + virtual bool ShouldShowInNewMenu() const override; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.cpp new file mode 100644 index 000000000000..135201638d96 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.cpp @@ -0,0 +1,39 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaSourceFactoryNew.h" + +#include "AssetTypeCategories.h" +#include "BlackmagicMediaSource.h" + + +/* UBlackmagicMediaSourceFactoryNew structors + *****************************************************************************/ + +UBlackmagicMediaSourceFactoryNew::UBlackmagicMediaSourceFactoryNew(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) +{ + SupportedClass = UBlackmagicMediaSource::StaticClass(); + bCreateNew = true; + bEditAfterNew = true; +} + + +/* UFactory overrides + *****************************************************************************/ + +UObject* UBlackmagicMediaSourceFactoryNew::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, InClass, InName, Flags); +} + + +uint32 UBlackmagicMediaSourceFactoryNew::GetMenuCategories() const +{ + return EAssetTypeCategories::Media; +} + + +bool UBlackmagicMediaSourceFactoryNew::ShouldShowInNewMenu() const +{ + return true; +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.h new file mode 100644 index 000000000000..774841d9f7d5 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaEditor/Private/Factories/BlackmagicMediaSourceFactoryNew.h @@ -0,0 +1,25 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Factories/Factory.h" +#include "BlackmagicMediaSourceFactoryNew.generated.h" + + +/** + * Implements a factory for objects. 
+ */ +UCLASS(hidecategories=Object) +class UBlackmagicMediaSourceFactoryNew + : public UFactory +{ + GENERATED_UCLASS_BODY() + +public: + + //~ UFactory Interface + + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual uint32 GetMenuCategories() const override; + virtual bool ShouldShowInNewMenu() const override; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/BlackmagicMediaFactory.Build.cs b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/BlackmagicMediaFactory.Build.cs new file mode 100644 index 000000000000..4868f557451f --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/BlackmagicMediaFactory.Build.cs @@ -0,0 +1,43 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +namespace UnrealBuildTool.Rules +{ + public class BlackmagicMediaFactory : ModuleRules + { + public BlackmagicMediaFactory(ReadOnlyTargetRules Target) : base(Target) + { + DynamicallyLoadedModuleNames.AddRange( + new string[] { + "Media", + }); + + PrivateDependencyModuleNames.AddRange( + new string[] { + "MediaAssets", + "Projects", + }); + + PrivateIncludePathModuleNames.AddRange( + new string[] { + "Media", + "BlackmagicMedia", + }); + + PrivateIncludePaths.AddRange( + new string[] { + "BlackmagicMediaFactory/Private", + }); + + PublicDependencyModuleNames.AddRange( + new string[] { + "Core", + "CoreUObject", + }); + + if (Target.Platform == UnrealTargetPlatform.Win64) + { + DynamicallyLoadedModuleNames.Add("BlackmagicMedia"); + } + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/Private/BlackmagicMediaFactoryModule.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/Private/BlackmagicMediaFactoryModule.cpp new file mode 100644 index 000000000000..cba561615a2e --- /dev/null +++ 
b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaFactory/Private/BlackmagicMediaFactoryModule.cpp @@ -0,0 +1,136 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "CoreMinimal.h" +#include "IMediaPlayerFactory.h" +#include "IMediaModule.h" +#include "Modules/ModuleInterface.h" +#include "Modules/ModuleManager.h" + +#if WITH_EDITOR + #include "ISettingsModule.h" + #include "ISettingsSection.h" +#endif + +#include "BlackmagicMediaSettings.h" +#include "IBlackmagicMediaModule.h" + + +#define LOCTEXT_NAMESPACE "BlackmagicMediaFactoryModule" + + +/** + * Implements the MediaFactory module. + */ +class FBlackmagicMediaFactoryModule + : public IMediaPlayerFactory + , public IModuleInterface +{ +public: + + //~ IMediaPlayerFactory interface + + virtual bool CanPlayUrl(const FString& Url, const IMediaOptions* /*Options*/, TArray* /*OutWarnings*/, TArray* OutErrors) const override + { + FString Scheme; + FString Location; + + // check scheme + if (!Url.Split(TEXT("://"), &Scheme, &Location, ESearchCase::CaseSensitive)) + { + if (OutErrors != nullptr) + { + OutErrors->Add(LOCTEXT("NoSchemeFound", "No URI scheme found")); + } + + return false; + } + + if (!SupportedUriSchemes.Contains(Scheme)) + { + if (OutErrors != nullptr) + { + OutErrors->Add(FText::Format(LOCTEXT("SchemeNotSupported", "The URI scheme '{0}' is not supported"), FText::FromString(Scheme))); + } + + return false; + } + + return true; + } + + virtual TSharedPtr CreatePlayer(IMediaEventSink& EventSink) override + { + auto MediaModule = FModuleManager::LoadModulePtr("BlackmagicMedia"); + return (MediaModule != nullptr) ? 
MediaModule->CreatePlayer(EventSink) : nullptr; + } + + virtual FText GetDisplayName() const override + { + return LOCTEXT("MediaPlayerDisplayName", "Blackmagic Device Interface"); + } + + virtual FName GetPlayerName() const override + { + static FName PlayerName(TEXT("BlackmagicMedia")); + return PlayerName; + } + + virtual const TArray& GetSupportedPlatforms() const override + { + return SupportedPlatforms; + } + + virtual bool SupportsFeature(EMediaFeature Feature) const override + { + return ((Feature == EMediaFeature::AudioSamples) || + (Feature == EMediaFeature::AudioTracks) || + (Feature == EMediaFeature::MetadataTracks) || + (Feature == EMediaFeature::VideoSamples) || + (Feature == EMediaFeature::VideoTracks)); + + } + +public: + + //~ IModuleInterface interface + + virtual void StartupModule() override + { + // supported platforms + SupportedPlatforms.Add(TEXT("Windows")); + + // supported schemes + SupportedUriSchemes.Add(TEXT("blackmagic")); + + // register player factory + auto MediaModule = FModuleManager::LoadModulePtr("Media"); + + if (MediaModule != nullptr) + { + MediaModule->RegisterPlayerFactory(*this); + } + } + + virtual void ShutdownModule() override + { + // unregister player factory + auto MediaModule = FModuleManager::GetModulePtr("Media"); + + if (MediaModule != nullptr) + { + MediaModule->UnregisterPlayerFactory(*this); + } + } + +private: + /** List of platforms that the media player support. */ + TArray SupportedPlatforms; + + /** List of supported URI schemes. 
*/ + TArray SupportedUriSchemes; +}; + + +#undef LOCTEXT_NAMESPACE + +IMPLEMENT_MODULE(FBlackmagicMediaFactoryModule, BlackmagicMediaFactory); diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/BlackmagicMediaOutput.Build.cs b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/BlackmagicMediaOutput.Build.cs new file mode 100644 index 000000000000..adc2182ad91f --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/BlackmagicMediaOutput.Build.cs @@ -0,0 +1,44 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +namespace UnrealBuildTool.Rules +{ + public class BlackmagicMediaOutput : ModuleRules + { + public BlackmagicMediaOutput(ReadOnlyTargetRules Target) : base(Target) + { + PublicDependencyModuleNames.AddRange( + new string[] { + "BlackmagicMedia" + }); + + PrivateIncludePaths.AddRange( + new string[] + { + "BlackmagicMediaOutput/Private", + } + ); + + PrivateDependencyModuleNames.AddRange( + new string[] + { + "Blackmagic", + "Core", + "CoreUObject", + "Engine", + "MediaIOCore", + "MovieSceneCapture", + "RenderCore", + "RHI", + "Slate", + "SlateCore", + "TimeManagement" + } + ); + + if (Target.bBuildEditor == true) + { + PrivateDependencyModuleNames.Add("UnrealEd"); + } + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaFrameGrabberProtocol.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaFrameGrabberProtocol.cpp new file mode 100644 index 000000000000..5fe1ad2dcd0a --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaFrameGrabberProtocol.cpp @@ -0,0 +1,78 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "BlackmagicMediaFrameGrabberProtocol.h" + +#include "BlackmagicMediaOutput.h" +#include "BlackmagicHardwareSync.h" + +#include "BlackmagicMediaViewportOutputImpl.h" +#include "IBlackmagicMediaOutputModule.h" +#include "MovieSceneCaptureProtocolBase.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaOutput" + +/** + * UBlackmagicFrameGrabberProtocol + */ +UBlackmagicFrameGrabberProtocol::UBlackmagicFrameGrabberProtocol(const FObjectInitializer& ObjInit) + : Super(ObjInit) + , Information("FrameRate, Resolution, Output Directory and Filename Format options won't be used with output") + , TransientMediaOutputPtr(nullptr) +{ +} + +bool UBlackmagicFrameGrabberProtocol::StartCaptureImpl() +{ + if (MediaOutput == nullptr) + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("Couldn't start the capture. No Media Output was provided.")); + return false; + } + + TransientMediaOutputPtr = Cast(MediaOutput.TryLoad()); + if (TransientMediaOutputPtr == nullptr) + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("Couldn't start the capture. 
No Media Output was provided.")); + return false; + } + + Implementation = FBlackmagicMediaViewportOutputImpl::CreateShared(TransientMediaOutputPtr, InitSettings->SceneViewport); + + if (!Implementation.IsValid()) + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("Could not initialize the Output interface.")); + return false; + } + + if (Implementation->GetOutputFrameRate() != CaptureHost->GetCaptureFrameRate()) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("BlackmagicMediaOutput %s FrameRate doesn't match sequence FrameRate."), *TransientMediaOutputPtr->GetName()); + } + + return true; +} + +void UBlackmagicFrameGrabberProtocol::CaptureFrameImpl(const FFrameMetrics& FrameMetrics) +{ + check(Implementation.IsValid()); + + const FFrameNumber FrameNumber = (int32)FrameMetrics.FrameNumber; + FTimecode Timecode = FTimecode::FromFrameNumber(FrameNumber, CaptureHost->GetCaptureFrameRate(), FTimecode::IsDropFormatTimecodeSupported(CaptureHost->GetCaptureFrameRate())); + Implementation->Tick(Timecode); +} + +bool UBlackmagicFrameGrabberProtocol::HasFinishedProcessingImpl() const +{ + return !Implementation.IsValid() || Implementation->HasFinishedProcessing(); +} + +void UBlackmagicFrameGrabberProtocol::FinalizeImpl() +{ + if (Implementation.IsValid()) + { + Implementation->Shutdown(); + Implementation.Reset(); + } +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaOutputModule.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaOutputModule.cpp new file mode 100644 index 000000000000..0739d1b4c169 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaOutputModule.cpp @@ -0,0 +1,18 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "IBlackmagicMediaOutputModule.h" + +#include "BlackmagicMediaFrameGrabberProtocol.h" +#include "Modules/ModuleManager.h" + +#define LOCTEXT_NAMESPACE "BlackmagicMediaOutput" + +DEFINE_LOG_CATEGORY(LogBlackmagicMediaOutput); + +class FBlackmagicMediaOutputModule : public IBlackmagicMediaOutputModule +{ +}; + +IMPLEMENT_MODULE(FBlackmagicMediaOutputModule, BlackmagicMediaOutput ) + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutput.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutput.cpp new file mode 100644 index 000000000000..384e4c54b092 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutput.cpp @@ -0,0 +1,128 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "BlackmagicMediaViewportOutput.h" +#include "BlackmagicMediaViewportOutputImpl.h" +#include "IBlackmagicMediaOutputModule.h" + +#include "Engine/GameEngine.h" +#include "Misc/Timecode.h" +#include "Misc/FrameRate.h" +#include "Widgets/SViewport.h" + +#if WITH_EDITOR +#include "Editor/EditorEngine.h" +#endif //WITH_EDITOR + +/* namespace BlackmagicMediaOutputDevice definition +*****************************************************************************/ + +namespace BlackmagicMediaOutputDevice +{ + bool FindSceneViewportAndLevel(TSharedPtr& OutSceneViewport, ULevel*& OutLevel); +} + +/* UBlackmagicMediaViewportOutput +*****************************************************************************/ + +UBlackmagicMediaViewportOutput::UBlackmagicMediaViewportOutput(const FObjectInitializer& ObjectInitializer) + : Super(ObjectInitializer) + , Implementation(nullptr) +{ +} + +void UBlackmagicMediaViewportOutput::BeginDestroy() +{ + DeactivateOutput(); + Super::BeginDestroy(); +} + +ETickableTickType UBlackmagicMediaViewportOutput::GetTickableTickType() const +{ + 
return HasAnyFlags(RF_ClassDefaultObject) ? ETickableTickType::Never : ETickableTickType::Conditional; +} + +bool UBlackmagicMediaViewportOutput::IsTickable() const +{ + return Implementation.IsValid(); +} + +void UBlackmagicMediaViewportOutput::Tick(float DeltatTime) +{ + if (Implementation.IsValid()) + { + Implementation->Tick(FApp::GetTimecode()); + } +} + +void UBlackmagicMediaViewportOutput::ActivateOutput(UBlackmagicMediaOutput* MediaOutput) +{ + DeactivateOutput(); + + if (MediaOutput != nullptr) + { + TSharedPtr FoundSceneViewport; + ULevel* Level = nullptr; + if (BlackmagicMediaOutputDevice::FindSceneViewportAndLevel(FoundSceneViewport, Level) && FoundSceneViewport.IsValid()) + { + Implementation = FBlackmagicMediaViewportOutputImpl::CreateShared(MediaOutput, FoundSceneViewport); + if (!Implementation.IsValid()) + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("Could not initialized the Output interface.")); + DeactivateOutput(); + } + } + else + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("No viewport could be found. Play in 'Standalone' or in 'New Editor Window PIE'.")); + } + } + else + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("Couldn't start the capture. 
No Media Output was provided.")); + } +} + +void UBlackmagicMediaViewportOutput::DeactivateOutput() +{ + if (Implementation.IsValid()) + { + Implementation->Shutdown(); + Implementation.Reset(); + } +} + +/* namespace AjaMediaOutputDevice implementation +*****************************************************************************/ +namespace BlackmagicMediaOutputDevice +{ + bool FindSceneViewportAndLevel(TSharedPtr& OutSceneViewport, ULevel*& OutLevel) + { +#if WITH_EDITOR + if (GIsEditor) + { + for (const FWorldContext& Context : GEngine->GetWorldContexts()) + { + if (Context.WorldType == EWorldType::PIE) + { + UEditorEngine* EditorEngine = CastChecked(GEngine); + FSlatePlayInEditorInfo& Info = EditorEngine->SlatePlayInEditorMap.FindChecked(Context.ContextHandle); + if (Info.SlatePlayInEditorWindowViewport.IsValid()) + { + OutLevel = Context.World()->GetCurrentLevel(); + OutSceneViewport = Info.SlatePlayInEditorWindowViewport; + return true; + } + } + } + return false; + } + else +#endif + { + UGameEngine* GameEngine = CastChecked(GEngine); + OutLevel = GameEngine->GetGameWorld()->GetCurrentLevel(); + OutSceneViewport = GameEngine->SceneViewport; + return true; + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.cpp new file mode 100644 index 000000000000..d04cea32d812 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.cpp @@ -0,0 +1,463 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+#include "BlackmagicMediaViewportOutputImpl.h" + +#include "BlackmagicMediaOutput.h" +#include "IBlackmagicMediaOutputModule.h" + +#include "MediaIOCoreWaitVSyncThread.h" +#include "MediaIOCoreEncodeTime.h" + +#include "RHIResources.h" + +#include "HAL/Event.h" +#include "HAL/Runnable.h" +#include "HAL/RunnableThread.h" +#include "Templates/Atomic.h" + +#include "Widgets/SViewport.h" + + +/* Utility Functions +*****************************************************************************/ + +namespace BlackmagicMediaOutputDevice +{ + void CopyFrame(uint32 InWidth, uint32 InHeight, uint8* InDst, uint32 InDstMod, uint8* InSrc, uint32 InSrcMod);; + BlackmagicDevice::FTimecode ConvertToTimecode(const FTimecode& InTimecode, float InFPS); +} + +/* namespace FramePayload implementation +*****************************************************************************/ +bool FBlackmagicFramePayload::OnFrameReady_RenderThread(FColor* ColorBuffer, FIntPoint BufferSize, FIntPoint TargetSize) const +{ + if (bUseEndFrameRenderThread) + { + // Lock to make sure ViewportOutputImpl won't be deleted while updating the buffer + TSharedPtr Shared = ViewportOutputImpl.Pin(); + if (Shared.IsValid()) + { + Shared->OnEndFrame_RenderThread(Timecode, ColorBuffer, BufferSize.X, BufferSize.Y); + } + } + return !bUseEndFrameRenderThread; +} + +/* MediaViewportOutputImpl implementation +*****************************************************************************/ +FBlackmagicMediaViewportOutputImpl::FBlackmagicMediaViewportOutputImpl() + : VSyncThread(nullptr) + , VSyncRunnableThread(nullptr) + , bClearBuffer(false) + , bOutputTimecode(false) + , bCopyOnRenderThread(false) + , bWaitForOutputFrame(false) + , bWaitForVSyncEvent(false) + , Device(nullptr) + , Port(nullptr) + , LastFrameDropCount(0) + , FrameRate(30, 1) +{ +} + +FBlackmagicMediaViewportOutputImpl::~FBlackmagicMediaViewportOutputImpl() +{ + if (VSyncRunnableThread.IsValid()) + { + check(VSyncThread.IsValid()); + + // Wait for the 
thread to return. + VSyncRunnableThread->WaitForCompletion(); + VSyncRunnableThread.Reset(); + VSyncThread.Reset(); + } + + ReleaseDevice(); +} + +TSharedPtr FBlackmagicMediaViewportOutputImpl::CreateShared(UBlackmagicMediaOutput* MediaOutput, TSharedPtr SceneViewport) +{ + TSharedPtr Result = TSharedPtr(new FBlackmagicMediaViewportOutputImpl()); + if (!Result->Initialize(MediaOutput, SceneViewport)) + { + Result.Reset(); + } + return Result; +} + +bool FBlackmagicMediaViewportOutputImpl::Initialize(UBlackmagicMediaOutput* MediaOutput, TSharedPtr InSceneViewport) +{ + check(MediaOutput); + check(InSceneViewport.IsValid()); + + bClearBuffer = MediaOutput->bClearBuffer; + ClearBufferColor = MediaOutput->ClearBufferColor; + bOutputTimecode = MediaOutput->bOutputTimecode; + bCopyOnRenderThread = MediaOutput->bCopyOnRenderThread; + bWaitForOutputFrame = MediaOutput->bWaitForOutputFrame; + bWaitForVSyncEvent = MediaOutput->bWaitForVSyncEvent; + PortName = MediaOutput->FillPort.ToString(); + bool bVSyncEventOnAnotherThread = bWaitForVSyncEvent && MediaOutput->bVSyncEventOnAnotherThread; + + if (!BlackmagicDevice::VideoIOModeFrameDesc(MediaOutput->MediaMode.Mode, FrameDesc)) + { + return false; + } + BlackmagicDevice::VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + FrameRate = FFrameRate(FrameInfo.TimeValue, FrameInfo.TimeScale); + + if (!InitDevice(MediaOutput)) + { + return false; + } + check(Port); + + SceneViewport = InSceneViewport; + { + TSharedPtr Widget(InSceneViewport->GetViewportWidget().Pin()); + if (Widget.IsValid()) + { + bSavedIgnoreTextureAlpha = Widget->GetIgnoreTextureAlpha(); + if (MediaOutput->OutputType == EBlackmagicMediaOutputType::FillAndKey) + { + Widget->SetIgnoreTextureAlpha(false); + } + } + } + + EPixelFormat PixelFormat = PF_B8G8R8A8; + uint32 RingBufferSize = 2; + FrameGrabber = MakeShareable(new FFrameGrabber(InSceneViewport.ToSharedRef(), InSceneViewport->GetSize(), PixelFormat, RingBufferSize)); + FrameGrabber->StartCapturingFrames(); + + if 
(bVSyncEventOnAnotherThread) + { + TSharedPtr HardwareSync(new FBlackmagicHardwareSync(Port)); + VSyncThread = MakeUnique(HardwareSync); + VSyncRunnableThread.Reset(FRunnableThread::Create(VSyncThread.Get(), TEXT("FBlackmagicMediaWaitVSyncThread::FBlackmagicMediaWaitVSyncThread"), TPri_AboveNormal)); + } + + if (!bCopyOnRenderThread) + { + EndFrameHandle = FCoreDelegates::OnEndFrame.AddRaw(this, &FBlackmagicMediaViewportOutputImpl::OnEndFrame_GameThread); + } + + // Managed dropped frames + LastFrameDropCount = Port->FrameDropCount(); + + return true; +} + +void FBlackmagicMediaViewportOutputImpl::Shutdown() +{ + if (VSyncRunnableThread.IsValid()) + { + check(VSyncThread.IsValid()); + VSyncThread->Stop(); // stop but don't wait right now, this may take some time + } + + { // restore the ignore texture alpha state + TSharedPtr ViewPort(SceneViewport.Pin()); + if (ViewPort.IsValid()) + { + TSharedPtr Widget(ViewPort->GetViewportWidget().Pin()); + if (Widget.IsValid()) + { + Widget->SetIgnoreTextureAlpha(bSavedIgnoreTextureAlpha); + } + } + } + + SceneViewport.Reset(); + if(FrameGrabber.IsValid()) + { + FrameGrabber->StopCapturingFrames(); + FrameGrabber.Reset(); + } + + if (EndFrameHandle.IsValid()) + { + FCoreDelegates::OnEndFrame.Remove(EndFrameHandle); + } +} + +void FBlackmagicMediaViewportOutputImpl::Tick(const FTimecode& InTimecode) +{ + if (FrameGrabber.IsValid() && Device && Port) + { + auto CurrentPayload = MakeShared(); + + CurrentPayload->ViewportOutputImpl = AsShared(); + CurrentPayload->bUseEndFrameRenderThread = bCopyOnRenderThread; + CurrentPayload->Timecode = InTimecode; + + FrameGrabber->CaptureThisFrame(CurrentPayload); + + VerifyFrameDropCount(); + } + else + { + Shutdown(); + } +} + +bool FBlackmagicMediaViewportOutputImpl::HasFinishedProcessing() const +{ + return Device == nullptr || Port == nullptr || !FrameGrabber.IsValid() || !FrameGrabber->HasOutstandingFrames(); +} + +bool FBlackmagicMediaViewportOutputImpl::Exec(class UWorld* InWorld, 
const TCHAR* Cmd, FOutputDevice& Ar) +{ +#if !UE_BUILD_SHIPPING + if (FParse::Command(&Cmd, TEXT("MediaIO"))) + { + if (FParse::Command(&Cmd, TEXT("ShowOutputTimecode"))) + { + bIsTimecodeLogEnable = true; + return true; + } + else if (FParse::Command(&Cmd, TEXT("HideOutputTimecode"))) + { + bIsTimecodeLogEnable = false; + return true; + } + } +#endif + return false; +} + +void FBlackmagicMediaViewportOutputImpl::OnEndFrame_GameThread() +{ + if (FrameGrabber.IsValid() && Port) + { + bool bFrameWasCaptured = false; + if (WaitForVSync()) + { + if (WaitForOutputFrame()) + { + TArray Frames = FrameGrabber->GetCapturedFrames(); + bFrameWasCaptured = true; + if (Frames.Num() > 0) + { + FCapturedFrameData& LastFrame = Frames.Last(); + + FTimecode Timecode; + if (LastFrame.Payload.IsValid()) + { + Timecode = static_cast(LastFrame.Payload.Get())->Timecode; + } + Present(Timecode, reinterpret_cast(LastFrame.ColorBuffer.GetData()), LastFrame.BufferSize.X, LastFrame.BufferSize.Y); + } + else + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("No output frame was available.")); + } + } + else + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("No frame was captured.")); + } + } + + // capture the frame to prevent the buffer to grow + if (!bFrameWasCaptured) + { + FrameGrabber->GetCapturedFrames(); + } + } +} + +void FBlackmagicMediaViewportOutputImpl::OnEndFrame_RenderThread(const FTimecode& FrameTimecode, FColor* ColorBuffer, int32 ColorBufferWidth, int32 ColorBufferHeight) +{ + check(ColorBuffer); + + if (Port) + { + if (WaitForVSync()) + { + if (WaitForOutputFrame()) + { + Present(FrameTimecode, reinterpret_cast(ColorBuffer), ColorBufferWidth, ColorBufferHeight); + } + } + } +} + +bool FBlackmagicMediaViewportOutputImpl::WaitForVSync() const +{ + bool bResult = true; + if (bWaitForVSyncEvent) + { + if (VSyncThread) + { + bResult = VSyncThread->Wait_GameOrRenderThread(); + } + else + { + Port->WaitVSync(); + } + } + + return bResult; +} + +bool 
FBlackmagicMediaViewportOutputImpl::WaitForOutputFrame() const +{ + bool bResult = bWaitForOutputFrame || Port->PeekFrame(); + if (!bResult) + { + UE_LOG(LogBlackmagicMediaOutput, Error, TEXT("No output frame was available.")); + } + return bResult; +} + +void FBlackmagicMediaViewportOutputImpl::Present(const FTimecode& FrameTimecode, uint8* ColorBuffer, uint32 ColorBufferWidth, uint32 ColorBufferHeight) const +{ + BlackmagicDevice::FFrame Frame = Port->WaitFrame(); + uint32 Width, Height; + BlackmagicDevice::VideoIOFrameDimensions(Frame, Width, Height); + + uint32 Size; + void* Memory = BlackmagicDevice::VideoIOFrameVideoBuffer(Frame, Size); + + // Clip/Center into output buffer + uint32 ClipWidth = (static_cast(ColorBufferWidth) > Width) ? Width : ColorBufferWidth; + uint32 ClipHeight = (static_cast(ColorBufferHeight) > Height) ? Height : ColorBufferHeight; + uint32 DestOffsetX = (Width - ClipWidth) / 2; + uint32 DestOffsetY = (Height - ClipHeight) / 2; + uint32 SrcOffsetX = (ColorBufferWidth - ClipWidth) / 2; + uint32 SrcOffsetY = (ColorBufferHeight - ClipHeight) / 2; + + if (bClearBuffer) + { + uint32 Color = ClearBufferColor.ToPackedARGB(); + uint32* MemoryAsColor = reinterpret_cast(Memory); + uint32* Last = MemoryAsColor + Width*Height; + for (; MemoryAsColor != Last; ++MemoryAsColor) + { + *MemoryAsColor = Color; + } + } + + uint8* DestBuffer = static_cast(Memory) + (DestOffsetX + DestOffsetY * Width) * 4; + uint32 DestMod = Width * 4; + uint8* SrcBuffer = static_cast(ColorBuffer) + (SrcOffsetX + SrcOffsetY * ColorBufferWidth) * 4; + uint32 SrcMod = ColorBufferWidth * 4; + BlackmagicMediaOutputDevice::CopyFrame(ClipWidth, ClipHeight, DestBuffer, DestMod, SrcBuffer, SrcMod); + + // pass the output timecode + if (bOutputTimecode) + { + BlackmagicDevice::FTimecode Timecode = BlackmagicMediaOutputDevice::ConvertToTimecode(FrameTimecode, FrameRate.AsDecimal()); + BlackmagicDevice::VideoIOFrameTimecode(Frame, Timecode); + + if (bIsTimecodeLogEnable) + { + 
UE_LOG(LogBlackmagicMediaOutput, Log, TEXT("Blackmagic output port %s has timecode : %02d:%02d:%02d:%02d"), *PortName, Timecode.Hours, Timecode.Minutes, Timecode.Seconds, Timecode.Frames); + } + } + BlackmagicDevice::VideoIOReleaseFrame(Frame); +} + +void FBlackmagicMediaViewportOutputImpl::VerifyFrameDropCount() +{ + const uint32 FrameDropCount = Port->FrameDropCount(); + if (FrameDropCount > LastFrameDropCount) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("Lost %d frames on output %s. Frame rate may be too slow."), FrameDropCount - LastFrameDropCount, *PortName); + } + LastFrameDropCount = FrameDropCount; +} + +bool FBlackmagicMediaViewportOutputImpl::InitDevice(UBlackmagicMediaOutput* MediaOutput) +{ + check(MediaOutput); + if (!MediaOutput->FillPort.IsValid()) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("The FillPort of '%s' is not valid."), *MediaOutput->GetName()); + return false; + } + + Device = BlackmagicDevice::VideoIOCreateDevice(MediaOutput->FillPort.DeviceIndex); + if (!Device) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("The Device for '%s' could not be created."), *MediaOutput->GetName()); + return false; + } + + uint32_t PortIndex = MediaOutput->FillPort.PortIndex; + + BlackmagicDevice::FPortOptions Options = {}; + Options.bOutput = true; + Options.bUseTimecode = bOutputTimecode; + Options.bOutputKey = MediaOutput->OutputType == EBlackmagicMediaOutputType::FillAndKey; + + // get the output video mode. 
+ BlackmagicDevice::FUInt MediaMode = MediaOutput->MediaMode.Mode; + if (!BlackmagicDevice::VideoIOModeFrameDesc(MediaMode, FrameDesc)) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("bad mode (%d), default to default."), MediaMode); + } + + Port = BlackmagicDevice::VideoIODeviceOpenSharedPort(Device, PortIndex, FrameDesc, Options); + if (!Port) + { + UE_LOG(LogBlackmagicMediaOutput, Warning, TEXT("The output port for '%s' could not be opened."), *MediaOutput->GetName()); + return false; + } + + // Get info on the current video mode + BlackmagicDevice::VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + return true; +} + +void FBlackmagicMediaViewportOutputImpl::ReleaseDevice() +{ + if (Port) + { + Port->Release(); + Port = nullptr; + } + + if (Device) + { + BlackmagicDevice::VideoIOReleaseDevice(Device); + Device = nullptr; + } +} + +/* namespace implementation +*****************************************************************************/ +namespace BlackmagicMediaOutputDevice +{ + void CopyFrame(uint32 InWidth, uint32 InHeight, uint8* InDst, uint32 InDstMod, uint8* InSrc, uint32 InSrcMod) + { + if (InWidth * 4 == InSrcMod && InSrcMod == InDstMod) + { + ::memcpy(InDst, InSrc, InHeight * InWidth * 4); + } + else + { + while (InHeight) + { + ::memcpy(InDst, InSrc, InWidth * 4); + InDst += InDstMod; + InSrc += InSrcMod; + --InHeight; + } + } + } + + BlackmagicDevice::FTimecode ConvertToTimecode(const FTimecode& InTimecode, float InFPS) + { + //We can't write frame numbers greater than 30 + //Get by how much we need to divide the actual count. 
+ const int32 Divider = FMath::CeilToInt(InFPS / 30.0f); + + BlackmagicDevice::FTimecode Timecode; + Timecode.Hours = InTimecode.Hours; + Timecode.Minutes = InTimecode.Minutes; + Timecode.Seconds = InTimecode.Seconds; + Timecode.Frames = InTimecode.Frames / Divider; + return Timecode; + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.h new file mode 100644 index 000000000000..d4faa0edf6c8 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Private/BlackmagicMediaViewportOutputImpl.h @@ -0,0 +1,127 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +#include "BlackmagicLib.h" +#include "BlackmagicMediaOutput.h" +#include "BlackmagicHardwareSync.h" + +#include "Engine/EngineBaseTypes.h" +#include "Misc/FrameRate.h" +#include "FrameGrabber.h" +#include "Slate/SceneViewport.h" +#include "Tickable.h" +#include "Misc/Timecode.h" + +class FBlackmagicMediaViewportOutputImpl; +class FMediaIOCoreWaitVSyncThread; +class FRunnableThread; + +/** + * Custom FramePayload for the FrameGrabber + */ +struct FBlackmagicFramePayload : IFramePayload +{ + FTimecode Timecode; + + bool bUseEndFrameRenderThread; + TWeakPtr ViewportOutputImpl; + + virtual bool OnFrameReady_RenderThread(FColor* ColorBuffer, FIntPoint BufferSize, FIntPoint TargetSize) const override; +}; + + +/** + * Implementation of BlackmagicMediaViewportOutput + */ +class FBlackmagicMediaViewportOutputImpl + : public TSharedFromThis + , public FSelfRegisteringExec +{ + friend FBlackmagicFramePayload; + +public: + ~FBlackmagicMediaViewportOutputImpl(); + + static TSharedPtr CreateShared(UBlackmagicMediaOutput* MediaOutput, TSharedPtr SceneViewport); + void Shutdown(); + + void Tick(const FTimecode& InTimecode); + bool HasFinishedProcessing() 
const; + + FFrameRate GetOutputFrameRate() const { return FrameRate; } + + //~ Begin FSelfRegisteringExec Interface. + + virtual bool Exec(class UWorld* InWorld, const TCHAR* Cmd, FOutputDevice& Ar) override; + +private: + FBlackmagicMediaViewportOutputImpl(); + bool Initialize(UBlackmagicMediaOutput* MediaOutput, TSharedPtr SceneViewport); + + FBlackmagicMediaViewportOutputImpl(const FBlackmagicMediaViewportOutputImpl&) = delete; + FBlackmagicMediaViewportOutputImpl& operator=(const FBlackmagicMediaViewportOutputImpl&) = delete; + + bool InitDevice(UBlackmagicMediaOutput* MediaOutput); + void ReleaseDevice(); + + void OnEndFrame_GameThread(); + void OnEndFrame_RenderThread(const FTimecode& FrameTimecode, FColor* ColorBuffer, int32 Width, int32 Height); + bool WaitForVSync() const; + bool WaitForOutputFrame() const; + void Present(const FTimecode& FrameTimecode, uint8* ColorBuffer, uint32 ColorBufferWidth, uint32 ColorBufferHeight) const; + void VerifyFrameDropCount(); + +private: + /** WaitForVSync task Runnable */ + TUniquePtr VSyncThread; + + /** WaitForVSync thread */ + TUniquePtr VSyncRunnableThread; + + /** Delegate handle for the OnEndFrame event */ + FDelegateHandle EndFrameHandle; + + /** Grab the back buffer in the thread safe way */ + TSharedPtr FrameGrabber; + + /** Viewport we want to grab from */ + TWeakPtr SceneViewport; + + /** Option from MediaOutput */ + bool bClearBuffer; + FColor ClearBufferColor; + bool bOutputTimecode; + bool bCopyOnRenderThread; + bool bWaitForOutputFrame; + bool bWaitForVSyncEvent; + + /** Saved IgnoreTextureAlpa flag from viewport */ + bool bSavedIgnoreTextureAlpha; + + /** Current video mode */ + BlackmagicDevice::FFrameDesc FrameDesc; + + /** Info about the video mode */ + BlackmagicDevice::FFrameInfo FrameInfo; + + /** Device for outputting */ + BlackmagicDevice::FDevice Device; + + /** Port for outputting */ + BlackmagicDevice::FPort Port; + + /* Last frame drop count to detect count */ + uint64 LastFrameDropCount; + + 
/* Name of this output port */ + FString PortName; + + /* Selected FrameRate of this output. todo: Populate it with future MediaOutput FrameRate*/ + FFrameRate FrameRate; + + /** Enable Output Timecode Log */ + bool bIsTimecodeLogEnable; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaFrameGrabberProtocol.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaFrameGrabberProtocol.h new file mode 100644 index 000000000000..8fa1af0a5992 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaFrameGrabberProtocol.h @@ -0,0 +1,47 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "BlackmagicMediaOutput.h" +#include "Protocols/FrameGrabberProtocol.h" +#include "UObject/StrongObjectPtr.h" + +#include "BlackmagicMediaFrameGrabberProtocol.generated.h" + +class FBlackmagicMediaViewportOutputImpl; + +UCLASS(meta=(DisplayName="Blackmagic Output", CommandLineID="BlackmagicOutput")) +class BLACKMAGICMEDIAOUTPUT_API UBlackmagicFrameGrabberProtocol : public UMovieSceneImageCaptureProtocolBase +{ +public: + GENERATED_BODY() + + UBlackmagicFrameGrabberProtocol(const FObjectInitializer& ObjInit); + + /** ~UMovieSceneCaptureProtocolBase implementation */ + virtual bool StartCaptureImpl() override; + virtual void CaptureFrameImpl(const FFrameMetrics& FrameMetrics) override; + virtual bool HasFinishedProcessingImpl() const override; + virtual void FinalizeImpl() override; + virtual bool CanWriteToFileImpl(const TCHAR* InFilename, bool bOverwriteExisting) const { return false; } + /** ~End UMovieSceneCaptureProtocolBase implementation */ + +public: + + /** Setting to use for the FrameGrabberProtocol */ + UPROPERTY(config, BlueprintReadWrite, EditAnywhere, Category=Blackmagic, meta=(AllowedClasses=BlackmagicMediaOutput)) + FSoftObjectPath MediaOutput; + + /** States unused 
options for BlackmagicFrameGrabberProtocolSettings */ + UPROPERTY(VisibleAnywhere, Transient, Category=Blackmagic) + FString Information; + +private: + + /** Transient media output pointer to keep the media output alive while this protocol is in use */ + UPROPERTY(transient) + UBlackmagicMediaOutput* TransientMediaOutputPtr; + + TSharedPtr Implementation; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaViewportOutput.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaViewportOutput.h new file mode 100644 index 000000000000..a9c095d733ff --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/BlackmagicMediaViewportOutput.h @@ -0,0 +1,42 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +#include "Tickable.h" + +#include "BlackmagicMediaViewportOutput.generated.h" + +class UBlackmagicMediaOutput; +class FBlackmagicMediaViewportOutputImpl; + +/** + * Class to manage to output the viewport + */ +UCLASS(BlueprintType) +class BLACKMAGICMEDIAOUTPUT_API UBlackmagicMediaViewportOutput : public UObject, public FTickableGameObject +{ + GENERATED_UCLASS_BODY() + +public: + //~ FTickableGameObject interface + virtual ETickableTickType GetTickableTickType() const override; + virtual bool IsTickable() const override; + virtual bool IsTickableWhenPaused() const override { return true; } + virtual bool IsTickableInEditor() const override { return true; } + virtual void Tick(float DeltatTime) override; + virtual TStatId GetStatId() const override { RETURN_QUICK_DECLARE_CYCLE_STAT(UBlackmagicMediaViewportOutput, STATGROUP_Tickables); } + + //~ UObject interface + virtual void BeginDestroy() override; + + UFUNCTION(BlueprintCallable, Category=BLACKMAGIC) + void ActivateOutput(UBlackmagicMediaOutput* MediaOutput); + + UFUNCTION(BlueprintCallable, Category=BLACKMAGIC) + void 
DeactivateOutput(); + +private: + TSharedPtr Implementation; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/IBlackmagicMediaOutputModule.h b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/IBlackmagicMediaOutputModule.h new file mode 100644 index 000000000000..0ee3329bb7f9 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/BlackmagicMediaOutput/Public/IBlackmagicMediaOutputModule.h @@ -0,0 +1,41 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Modules/ModuleManager.h" + +#include "UObject/Object.h" + +DECLARE_LOG_CATEGORY_EXTERN(LogBlackmagicMediaOutput, Log, All); + +/** + * The public interface to this module. In most cases, this interface is only public to sibling modules + * within this plugin. + */ +class IBlackmagicMediaOutputModule : public IModuleInterface +{ + +public: + /** + * Singleton-like access to this module's interface. This is just for convenience! + * Beware of calling this during the shutdown phase, though. Your module might have been unloaded already. + * + * @return Returns singleton instance, loading the module on demand if needed + */ + static inline IBlackmagicMediaOutputModule& Get() + { + return FModuleManager::LoadModuleChecked< IBlackmagicMediaOutputModule >( "BlackmagicMediaOutput" ); + } + + /** + * Checks to see if this module is loaded and ready. It is only valid to call Get() if IsAvailable() returns true. 
+ * + * @return True if the module is loaded and ready to use + */ + static inline bool IsAvailable() + { + return FModuleManager::Get().IsModuleLoaded( "BlackmagicMediaOutput" ); + } +}; + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Blackmagic.Build.cs b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Blackmagic.Build.cs new file mode 100644 index 000000000000..edd879c4b0ab --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Blackmagic.Build.cs @@ -0,0 +1,46 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +using System.IO; +using UnrealBuildTool; + +public class Blackmagic : ModuleRules +{ + public Blackmagic(ReadOnlyTargetRules Target) : base(Target) + { + Type = ModuleType.External; + + if (Target.Platform == UnrealTargetPlatform.Win64) + { + PublicDefinitions.Add("BLACKMAGICMEDIA_DLL_PLATFORM=1"); + + string SDKDir = ModuleDirectory; + string LibPath = Path.Combine(ModuleDirectory, "../../../Binaries/ThirdParty/Win64"); + + string LibraryName = "BlackmagicLib"; + + bool bHaveDebugLib = File.Exists(Path.Combine(LibPath, "BlackmagicLibd.dll")); + if (bHaveDebugLib && Target.Configuration == UnrealTargetConfiguration.Debug) + { + LibraryName = "BlackmagicLibd"; + PublicDefinitions.Add("BLACKMAGICMEDIA_DLL_DEBUG=1"); + } + else + { + PublicDefinitions.Add("BLACKMAGICMEDIA_DLL_DEBUG=0"); + } + + PublicIncludePaths.Add(Path.Combine(SDKDir, "Include")); + PublicLibraryPaths.Add(LibPath); + PublicAdditionalLibraries.Add(LibraryName + ".lib"); + + PublicDelayLoadDLLs.Add(LibraryName + ".dll"); + RuntimeDependencies.Add(Path.Combine(LibPath, LibraryName + ".dll")); + } + else + { + PublicDefinitions.Add("BLACKMAGICMEDIA_DLL_PLATFORM=0"); + PublicDefinitions.Add("BLACKMAGICMEDIA_DLL_DEBUG=0"); + System.Console.WriteLine("BLACKMAGIC not supported on this platform"); + } + } +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Include/BlackmagicLib.h 
b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Include/BlackmagicLib.h new file mode 100644 index 000000000000..b564aa77fcc5 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Build/Include/BlackmagicLib.h @@ -0,0 +1,287 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#ifdef VIDEOIO_EXPORTS +#define VIDEOIO_API __declspec(dllexport) +#else +#define VIDEOIO_API __declspec(dllimport) +#endif + +namespace BlackmagicDevice +{ + /* + * Types provided from the interface + */ + + struct IPortShared; + + typedef void* FDeviceScanner; + typedef void* FDeviceInfo; + typedef void* FDevice; + typedef IPortShared* FPort; + typedef void* FFrame; + + typedef unsigned int FUInt; + typedef unsigned char FUByte; + + using LoggingCallbackPtr = void(*)(const TCHAR* Format, ...); + + enum struct EFrameFormat + { + FF_PALI, + FF_NTSCI, + FF_NTSCP, + FF_720P, + FF_1080I, + FF_1080P, + FF_AUTO, + }; + + enum struct EPixelFormat + { + PF_UYVY, + PF_ARGB, + PF_MAX, + }; + + enum struct EFrameRate + { + FR_2398, + FR_2400, + FR_2500, + FR_2997, + FR_3000, + FR_5000, + FR_5994, + FR_6000, + FR_AUTO, + }; + + enum struct EModeFilter + { + MF_INPUT, + MF_OUTPUT, + MF_BOTH, + }; + + /* + * Format of the required frame format + */ + + struct FFrameDesc + { + FFrameDesc() + : FrameFormat(EFrameFormat::FF_AUTO) + , PixelFormat(EPixelFormat::PF_ARGB) + , FrameRate(EFrameRate::FR_AUTO) + { + } + + FFrameDesc(EFrameFormat InFrameFormat, EPixelFormat InPixelFormat, EFrameRate InFrameRate) + : FrameFormat(InFrameFormat) + , PixelFormat(InPixelFormat) + , FrameRate(InFrameRate) + { + } + bool operator== (const FFrameDesc& Other) const + { + return FrameFormat == Other.FrameFormat + && PixelFormat == Other.PixelFormat + && FrameRate == Other.FrameRate; + } + EFrameFormat FrameFormat; + EPixelFormat PixelFormat; + EFrameRate FrameRate; + }; + + struct FPortOptions + { + /** use timecode with the port */ + bool bUseTimecode; + + /** 
use sync only */ + bool bUseSync; + + /** enable video */ + bool bUseVideo; + + /** deliver frames from blackmagic callback */ + bool bUseCallback; + + /** enable audio */ + bool bUseAudio; + + /** port is for output */ + bool bOutput; + + /** number of allocated frame buffers */ + FUInt FrameBuffers; + + /** output should have a key channel */ + /** output port also sends key on port + 1 */ + bool bOutputKey; + + /** number of audio channels */ + FUInt AudioChannels; // number of audio channels to capture + }; + + /* + * Information about a given frame desc + */ + + struct FFrameInfo + { + /** Is Drop framerate */ + bool DropFrame; + + /** Actual framerate */ + float FrameRate; + + /** Root framerate to calculate timecode */ + float RootFrameRate; + + /** Clocks per Second */ + FUInt TimeScale; + + /** Clocks per Frame */ + FUInt TimeValue; + + /** Image Width in pixels */ + FUInt Width; + + /** Image Height in pixels */ + FUInt Height; + + /** Aspect Ratio Width */ + FUInt RatioWidth; + + /** Aspect Ratio Height */ + FUInt RatioHeight; + + /** number of effective bytes per pixel (YUYV is 2 bytes per pixel) */ + FUInt BytesPerPixel; + + /** Name of the display mode */ + const wchar_t* FormatName; + }; + + /* + * Timecode + */ + + struct FTimecode + { + FTimecode() + : Hours(0) + , Minutes(0) + , Seconds(0) + , Frames(0) + { + } + bool operator== (const FTimecode& Other) const + { + return Other.Hours == Hours + && Other.Minutes == Minutes + && Other.Seconds == Seconds + && Other.Frames == Frames; + } + FUInt Hours; + FUInt Minutes; + FUInt Seconds; + /** limited to 30fps */ + FUInt Frames; + bool bField; + bool bIsDropFrame; + }; + + struct VIDEOIO_API IPortCallback + { + IPortCallback(); + virtual ~IPortCallback(); + + //* only called if Option.bUseSync is true */ + virtual void OnInitializationCompleted(bool bSucceed) = 0; + //* only called if Option.bUseVideo is true */ + //* return true if you want to hold the frame */ + virtual bool OnFrameArrived(FFrame 
InFrame) = 0; + }; + + /* + * Configure Logging + */ + + VIDEOIO_API void VideoIOSetLoggingCallbacks(LoggingCallbackPtr LogInfoFunc, LoggingCallbackPtr LogWarningFunc, LoggingCallbackPtr LogErrorFunc); + + /* + * VideoFormat + */ + + VIDEOIO_API bool VideoIOFrameDescSupported(const FFrameDesc& InFrameDesc); + VIDEOIO_API bool VideoIOFrameDesc2Info(const FFrameDesc& InFrameDesc, FFrameInfo& OutFrameInfo); + VIDEOIO_API bool VideoIOFrameDesc2Name(const FFrameDesc& InFrameDesc, TCHAR* OutModeName, FUInt InSize); + VIDEOIO_API FUInt VideoIOModeCount(); + VIDEOIO_API bool VideoIOModeNames(FUInt InMode, EModeFilter InModeFilter, TCHAR* OutModeName, FUInt InSize); + VIDEOIO_API bool VideoIOModeFrameDesc(FUInt InMode, FFrameDesc& OutFrameDesc); + + /* + * DeviceScanner + */ + + VIDEOIO_API FDeviceScanner VideoIOCreateDeviceScanner(void); + VIDEOIO_API void VideoIOReleaseDeviceScanner(FDeviceScanner InDeviceScanner); + + VIDEOIO_API FUInt VideoIODeviceScannerGetNumDevices(FDeviceScanner InDeviceScanner); + VIDEOIO_API void VideoIODeviceScannerScanHardware(FDeviceScanner InDeviceScanner); + VIDEOIO_API FDeviceInfo VideoIODeviceScannerGetDeviceInfo(FDeviceScanner InDeviceScanner, FUInt InDeviceIndex); + + /* + * Device Info + */ + + VIDEOIO_API void VideoIOReleaseDeviceInfo(FDeviceInfo InDeviceInfo); + VIDEOIO_API bool VideoIODeviceInfoGetDeviceId(FDeviceInfo InDeviceInfo, TCHAR* OutDeviceId, FUInt InSize); + + VIDEOIO_API FUInt VideoIODeviceInfoGetVidInputs(FDeviceInfo InDeviceInfo); + VIDEOIO_API FUInt VideoIODeviceInfoGetVidOutputs(FDeviceInfo InDeviceInfo); + + /* + * Device/Card + */ + + VIDEOIO_API FDevice VideoIOCreateDevice(FUInt InDeviceIndex); + VIDEOIO_API void VideoIOReleaseDevice(FDevice InDevice); + + /* + * Frame + */ + + VIDEOIO_API void VideoIOReleaseFrame(FFrame InFrame); + + /* Returns Frame Stride */ + VIDEOIO_API FUInt VideoIOFrameDimensions(FFrame InFrame, FUInt& OutWidth, FUInt& OutHeight); + + VIDEOIO_API FUByte* VideoIOFrameVideoBuffer(FFrame InFrame, 
FUInt& OutSize); + VIDEOIO_API int32_t* VideoIOFrameAudioBuffer(FFrame InFrame, FUInt& OutSize, FUInt& OutNumChannels, FUInt& OutAudioRate, FUInt& OutNumSamples); + + VIDEOIO_API FUByte* VideoIOFrameMetaBuffer(FFrame InFrame, FUInt& OutSize); + VIDEOIO_API void VideoIOFrameTimecode(FFrame InFrame, FTimecode& outTimecode); + VIDEOIO_API void VideoIOFrameDesc(FFrame InFrame, FFrameDesc& OutFrameDesc); + + struct VIDEOIO_API IPortShared + { + virtual ~IPortShared() {} + virtual void Release() = 0; + + virtual bool PeekFrame() = 0; + virtual FFrame WaitFrame() = 0; + + virtual bool WaitVSync() = 0; + virtual bool GetTimecode(FTimecode& OutTimecode) = 0; + virtual FUInt FrameDropCount() = 0; + + virtual bool SetCallback(IPortCallback* InCallback) = 0; + }; + + VIDEOIO_API IPortShared* VideoIODeviceOpenSharedPort(FDevice InDevice, FUInt InPortIndex, const FFrameDesc& InFrameDesc, const FPortOptions& InOptions); +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Include/DeckLinkAPIVersion.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Include/DeckLinkAPIVersion.h new file mode 100644 index 000000000000..818d7003d619 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Include/DeckLinkAPIVersion.h @@ -0,0 +1,37 @@ +/* -LICENSE-START- + * ** Copyright (c) 2014 Blackmagic Design + * ** + * ** Permission is hereby granted, free of charge, to any person or organization + * ** obtaining a copy of the software and accompanying documentation covered by + * ** this license (the "Software") to use, reproduce, display, distribute, + * ** execute, and transmit the Software, and to prepare derivative works of the + * ** Software, and to permit third-parties to whom the Software is furnished to + * ** do so, all subject to the following: + * ** + * ** The copyright notices in the Software and this entire statement, including + * ** the above license grant, this restriction and the following disclaimer, + * ** must be 
included in all copies of the Software, in whole or in part, and + * ** all derivative works of the Software, unless such copies or derivative + * ** works are solely in the form of machine-executable object code generated by + * ** a source language processor. + * ** + * ** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * ** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * ** FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT + * ** SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE + * ** FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, + * ** ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * ** DEALINGS IN THE SOFTWARE. + * ** -LICENSE-END- + * */ + +/* DeckLinkAPIVersion.h */ + +#ifndef __DeckLink_API_Version_h__ +#define __DeckLink_API_Version_h__ + +#define BLACKMAGIC_DECKLINK_API_VERSION 0x0a090a00 +#define BLACKMAGIC_DECKLINK_API_VERSION_STRING "10.9.10" + +#endif // __DeckLink_API_Version_h__ + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/List.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/List.h new file mode 100644 index 000000000000..d1476ef13237 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/List.h @@ -0,0 +1,100 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +/* + * Node in a list + */ + +struct ListNode +{ + void Remove() + { + Next->Prev = Prev; + Prev->Next = Next; + } + + bool IsLast() + { + return Next == 0; + } + bool IsFirst() + { + return Prev == 0; + } + + ListNode* Next; + ListNode* Prev; +}; + +/* + * Head/Tail in a list + */ + +struct ListHead +{ + ListHead() + { + Head = reinterpret_cast(&Null); + Null = 0; + Tail = reinterpret_cast(&Head); + } + bool IsEmpty() + { + return Head->IsLast(); + } + void AddBefore(ListNode* InNode, ListNode* Before) + { + InNode->Next = Before; + InNode->Prev = Before->Prev; + Before->Prev->Next = InNode; + Before->Prev = InNode; + } + void AddHead(ListNode& InNode) + { + AddHead(&InNode); + } + void AddHead(ListNode* InNode) + { + AddBefore(InNode, Head); + } + void AddTail(ListNode& InNode) + { + AddTail(&InNode); + } + void AddTail(ListNode* InNode) + { + AddBefore(InNode, reinterpret_cast(&Null)); + } + ListNode* RemHead() + { + if (IsEmpty()) + { + return nullptr; + } + ListNode* Return = Head; + Return->Remove(); + return Return; + } + ListNode* RemTail() + { + if (IsEmpty()) + { + return nullptr; + } + ListNode* Return = Tail; + Return->Remove(); + return Return; + } + + ListNode* Head; + ListNode* Null; + ListNode* Tail; +}; + +/* + * Helper methods + */ + +#define LIST_OFFSET_OF(__CLASS, __MEMBER) reinterpret_cast(&reinterpret_cast<__CLASS*>(0)->__MEMBER) +#define LIST_LISTOF(__CLASS, __MEMBER, __POINTER) reinterpret_cast<__CLASS*>(reinterpret_cast(__POINTER) - LIST_OFFSET_OF(__CLASS, __MEMBER)) diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.cpp new file mode 100644 index 000000000000..d74d6a5d34e3 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.cpp @@ -0,0 +1,150 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "stdafx.h" +#include "VideoIOPrivate.h" + +namespace BlackmagicDevice +{ + + /* + * Video IO device + */ + PrivateDeviceCache& PrivateDeviceCache::GetCache() + { + // Static state with guaranteed initialization + static PrivateDeviceCache DeviceCache; + return DeviceCache; + } + + PrivateDevice* PrivateDeviceCache::AquireDevice(FUInt InDeviceId) + { + Thread::FAutoLock AutoLock(Lock); + if (InDeviceId >= MaxDeviceCount) + { + LOG_ERROR(TEXT("Aquire device, bad device index %d (%d)\n"), InDeviceId, MaxDeviceCount); + return nullptr; + } + + if (DeviceList[InDeviceId]) + { + PrivateDevice* Device = DeviceList[InDeviceId]; + Device->AddRef(); + return Device; + } + + RefPointer DeckLinkIterator; + ComCheck(CoCreateInstance(CLSID_CDeckLinkIterator, NULL, CLSCTX_ALL, IID_IDeckLinkIterator, DeckLinkIterator)); + + // Should iterate for the device index + RefPointer DeckLink; + for (FUInt i = 0; DeckLinkIterator->Next(DeckLink) == S_OK && i < InDeviceId; ++i) + { + DeckLink.Reset(); + } + + if (!DeckLink) + { + LOG_ERROR(TEXT("Failed to create device %d\n"), InDeviceId); + return nullptr; + } + + PrivateDevice* Device = new PrivateDevice(InDeviceId, DeckLink); + DeviceList[InDeviceId] = Device; + return Device; + } + + void PrivateDeviceCache::ReleaseDevice(PrivateDevice *InDevice) + { + Thread::FAutoLock AutoLock(Lock); + FUInt DeviceId = InDevice->GetDeviceIndex(); + if (DeviceList[DeviceId] == InDevice) + { + DeviceList[DeviceId] = nullptr; + } + else + { + LOG_ERROR(TEXT("Failed to relase device %d\n"), DeviceId); + } + } + + PrivateDevice::PrivateDevice(int InDeviceId, RefPointer& InDeckLink) + : DeviceId(InDeviceId) + , DeckLink(InDeckLink) + { + AddRef(); + InputPorts.resize(MaxPortCount); + OutputPorts.resize(MaxPortCount); + } + + PrivateDevice::~PrivateDevice() + { + PrivateDeviceCache::GetCache().ReleaseDevice(this); + } + + HRESULT PrivateDevice::QueryInterface(RefPointer& OutDeckLinkInput) + { + return DeckLink->QueryInterface(IID_IDeckLinkInput, 
OutDeckLinkInput); + } + + HRESULT PrivateDevice::QueryInterface(RefPointer& OutDeckLinkOutput) + { + return DeckLink->QueryInterface(IID_IDeckLinkOutput, OutDeckLinkOutput); + } + + HRESULT PrivateDevice::QueryInterface(RefPointer& OutDeckLinkKeyer) + { + return DeckLink->QueryInterface(IID_IDeckLinkKeyer, OutDeckLinkKeyer); + } + + PrivatePort* PrivateDevice::AquirePort(FUInt InPortId, bool InInput) + { + Thread::FAutoLock AutoLock(Lock); + + if (InPortId >= MaxPortCount) + { + LOG_ERROR(TEXT("Aquire port, bad port %d (%d)\n"), InPortId, MaxPortCount); + return 0; + } + + FPortList& Ports = InInput ? InputPorts : OutputPorts; + + if (Ports[InPortId]) + { + if (!InInput) + { + LOG_ERROR(TEXT("can't share output port %d\n"), InPortId); + return 0; + } + PrivatePort* Port = Ports[InPortId]; + Port->AddRef(); + return Port; + } + + PrivatePort* Port = new PrivatePort(this, InPortId, InInput); + Ports[InPortId] = Port; + return Port; + } + + void PrivateDevice::ReleasePort(PrivatePort* InPort) + { + Thread::FAutoLock AutoLock(Lock); + assert(InPort); + FUInt PortId = InPort->GetPortIndex(); + FPortList& Ports = InPort->IsInput() ? InputPorts : OutputPorts; + + if (Ports[PortId] == InPort) + { + Ports[PortId] = nullptr; + } + else + { + LOG_ERROR(TEXT("Failed to release port %d\n"), PortId); + } + } + + FUInt PrivateDevice::GetDeviceIndex() + { + return DeviceId; + } + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.h new file mode 100644 index 000000000000..d25ca43bfc18 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateDevice.h @@ -0,0 +1,54 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +struct IDeckLink; + +namespace BlackmagicDevice +{ + class PrivatePort; + /* + * Video IO device + */ + class PrivateDevice : public RefCount + { + static const FUInt MaxPortCount = 8; + public: + PrivateDevice(int InDeviceId, RefPointer& InDeckLink); + ~PrivateDevice(); + + HRESULT QueryInterface(RefPointer& OutDeckLinkInput); + HRESULT QueryInterface(RefPointer& OutDeckLinkOutput); + HRESULT QueryInterface(RefPointer& OutDeckLinkKeyer); + + PrivatePort* AquirePort(FUInt InPort, bool InDirection); + void ReleasePort(PrivatePort* InPort); + + FUInt GetDeviceIndex(); + + protected: + Thread::FLock Lock; + + int DeviceId; + RefPointer DeckLink; + + typedef std::vector FPortList; + FPortList InputPorts; + FPortList OutputPorts; + }; + + class PrivateDeviceCache + { + public: + static PrivateDeviceCache& GetCache(); + + PrivateDevice* AquireDevice(FUInt InDeviceId); + void ReleaseDevice(PrivateDevice* InDevice); + + protected: + Thread::FLock Lock; + static const int MaxDeviceCount = 8; + std::array DeviceList; + }; + +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.cpp new file mode 100644 index 000000000000..fbceef295e7f --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.cpp @@ -0,0 +1,114 @@ +/// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "stdafx.h" +namespace BlackmagicDevice +{ + + /* + * Video Frame methods + */ + VIDEOIO_API void VideoIOReleaseFrame(FFrame InFrame) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + Frame->PrivatePort->ReleaseFrame(Frame); + } + + VIDEOIO_API FUInt VideoIOFrameDimensions(FFrame InFrame, FUInt& OutWidth, FUInt& OutHeight) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + + if (Frame->PrivatePort->IsInput()) + { + + OutWidth = Frame->DeckLinkVideoInputFrame->GetWidth(); + if (Frame->PrivatePort->IsPixelFormat(EPixelFormat::PF_UYVY)) + { + OutWidth /= 2; + } + OutHeight = Frame->DeckLinkVideoInputFrame->GetHeight(); + return Frame->DeckLinkVideoInputFrame->GetRowBytes(); + } + else + { + // different pointer for input output + OutWidth = Frame->DeckLinkMutableVideoFrame->GetWidth(); + if (Frame->PrivatePort->IsPixelFormat(EPixelFormat::PF_UYVY)) + { + OutWidth /= 2; + } + OutHeight = Frame->DeckLinkMutableVideoFrame->GetHeight(); + return Frame->DeckLinkMutableVideoFrame->GetRowBytes(); + } + } + + VIDEOIO_API FUByte* VideoIOFrameVideoBuffer(FFrame InFrame, FUInt& OutSize) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + + if (Frame->PrivatePort->IsInput()) + { + FUInt Pitch = Frame->DeckLinkVideoInputFrame->GetRowBytes(); + FUInt Height = Frame->DeckLinkVideoInputFrame->GetHeight(); + OutSize = Pitch*Height; + void* Buffer; + ComCheck(Frame->DeckLinkVideoInputFrame->GetBytes(&Buffer)); + return reinterpret_cast(Buffer); + } + // assume output + FUInt Pitch = Frame->DeckLinkMutableVideoFrame->GetRowBytes(); + FUInt Height = Frame->DeckLinkMutableVideoFrame->GetHeight(); + OutSize = Pitch*Height; + void* Buffer; + ComCheck(Frame->DeckLinkMutableVideoFrame->GetBytes(&Buffer)); + return reinterpret_cast(Buffer); + } + + + VIDEOIO_API int32_t* VideoIOFrameAudioBuffer(FFrame InFrame, FUInt& OutSize, FUInt& OutNumChannels, FUInt& OutAudioRate, FUInt& OutNumSamples) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + // if no audio 
packet attached to this frame + if (!Frame->ActiveAudioSamples) + { + OutSize = OutNumChannels = OutAudioRate = OutNumSamples = 0; + return 0; + } + + Frame->PrivatePort->GetAudioFormat(OutNumChannels, OutAudioRate); + + OutNumSamples = Frame->ActiveAudioSamples; + OutSize = OutNumChannels * OutNumSamples; + + return reinterpret_cast(Frame->AudioFrame); + } + + VIDEOIO_API FUByte* VideoIOFrameMetaBuffer(FFrame InFrame, FUInt& OutSize) + { + OutSize = 0; + return nullptr; + } + + VIDEOIO_API void VideoIOFrameTimecode(FFrame InFrame, FTimecode& Timecode) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + if (Frame->PrivatePort->IsInput()) + { + Timecode = Frame->Timecode; + } + else + { + Frame->Timecode = Timecode; + } + } + + VIDEOIO_API void VideoIOFrameDesc(FFrame InFrame, FFrameDesc& OutFrameDesc) + { + PrivateFrame* Frame = reinterpret_cast(InFrame); + OutFrameDesc = Frame->FrameDesc; + } + + PrivateFrame::~PrivateFrame() + { + } + +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.h new file mode 100644 index 000000000000..05ccf14d3b5a --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivateFrame.h @@ -0,0 +1,30 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once +namespace BlackmagicDevice +{ + + /* + * Frame in memory + */ + class PrivatePort; + + class PrivateFrame : public Thread::FMessage + { + public: + ~PrivateFrame(); + + public: + RefPointer PrivatePort; + RefPointer DeckLinkVideoInputFrame; + RefPointer DeckLinkMutableVideoFrame; + + FUInt AudioSamples; + FUInt ActiveAudioSamples; + void *AudioFrame; + + FTimecode Timecode; + FFrameDesc FrameDesc; + }; + +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.cpp new file mode 100644 index 000000000000..13a72e253a81 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.cpp @@ -0,0 +1,670 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "stdafx.h" + +namespace BlackmagicDevice +{ + + /* + * Open a port on a device for input/output (C Style interface) + */ + + PrivateInputCallback::PrivateInputCallback(PrivatePort* InPort) + : Port(InPort) + { + AddRef(); + } + + ULONG PrivateInputCallback::AddRef() + { + ++Count; + return Count; + } + + ULONG PrivateInputCallback::Release() + { + ULONG LocalCount = --Count; + if (!LocalCount) + { + delete this; + } + return LocalCount; + } + + HRESULT PrivateInputCallback::QueryInterface(REFIID InRIID, void **OutObject) + { + return E_FAIL; + } + + HRESULT PrivateInputCallback::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents InNotificationEvents, IDeckLinkDisplayMode *InNewDisplayMode, BMDDetectedVideoInputFormatFlags InDetectedSignalFlags) + { + return Port->VideoInputFormatChanged(InNotificationEvents, InNewDisplayMode, InDetectedSignalFlags); + } + + HRESULT PrivateInputCallback::VideoInputFrameArrived(IDeckLinkVideoInputFrame* InVideoFrame, IDeckLinkAudioInputPacket* InAudioPacket) + { + return Port->VideoInputFrameArrived(InVideoFrame, InAudioPacket); + } + + /* + * Exported to allow the frame capture to call 
back + */ + + PrivateOutputCallback::PrivateOutputCallback(PrivatePort* InPort) + : Port(InPort) + { + AddRef(); + } + + ULONG PrivateOutputCallback::AddRef() + { + ++Count; + return Count; + } + + ULONG PrivateOutputCallback::Release() + { + ULONG LocalCount = --Count; + if (!LocalCount) + { + delete this; + } + return LocalCount; + } + + HRESULT PrivateOutputCallback::QueryInterface(REFIID InRIID, void **OutObject) + { + return E_FAIL; + } + + HRESULT PrivateOutputCallback::ScheduledFrameCompleted(IDeckLinkVideoFrame* InCompletedFrame, BMDOutputFrameCompletionResult InResult) + { + return Port->ScheduledFrameCompleted(); + } + + HRESULT PrivateOutputCallback::ScheduledPlaybackHasStopped() + { + return E_FAIL; + } + + /* + * Input/Output Video port (C++ Object) + */ + + PrivatePort::PrivatePort(PrivateDevice* InDevice, FUInt InPortIndex, bool InInput) + : Started(0) + , bInitializationCompleted(false) + , AudioChannels(2) + , AudioSampleRate(48000) + , Input(InInput) + , Output(!Input) + , Device(InDevice) + , PortIndex(InPortIndex) + , SupportedDesc(nullptr) + { + AddRef(); + } + + PrivatePort::~PrivatePort() + { + Device->ReleasePort(this); + } + + bool PrivatePort::Share(const FPortOptions& InOptions) + { + Thread::FAutoLock AutoLock(Lock); + // can't share output + if (Output) + { + LOG_ERROR(TEXT("Can't share output on port %d\n"), PortIndex); + return false; + } + // using timecode + if (InOptions.bUseTimecode) + { + Options.bUseTimecode = true; + } + // using video + if (InOptions.bUseVideo || InOptions.bUseAudio) + { + if (Options.bUseVideo) + { + LOG_ERROR(TEXT("Can't share video frames on port %d\n"), PortIndex); + return false; + } + Options.bUseVideo = true; + Options.bUseAudio = true; + Options.bUseCallback = InOptions.bUseCallback; + } + bInitializationCompleted = false; + return true; + } + + bool PrivatePort::Unshare(const FPortOptions& InOptions) + { + Thread::FAutoLock AutoLock(Lock); + // can't share output + if (Output) + { + 
LOG_ERROR(TEXT("Shoudn't share output on port %d\n"), PortIndex); + return false; + } + // using video + if (InOptions.bUseVideo) + { + Options.bUseVideo = false; + } + return true; + } + + + bool PrivatePort::Init(const FFrameDesc& InFrameDesc, const FPortOptions& InOptions) + { + // Sharing a port + if (DeckLinkInput || DeckLinkOutput) + { + return Share(InOptions); + } + + Thread::FAutoLock AutoLock(Lock); + assert(Device); + + // Are we running + + Options = InOptions; + FrameDesc = InFrameDesc; + VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + + if ((SupportedDesc = GetSupportedDescription(InFrameDesc)) == nullptr) + { + LOG_ERROR(TEXT("Unsupported mode %s %f\n"), FrameInfo.FormatName, FrameInfo.FrameRate); + return false; + } + + if (Input) + { + ComCheck(Device->QueryInterface(DeckLinkInput)); + + InputFlags = bmdVideoInputFlagDefault; + + if (InFrameDesc.FrameFormat == EFrameFormat::FF_AUTO) + { + InputFlags |= bmdVideoInputEnableFormatDetection; + } + else + { + // validate mode against the hardware + BMDDisplayModeSupport bSupported = bmdDisplayModeNotSupported; + ComCheck(DeckLinkInput->DoesSupportVideoMode(SupportedDesc->DisplayMode, SupportedDesc->PixelFormat, bmdVideoInputFlagDefault, &bSupported, nullptr)); + if (bSupported == bmdDisplayModeNotSupported) + { + LOG_ERROR(TEXT("Invalid Frame Desciption, open port failed\n")); + return false; + } + } + + // seems ok, lets use it + ComCheck(DeckLinkInput->EnableVideoInput(SupportedDesc->DisplayMode, SupportedDesc->PixelFormat, InputFlags)); + + // if using Audio + AudioChannels = Options.AudioChannels; + if (!(AudioChannels == 2 + || AudioChannels == 8)) + { + AudioChannels = 2; + LOG_WARNING(TEXT("ConfigureAudio: Changed number of audio channel to %d.\n"), AudioChannels); + } + + AudioSampleRate = 48000; + ComCheck(DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, AudioChannels)); + } + else + { + // Try and take the output after configure the mode? 
+ ComCheck(Device->QueryInterface(DeckLinkOutput)); + + // validate mode against the hardware + BMDDisplayModeSupport bSupported = bmdDisplayModeNotSupported; + ComCheck(DeckLinkOutput->DoesSupportVideoMode(SupportedDesc->DisplayMode, SupportedDesc->PixelFormat, bmdVideoOutputFlagDefault, &bSupported, nullptr)); + if (bSupported == bmdDisplayModeNotSupported) + { + DeckLinkOutput.Reset(); + return false; + } + + // if bOutputKey, enable the output keyer + if (Options.bOutputKey) + { + ComCheck(Device->QueryInterface(DeckLinkKeyer)); + DeckLinkKeyer->Enable(true); + DeckLinkKeyer->SetLevel(255); + } + + BMDVideoOutputFlags VideoOutputFlags = bmdVideoOutputFlagDefault; + if (Options.bUseTimecode) + { + VideoOutputFlags = bmdVideoOutputRP188; + } + ComCheck(DeckLinkOutput->EnableVideoOutput(SupportedDesc->DisplayMode, VideoOutputFlags)); + } + + return true; + } + + bool PrivatePort::Deinit(const FPortOptions& InOptions) + { + Unshare(InOptions); + if (!Stop()) + { + return false; + } + Thread::FAutoLock AutoLock(Lock); + if (Input) + { + ComCheck(DeckLinkInput->DisableVideoInput()); + ComCheck(DeckLinkInput->SetCallback(nullptr)); + while (PeekFrame()) + { + PrivateFrame* Frame = reinterpret_cast(WaitFrame()); + ReleaseFrame(Frame); + } + DeckLinkInput.Reset(); + } + else + { + ComCheck(DeckLinkOutput->DisableVideoOutput()); + ComCheck(DeckLinkOutput->SetScheduledFrameCompletionCallback(nullptr)); + DeckLinkOutput.Reset(); + } + return true; + } + + FUInt PrivatePort::FrameSize() + { + return FrameInfo.Width * FrameInfo.Height * FrameInfo.BytesPerPixel; + } + + FUInt PrivatePort::FrameDimensions(FUInt& OutWidth, FUInt& OutHeight) + { + OutWidth = FrameInfo.Width; + OutWidth = FrameInfo.Height; + return FrameInfo.Width * FrameInfo.Height * FrameInfo.BytesPerPixel; + } + + bool PrivatePort::Start(FUInt InFrames) + { + Thread::FAutoLock AutoLock(Lock); + ++Started; + if (Started != 1) + { + return true; + } + if (Input) + { + if (!Frames) + { + Frames = 
std::make_unique(InFrames); + // worst case 24fps, because the mode can change. + FUInt FrameSamples = static_cast(AudioSampleRate / (24.0f - 1.0f)); + AudioFrames = std::make_unique(FrameSamples * AudioChannels * InFrames); + + for (FUInt i = 0; i < InFrames; ++i) + { + Frames[i].AudioFrame = &AudioFrames[i * FrameSamples * AudioChannels]; + Frames[i].AudioSamples = FrameSamples; + Frames[i].ActiveAudioSamples = 0; + FreeFrames.Send(&Frames[i]); + } + } + // set the frame complete call back + InputHandler = new PrivateInputCallback(this); + ComCheck(DeckLinkInput->SetCallback(InputHandler)); + ComCheck(DeckLinkInput->FlushStreams()); + ComCheck(DeckLinkInput->StartStreams()); + return true; + } + else + { + // 60fps + OutputTime = 0; + + if (!Frames) + { + Frames = std::make_unique(InFrames); + for (FUInt i = 0; i < InFrames; ++i) + { + // This requires the resolution + ComCheck(DeckLinkOutput->CreateVideoFrame(FrameInfo.Width, FrameInfo.Height, FrameInfo.Width*FrameInfo.BytesPerPixel, bmdFormat8BitBGRA, bmdFrameFlagDefault, Frames[i].DeckLinkMutableVideoFrame)); + FreeFrames.Send(&Frames[i]); + } + } + + // connect the callback + OutputHandler = new PrivateOutputCallback(this); + ComCheck(DeckLinkOutput->SetScheduledFrameCompletionCallback(OutputHandler)); + + // Send first frame to start the pipe running + PrivateFrame* Frame = LIST_LISTOF(PrivateFrame, MessageList, FreeFrames.Read()); + Frame->PrivatePort = this; + InFlightFrames.Send(Frame); + OutputTime += FrameInfo.TimeValue; + ComCheck(DeckLinkOutput->ScheduleVideoFrame(Frame->DeckLinkMutableVideoFrame, OutputTime, FrameInfo.TimeValue, FrameInfo.TimeScale)); + ComCheck(DeckLinkOutput->StartScheduledPlayback(OutputTime, FrameInfo.TimeScale, 1.0)); + return true; + } + return false; + } + + bool PrivatePort::Stop() + { + Thread::FAutoLock AutoLock(Lock); + --Started; + if (Started) + { + return false; + } + if (Input) + { + ComCheck(DeckLinkInput->StopStreams()); + } + else + { + 
ComCheck(DeckLinkOutput->StopScheduledPlayback(OutputTime + FrameInfo.TimeValue, nullptr, FrameInfo.TimeScale)); + } + return true; + } + + bool PrivatePort::WaitVSync() + { + Thread::FAutoLock Lock(VSyncLock); + if (Input) + { + VSyncEvent.Wait(VSyncLock, 50); + return true; + } + else + { + if (InFlightFrames.Peek()) + { + VSyncEvent.Wait(VSyncLock, 50); + return true; + } + } + return false; + } + + bool PrivatePort::PeekFrame() + { + if (Input) + { + return FullFrames.Peek(); + } + else + { + return FreeFrames.Peek(); + } + } + + FFrame PrivatePort::WaitFrame() + { + if (Input) + { + PrivateFrame* Frame = LIST_LISTOF(PrivateFrame, MessageList, FullFrames.Read()); + Frame->PrivatePort = this; + return Frame; + } + else + { + PrivateFrame* Frame = LIST_LISTOF(PrivateFrame, MessageList, FreeFrames.Read()); + Frame->PrivatePort = this; + return Frame; + } + } + + void PrivatePort::ReleaseFrame(PrivateFrame* InFrame) + { + // Guard, as frame might hold last reference count + RefPointer Port(InFrame->PrivatePort); + if (Input) + { + InFrame->PrivatePort.Reset(); + InFrame->DeckLinkVideoInputFrame.Reset(); + Port->FreeFrames.Send(InFrame); + } + else + { + InFrame->PrivatePort.Reset(); + InFlightFrames.Send(InFrame); + OutputTime += FrameInfo.TimeValue; + if (Port->Options.bUseTimecode) + { + BMDTimecodeFlags Flags = bmdTimecodeFlagDefault; + Flags += InFrame->Timecode.bField ? bmdTimecodeFieldMark : 0; + Flags += InFrame->Timecode.bIsDropFrame ? 
bmdTimecodeIsDropFrame : 0; + InFrame->DeckLinkMutableVideoFrame->SetTimecodeFromComponents(bmdTimecodeRP188LTC, InFrame->Timecode.Hours, InFrame->Timecode.Minutes, InFrame->Timecode.Seconds, InFrame->Timecode.Frames, Flags); + } + ComCheck(DeckLinkOutput->ScheduleVideoFrame(InFrame->DeckLinkMutableVideoFrame, OutputTime, FrameInfo.TimeValue, FrameInfo.TimeScale)); + } + } + + FUInt PrivatePort::DropCount() const + { + return DroppedFrames; + } + + bool PrivatePort::IsInput() const + { + return Input; + } + + bool PrivatePort::IsOutput() const + { + return Output; + } + + void PrivatePort::GetAudioFormat(FUInt& OutChannels, FUInt& OutSampleRate) const + { + OutChannels = AudioChannels; + OutSampleRate = AudioSampleRate; + } + + bool PrivatePort::IsPixelFormat(EPixelFormat InFormat) const + { + return FrameDesc.PixelFormat == InFormat; + } + + FUInt PrivatePort::GetPortIndex() const + { + return PortIndex; + } + + PrivateDevice* PrivatePort::GetDevice() + { + return Device; + } + + bool PrivatePort::GetTimecode(FTimecode& OutTimecode) const + { + OutTimecode = Timecode; + return bInitializationCompleted; + } + + bool PrivatePort::AddCallback(IPortCallback* InCallback) + { + Thread::FAutoLock Lock(VSyncLock); + PortCallbacks.push_back(InCallback); + return true; + } + + bool PrivatePort::RemCallback(IPortCallback* InCallback) + { + Thread::FAutoLock AutoLock(Lock); + auto Callback = std::find_if(PortCallbacks.begin(), PortCallbacks.end(), [InCallback](IPortCallback* Callback) { return InCallback == Callback; }); + if (Callback == PortCallbacks.end()) + { + return false; + } + PortCallbacks.erase(Callback); + return true; + } + + bool PrivatePort::InvokeInitializationCompleted(bool bSucceed) + { + Thread::FAutoLock AutoLock(Lock); + for (auto i = PortCallbacks.begin(); i != PortCallbacks.end(); ++i) + { + (*i)->OnInitializationCompleted(bSucceed); + } + return true; + } + + // Only one callback client can receive, and keep, + // the frame, stop at first one that 
reports it will + // keep it. + bool PrivatePort::InvokeOnFrameArrived(FFrame InFrame) + { + Thread::FAutoLock AutoLock(Lock); + for (auto i = PortCallbacks.begin(); i != PortCallbacks.end(); ++i) + { + if ((*i)->OnFrameArrived(InFrame)) + { + return true; + } + } + return false; + } + + // Callback from capture card to deliver video/audio frames + HRESULT PrivatePort::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents InNotificationEvents, IDeckLinkDisplayMode *InNewDisplayMode, BMDDetectedVideoInputFormatFlags InDetectedSignalFlags) + { + BMDDisplayMode DisplayMode = InNewDisplayMode->GetDisplayMode(); + + if ((SupportedDesc = GetSupportedDescription(DisplayMode)) != nullptr) { + FrameDesc = SupportedDesc->FrameDesc; + VideoIOFrameDesc2Info(FrameDesc, FrameInfo); + + if (DeckLinkInput) { + // Restart the video/audio + ComCheck(DeckLinkInput->PauseStreams()); + ComCheck(DeckLinkInput->FlushStreams()); + + ComCheck(DeckLinkInput->EnableVideoInput(SupportedDesc->DisplayMode, SupportedDesc->PixelFormat, InputFlags)); + if (Options.bUseAudio) + { + ComCheck(DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, AudioChannels)); + } + + ComCheck(DeckLinkInput->StartStreams()); + } + } + else + { + LOG_ERROR(TEXT("Unsupported video input format")); + } + + return S_OK; + } + + // Callback from capture card to notify that input mode has changed + HRESULT PrivatePort::VideoInputFrameArrived(IDeckLinkVideoInputFrame* InVideoFrame, IDeckLinkAudioInputPacket* InAudioPacket) + { + bool bHaveFrameTimecode = false; + uint8_t Hours, Minutes, Seconds, Frames; + + if (InVideoFrame && !bInitializationCompleted) + { + bInitializationCompleted = true; + InvokeInitializationCompleted(true); + } + + if (InVideoFrame) + { + RefPointer DeckLinkTimecode; + HRESULT Error = InVideoFrame->GetTimecode(bmdTimecodeRP188LTC, DeckLinkTimecode); + if (DeckLinkTimecode) + { + ComCheck(DeckLinkTimecode->GetComponents(&Hours, &Minutes, &Seconds, &Frames)); + 
Timecode.Hours = Hours; + Timecode.Minutes = Minutes; + Timecode.Seconds = Seconds; + Timecode.Frames = Frames; + + // get the extra timecode flags + BMDTimecodeFlags TimecodeFlags = DeckLinkTimecode->GetFlags(); + // we don't pull color framing here, its an analogue feature + // that I don't think is needed anymore. + Timecode.bField = (TimecodeFlags&bmdTimecodeFieldMark) != 0; + Timecode.bIsDropFrame = (TimecodeFlags&bmdTimecodeIsDropFrame) != 0; + bHaveFrameTimecode = true; + } + if (Options.bUseVideo && FreeFrames.Peek()) + { + PrivateFrame* Frame = LIST_LISTOF(PrivateFrame, MessageList, FreeFrames.Read()); + Frame->PrivatePort = this; + Frame->DeckLinkVideoInputFrame = InVideoFrame; + + if (InAudioPacket) + { + Frame->ActiveAudioSamples = InAudioPacket->GetSampleFrameCount(); + if (Frame->ActiveAudioSamples <= Frame->AudioSamples) + { + void *Buffer = nullptr; + ComCheck(InAudioPacket->GetBytes(&Buffer)); + if (Buffer) + { + ::memcpy(Frame->AudioFrame, Buffer, Frame->ActiveAudioSamples * AudioChannels * sizeof(int32_t)); + } + } + else + { + Frame->ActiveAudioSamples = 0; + } + } + else + { + Frame->ActiveAudioSamples = 0; + } + + if (bHaveFrameTimecode) + { + Frame->Timecode.Hours = Hours; + Frame->Timecode.Minutes = Minutes; + Frame->Timecode.Seconds = Seconds; + Frame->Timecode.Frames = Frames; + } + { + Frame->FrameDesc = FrameDesc; + } + + // if no-one keeps the frame, we can free it + if (Options.bUseCallback) + { + if (!InvokeOnFrameArrived(Frame)) + { + Frame->PrivatePort.Reset(); + Frame->DeckLinkVideoInputFrame.Reset(); + FreeFrames.Send(Frame); + } + } + else + { + FullFrames.Send(Frame); + } + } + else + { + ++DroppedFrames; + } + } + VSyncEvent.Signal(); + return S_OK; + } + + HRESULT PrivatePort::ScheduledFrameCompleted() + { + PrivateFrame* Frame = LIST_LISTOF(PrivateFrame, MessageList, InFlightFrames.Read()); + FreeFrames.Send(Frame); + VSyncEvent.Signal(); + return S_OK; + } +}; \ No newline at end of file diff --git 
a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.h new file mode 100644 index 000000000000..6a48fb8d87ea --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/PrivatePort.h @@ -0,0 +1,156 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +template class RefPointer; +struct IDeckLinkInput; + +namespace BlackmagicDevice +{ + + class PrivateDevice; + + /* + * Input/Output Video port + */ + class PrivatePort; + + /* + * Frame arrived callback + */ + class PrivateInputCallback : public IDeckLinkInputCallback + { + public: + PrivateInputCallback(PrivatePort* InPort); + + ULONG AddRef() override; + ULONG Release() override; + HRESULT QueryInterface(REFIID InRIID, void **OutObject) override; + + HRESULT VideoInputFormatChanged(BMDVideoInputFormatChangedEvents InNotificationEvents, IDeckLinkDisplayMode *InNewDisplayMode, BMDDetectedVideoInputFormatFlags InDetectedSignalFlags) override; + HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame* InVideoFrame, IDeckLinkAudioInputPacket* InAudioPacket) override; + protected: + Thread::FAtomic Count; + // weak pointer + PrivatePort* Port; + }; + + class PrivateOutputCallback : public IDeckLinkVideoOutputCallback + { + public: + PrivateOutputCallback(PrivatePort* InPort); + + ULONG AddRef() override; + ULONG Release() override; + HRESULT QueryInterface(REFIID InRIID, void **OutObject) override; + + HRESULT ScheduledFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result) override; + HRESULT ScheduledPlaybackHasStopped() override; + + protected: + Thread::FAtomic Count; + // weak pointer + PrivatePort* Port; + }; + + struct FSupportedDescription; + + class PrivatePort : public RefCount + { + public: + PrivatePort(PrivateDevice* InDevice, FUInt InPortIndex, bool InDirection); + ~PrivatePort(); + + bool Init(const 
FFrameDesc& InFrameDesc, const FPortOptions& InOptions); + bool Deinit(const FPortOptions& InOptions); + + FUInt FrameSize(); + FUInt FrameDimensions(FUInt& OutWidth, FUInt& OutHeight); + + bool Start(FUInt InFrames); + bool Stop(); + + bool WaitVSync(); + + bool PeekFrame(); + FFrame WaitFrame(); + void ReleaseFrame(PrivateFrame* InFrame); + + FUInt DropCount() const; + + HRESULT VideoInputFormatChanged(BMDVideoInputFormatChangedEvents InNotificationEvents, IDeckLinkDisplayMode *InNewDisplayMode, BMDDetectedVideoInputFormatFlags InDetectedSignalFlags); + HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame* InVideoFrame, IDeckLinkAudioInputPacket* InAudioPacket); + HRESULT ScheduledFrameCompleted(); + + bool IsInput() const; + bool IsOutput() const; + void GetAudioFormat(FUInt& OutChannels, FUInt& OutSampleRate) const; + bool IsPixelFormat(EPixelFormat InFormat) const; + FUInt GetPortIndex() const; + PrivateDevice* GetDevice(); + + bool GetTimecode(FTimecode& outTimecode) const; + + bool AddCallback(IPortCallback* InCallback); + bool RemCallback(IPortCallback* InCallback); + + protected: + bool Share(const FPortOptions& InOptions); + bool Unshare(const FPortOptions& InOptions); + + protected: + bool bInitializationCompleted; + bool InvokeInitializationCompleted(bool bSucceed); + bool InvokeOnFrameArrived(FFrame InFrame); + + protected: + + Thread::FLock Lock; + FUInt Started; + + FPortOptions Options; + + FUInt AudioChannels; + FUInt AudioSampleRate; + + bool Input; + bool Output; + + FTimecode Timecode; + + BMDTimeValue OutputTime; + BMDTimeValue OutputFrameTime; + BMDTimeScale OutputScale; + + RefPointer Device; + + FUInt PortIndex; + + FSupportedDescription* SupportedDesc; + FFrameDesc FrameDesc; + FFrameInfo FrameInfo; + + BMDVideoInputFlags InputFlags; + RefPointer DeckLinkInput; + RefPointer InputHandler; + + RefPointer DeckLinkOutput; + RefPointer OutputHandler; + + RefPointer DeckLinkKeyer; + + std::unique_ptr Frames; + std::unique_ptr AudioFrames; + + 
Thread::FMailbox FreeFrames; + Thread::FMailbox InFlightFrames; + Thread::FMailbox FullFrames; + + volatile FUInt DroppedFrames; + Thread::FLock VSyncLock; + Thread::FEvent VSyncEvent; + + std::vector PortCallbacks; + }; + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/RefCount.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/RefCount.h new file mode 100644 index 000000000000..bfc941900a80 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/RefCount.h @@ -0,0 +1,130 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +/* + * Simple Reference count base class + */ +class RefCount +{ +public: + virtual ~RefCount() {}; + virtual int AddRef(void) + { + ++Count; + return Count; + } + virtual int Release(void) + { + // copied to avoid the delete + int LocalCount = --Count; + if (!LocalCount) + { + delete this; + } + return LocalCount; + } +protected: + Thread::FAtomic Count; +}; + +/* + * AutoPointer to manage Reference counted pointers + */ +template +class RefPointer +{ +public: + RefPointer() + : Pointer(nullptr) + { + } + RefPointer(const RefPointer& InAutoPointer) + : Pointer(InAutoPointer.Pointer) + { + if (Pointer) + { + Pointer->AddRef(); + } + } + RefPointer(T* InPointer) + : Pointer(InPointer) + { + if (Pointer) + { + Pointer->AddRef(); + } + } + RefPointer(T&& InAutoPointer) + : Pointer(InAutoPointer.Pointer) + { + InAutoPointer.Pointer = nullptr; + } + RefPointer& operator=(const RefPointer& InAutoPointer) + { + Reset(); + Pointer = InAutoPointer.Pointer; + if (Pointer) + { + Pointer->AddRef(); + } + return *this; + } + RefPointer& operator=(T* InPointer) + { + Reset(); + Pointer = InPointer; + if (Pointer) + { + Pointer->AddRef(); + } + return *this; + } + RefPointer& operator=(const RefPointer&& InAutoPointer) + { + Reset(); + Pointer = InAutoPointer.Pointer; + InAutoPointer.Pointer = nullptr; + return *this; 
+ } + ~RefPointer() + { + Reset(); + } + T* operator->() + { + return Pointer; + } + void Reset(void) + { + if (Pointer) + { + Pointer->Release(); + Pointer = nullptr; + } + } + T* Get() + { + return Pointer; + } + operator T*() + { + return Pointer; + } + operator void**() + { + Reset(); + return reinterpret_cast(&Pointer); + } + operator T**() + { + Reset(); + return &Pointer; + } + operator bool() + { + return Pointer != nullptr; + } +protected: + T* Pointer; +}; diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/Thread.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/Thread.h new file mode 100644 index 000000000000..d794fed5721f --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/Thread.h @@ -0,0 +1,159 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "List.h" + +namespace Thread +{ + class FAtomic + { + public: + FAtomic() + : Count(0) + { + } + FAtomic& operator ++() + { + ::InterlockedIncrement(&Count); + return *this; + } + FAtomic& operator --() + { + ::InterlockedDecrement(&Count); + return *this; + } + operator int() + { + return static_cast(Count); + } + protected: + LONG Count; + }; + + /* + * Simple lock + */ + class FEvent; + class FLock + { + public: + friend FEvent; + FLock() + { + ::InitializeCriticalSection(&CriticalSection); + } + ~FLock() + { + ::DeleteCriticalSection(&CriticalSection); + } + void Lock(void) + { + EnterCriticalSection(&CriticalSection); + } + void Unlock(void) + { + LeaveCriticalSection(&CriticalSection); + } + protected: + CRITICAL_SECTION CriticalSection; + }; + /* + * AutoLock to manage locks + */ + class FAutoLock + { + public: + FAutoLock(FLock& InLock) + : Lock(InLock) + { + Lock.Lock(); + } + ~FAutoLock() + { + Lock.Unlock(); + } + protected: + FLock& Lock; + }; + /* + * Simple Event + */ + class FEvent + { + friend FLock; + public: + FEvent() + { + 
InitializeConditionVariable(&ConditionVariable); + } + void Wait(FLock& InLock, uint32_t InMilliseconds = INFINITE) + { + SleepConditionVariableCS(&ConditionVariable, &InLock.CriticalSection, InMilliseconds); + } + void Signal(void) + { + WakeConditionVariable(&ConditionVariable); + } + void SignalAll(void) + { + WakeAllConditionVariable(&ConditionVariable); + } + protected: + CONDITION_VARIABLE ConditionVariable; + }; + /* + * base of message + */ + class FMessage + { + public: + ListNode MessageList; + int MessageType; + }; + /* + * Thread safe message queue + */ + class FMailbox + { + public: + FMailbox() + : Count(0) + { + } + ~FMailbox() + { + } + void Send(FMessage* InMessage) + { + FAutoLock AutoLock(Lock); + List.AddTail(InMessage->MessageList); + ++Count; + Event.Signal(); + } + bool Peek() + { + return !List.IsEmpty(); + } + FMessage* Read() + { + FAutoLock AutoLock(Lock); + while (List.IsEmpty()) + { + Event.Wait(Lock); + } + FMessage* OutMessage = LIST_LISTOF(FMessage, MessageList, List.RemHead()); + --Count; + return OutMessage; + } + int GetCount() + { + return Count; + } + protected: + int Count; + FLock Lock; + FEvent Event; + ListHead List; + }; +} diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIO.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIO.cpp new file mode 100644 index 000000000000..b1ce0c02feba --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIO.cpp @@ -0,0 +1,383 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "stdafx.h" + +namespace BlackmagicDevice +{ + static struct FFrameFormatInfo + { + FUInt Width; + FUInt Height; + FUInt RatioWidth; + FUInt RatioHeight; + const wchar_t* FormatName; + } FrameFormatInfo[] = { + { 720, 576, 4, 3, TEXT("PALI"), }, + { 720, 480, 4, 3, TEXT("NTSCI"), }, + { 720, 486, 4, 3, TEXT("NTSCP"), }, + { 1280, 720, 16, 9, TEXT("720p"), }, + { 1920, 1080, 16, 9, TEXT("1080i"), }, + { 1920, 1080, 16, 9, TEXT("1080p"), }, + /** added for auto */ + { 0, 0, 1, 1, TEXT("Automatic"), }, + }; + + static struct FPixelFormatInfo + { + EPixelFormat PixelFormat; + const wchar_t* FormatName; + } PixelFormatInfo[] = { + { EPixelFormat::PF_UYVY, TEXT("YUV"), }, + { EPixelFormat::PF_ARGB, TEXT("RGBA"), }, + }; + + static struct FFrameRateInfo + { + /** Is this a frame rate thats normal uses a drop timecode format */ + bool DropFrame; + + /** The actual framerate clock */ + float FrameRate; + + /** Framerate for encoding the drop timecode rate */ + float RootFrameRate; + + /** Clocks per Second */ + FUInt TimeScale; + + /** Clocks per Frame */ + FUInt TimeValue; + + /** Textual format */ + const wchar_t* FormatName; + + } FrameRateInfo[] = { + { true, 23.98f, 24.00f, 24000, 1001, TEXT("23.98fps"), }, + { false, 24.00f, 24.00f, 24000, 1000, TEXT("24fps"), }, + { false, 25.00f, 25.00f, 25000, 1000, TEXT("25fps"), }, + { true, 29.97f, 30.00f, 30000, 1001, TEXT("29.97fps"), }, + { false, 30.00f, 30.00f, 30000, 1000, TEXT("30fps"), }, + { false, 50.00f, 50.00f, 50000, 1000, TEXT("50fps"), }, + { true, 59.94f, 60.00f, 60000, 1001, TEXT("59.94fps"), }, + { false, 60.00f, 60.00f, 60000, 1000, TEXT("60fps"), }, + /** added for Auto */ + { false, 60.00f, 60.00f, 60, 1, TEXT(""), }, + }; + + static FSupportedDescription SupportedDescription[] = { + {{ EFrameFormat::FF_AUTO, EPixelFormat::PF_UYVY, EFrameRate::FR_AUTO, }, bmdModeHD1080p6000, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, // Real NTSC + + {{ EFrameFormat::FF_NTSCI, EPixelFormat::PF_UYVY, 
EFrameRate::FR_2997, }, bmdModeNTSC, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, // Real NTSC + {{ EFrameFormat::FF_PALI, EPixelFormat::PF_UYVY, EFrameRate::FR_2500, }, bmdModePAL, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, // Real PAL + + {{ EFrameFormat::FF_720P, EPixelFormat::PF_UYVY, EFrameRate::FR_5000, }, bmdModeHD720p50, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_720P, EPixelFormat::PF_UYVY, EFrameRate::FR_5994, }, bmdModeHD720p5994, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_720P, EPixelFormat::PF_UYVY, EFrameRate::FR_6000, }, bmdModeHD720p60, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_UYVY, EFrameRate::FR_5000, }, bmdModeHD1080i50, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_UYVY, EFrameRate::FR_5994, }, bmdModeHD1080i5994, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_UYVY, EFrameRate::FR_6000, }, bmdModeHD1080i6000, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_2398, }, bmdModeHD1080p2398, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_2400, }, bmdModeHD1080p24, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_2500, }, bmdModeHD1080p25, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_2997, }, bmdModeHD1080p2997, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_3000, }, bmdModeHD1080p30, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_5000, }, bmdModeHD1080p50, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_5994, }, bmdModeHD1080p5994, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + {{ 
EFrameFormat::FF_1080P, EPixelFormat::PF_UYVY, EFrameRate::FR_6000, }, bmdModeHD1080p6000, bmdFormat8BitYUV, EVIDEOIO_SD_INPUT, }, + + {{ EFrameFormat::FF_NTSCI, EPixelFormat::PF_ARGB, EFrameRate::FR_2997, }, bmdModeNTSC, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, // Real NTSC + {{ EFrameFormat::FF_PALI, EPixelFormat::PF_ARGB, EFrameRate::FR_2500, }, bmdModePAL, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, // Real PAL + + {{ EFrameFormat::FF_720P, EPixelFormat::PF_ARGB, EFrameRate::FR_5000, }, bmdModeHD720p50, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_720P, EPixelFormat::PF_ARGB, EFrameRate::FR_5994, }, bmdModeHD720p5994, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_720P, EPixelFormat::PF_ARGB, EFrameRate::FR_6000, }, bmdModeHD720p60, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_ARGB, EFrameRate::FR_5000, }, bmdModeHD1080i50, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_ARGB, EFrameRate::FR_5994, }, bmdModeHD1080i5994, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080I, EPixelFormat::PF_ARGB, EFrameRate::FR_6000, }, bmdModeHD1080i6000, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_2398, }, bmdModeHD1080p2398, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_2400, }, bmdModeHD1080p24, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_2500, }, bmdModeHD1080p25, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_2997, }, bmdModeHD1080p2997, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_3000, }, bmdModeHD1080p30, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_5000, }, 
bmdModeHD1080p50, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_5994, }, bmdModeHD1080p5994, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + {{ EFrameFormat::FF_1080P, EPixelFormat::PF_ARGB, EFrameRate::FR_6000, }, bmdModeHD1080p6000, bmdFormat8BitARGB, EVIDEOIO_SD_OUTPUT, }, + }; + + FSupportedDescription* GetSupportedDescription(BMDDisplayMode InDisplayMode) + { + for (int i = 0; i < (sizeof(SupportedDescription) / sizeof(FSupportedDescription)); i++) + { + if (SupportedDescription[i].DisplayMode == InDisplayMode) + { + return SupportedDescription + i; + } + } + return nullptr; + } + + FSupportedDescription* GetSupportedDescription(const FFrameDesc& InFrameDesc) + { + for (int i = 0; i < (sizeof(SupportedDescription) / sizeof(FSupportedDescription)); i++) + { + if (SupportedDescription[i].FrameDesc == InFrameDesc) + { + return SupportedDescription + i; + } + } + return nullptr; + } + + VIDEOIO_API bool VideoIOFrameDescSupported(const FFrameDesc& InFrameDesc) + { + return (GetSupportedDescription(InFrameDesc) != nullptr); + } + + template + static T TClamp(T In, T InMin, T InMax) + { + if (In < InMin) + { + return InMin; + } + if (InMax < In) + { + return InMax; + } + return In; + } + + static bool ClenseFrameDesc(const FFrameDesc& inFrameDesc, FFrameDesc& OutFrameDesc) + { + OutFrameDesc.FrameFormat = TClamp(inFrameDesc.FrameFormat, EFrameFormat::FF_PALI, EFrameFormat::FF_AUTO); + OutFrameDesc.PixelFormat = TClamp(inFrameDesc.PixelFormat, EPixelFormat::PF_UYVY, EPixelFormat::PF_ARGB); + OutFrameDesc.FrameRate = TClamp(inFrameDesc.FrameRate, EFrameRate::FR_2398, EFrameRate::FR_AUTO); + return inFrameDesc == OutFrameDesc; + } + + VIDEOIO_API bool VideoIOFrameDesc2Info(const FFrameDesc& InFrameDesc, FFrameInfo& OutFrameInfo) + { + FFrameDesc FrameDesc; + ClenseFrameDesc(InFrameDesc, FrameDesc); + + OutFrameInfo.DropFrame = FrameRateInfo[static_cast(FrameDesc.FrameRate)].DropFrame; + OutFrameInfo.FrameRate = 
FrameRateInfo[static_cast(FrameDesc.FrameRate)].FrameRate; + OutFrameInfo.RootFrameRate = FrameRateInfo[static_cast(FrameDesc.FrameRate)].RootFrameRate; + + OutFrameInfo.TimeScale = FrameRateInfo[static_cast(FrameDesc.FrameRate)].TimeScale; + OutFrameInfo.TimeValue = FrameRateInfo[static_cast(FrameDesc.FrameRate)].TimeValue; + + OutFrameInfo.Width = FrameFormatInfo[static_cast(FrameDesc.FrameFormat)].Width; + OutFrameInfo.Height = FrameFormatInfo[static_cast(FrameDesc.FrameFormat)].Height; + OutFrameInfo.RatioWidth = FrameFormatInfo[static_cast(FrameDesc.FrameFormat)].RatioWidth; + OutFrameInfo.RatioHeight = FrameFormatInfo[static_cast(FrameDesc.FrameFormat)].RatioHeight; + + OutFrameInfo.FormatName = FrameFormatInfo[static_cast(FrameDesc.FrameFormat)].FormatName; + + OutFrameInfo.BytesPerPixel = (FrameDesc.PixelFormat == EPixelFormat::PF_UYVY) ? 2 : 4; + return true; + } + + VIDEOIO_API FUInt VideoIOModeCount() + { + return sizeof(SupportedDescription) / sizeof(FSupportedDescription); + } + + static bool ModeNames(const FFrameDesc& InFrameDesc, TCHAR* OutModeName, FUInt InSize, bool InShort) + { + std::wstringstream StringStream; + if (InShort) + { + StringStream << PixelFormatInfo[static_cast(InFrameDesc.PixelFormat)].FormatName << " " + << FrameFormatInfo[static_cast(InFrameDesc.FrameFormat)].FormatName; + } + else + { + StringStream << PixelFormatInfo[static_cast(InFrameDesc.PixelFormat)].FormatName << " " + << FrameFormatInfo[static_cast(InFrameDesc.FrameFormat)].FormatName << " (" + << FrameFormatInfo[static_cast(InFrameDesc.FrameFormat)].Width << "x" + << FrameFormatInfo[static_cast(InFrameDesc.FrameFormat)].Height << ") " + << FrameRateInfo[static_cast(InFrameDesc.FrameRate)].FormatName; + } + std::wstring String(StringStream.str()); + wcscpy_s(OutModeName, InSize, String.c_str()); + return true; + } + + VIDEOIO_API bool VideoIOFrameDesc2Name(const FFrameDesc& InFrameDesc, TCHAR* OutModeName, FUInt InSize) + { + FFrameDesc FrameDesc; + if (!OutModeName || 
!ClenseFrameDesc(InFrameDesc, FrameDesc)) + { + if (OutModeName) + { + // return empty string; + OutModeName[0] = 0; + } + return false; + } + + return ModeNames(FrameDesc, OutModeName, InSize, true); + } + + VIDEOIO_API bool VideoIOModeNames(FUInt InMode, EModeFilter InModeFilter, TCHAR* OutModeName, FUInt InSize) + { + if (InMode > sizeof(SupportedDescription) / sizeof(FSupportedDescription)) + { + return false; + } + + FSupportedDescription& SupportedDescriptionRef = SupportedDescription[InMode]; + + if (!((InModeFilter == EModeFilter::MF_INPUT && SupportedDescriptionRef.SupportedDirection&EVIDEOIO_SD_INPUT) + || (InModeFilter == EModeFilter::MF_OUTPUT && SupportedDescriptionRef.SupportedDirection&EVIDEOIO_SD_OUTPUT) + || InModeFilter == EModeFilter::MF_BOTH)) + { + return false; + } + + return ModeNames(SupportedDescriptionRef.FrameDesc, OutModeName, InSize, false); + } + + VIDEOIO_API bool VideoIOModeFrameDesc(FUInt InMode, FFrameDesc& OutFrameDesc) + { + if (InMode > sizeof(SupportedDescription) / sizeof(FSupportedDescription)) + { + // out of range, so return the first mode. 
+ OutFrameDesc = SupportedDescription[0].FrameDesc; + return false; + } + OutFrameDesc = SupportedDescription[InMode].FrameDesc; + return true; + } + + /* + * Logging Callbacks + */ + + VIDEOIO_API void VideoIOSetLoggingCallbacks(LoggingCallbackPtr LogInfoFunc, LoggingCallbackPtr LogWarningFunc, LoggingCallbackPtr LogErrorFunc) + { + GLogInfo = LogInfoFunc; + GLogWarning = LogWarningFunc; + GLogError = LogErrorFunc; + } + + /* + * IPortCallback Methods + */ + + IPortCallback::IPortCallback() + { + } + + IPortCallback::~IPortCallback() + { + } + + /* + * Device Scanner + */ + + VIDEOIO_API FDeviceScanner VideoIOCreateDeviceScanner(void) + { + { // Handle CoInitialize + static bool IsInitialized = false; + if (!IsInitialized) + { + ::CoInitializeEx(NULL, COINIT_MULTITHREADED); + IsInitialized = true; + } + } + + return new PrivateDeviceScanner(); + } + + VIDEOIO_API void VideoIOReleaseDeviceScanner(FDeviceScanner InDeviceScanner) + { + PrivateDeviceScanner* DeviceScanner = reinterpret_cast(InDeviceScanner); + DeviceScanner->Release(); + } + + VIDEOIO_API FUInt VideoIODeviceScannerGetNumDevices(FDeviceScanner InDeviceScanner) + { + PrivateDeviceScanner* DeviceScanner = reinterpret_cast(InDeviceScanner); + return DeviceScanner->GetDeviceCount(); + } + + VIDEOIO_API void VideoIODeviceScannerScanHardware(FDeviceScanner InDeviceScanner) + { + } + + VIDEOIO_API FDeviceInfo VideoIODeviceScannerGetDeviceInfo(FDeviceScanner InDeviceScanner, FUInt InDeviceId) + { + PrivateDeviceScanner* DeviceScanner = reinterpret_cast(InDeviceScanner); + RefPointer DeckLink(DeviceScanner->GetDevice(InDeviceId)); + if (DeckLink) + { + return new PrivateDeviceScannerInfo(InDeviceId, DeckLink); + } + return nullptr; + } + + /* + * Device Info + */ + + VIDEOIO_API void VideoIOReleaseDeviceInfo(FDeviceInfo InDeviceInfo) + { + PrivateDeviceScannerInfo* DeviceScannerInfo = reinterpret_cast(InDeviceInfo); + DeviceScannerInfo->Release(); + } + + VIDEOIO_API bool 
VideoIODeviceInfoGetDeviceId(FDeviceInfo InDeviceInfo, TCHAR* OutDeviceId, FUInt InSize) + { + PrivateDeviceScannerInfo* DeviceScannerInfo = reinterpret_cast(InDeviceInfo); + + BSTR DeviceName = 0; + ComCheck(DeviceScannerInfo->GetDevice()->GetDisplayName(&DeviceName)); + wcscpy_s(OutDeviceId, InSize, DeviceName); + ::SysFreeString(DeviceName); + + return false; + } + + VIDEOIO_API FUInt VideoIODeviceInfoGetVidInputs(FDeviceInfo InDeviceInfo) + { + return 1; + } + + VIDEOIO_API FUInt VideoIODeviceInfoGetVidOutputs(FDeviceInfo InDeviceInfo) + { + return 1; + } + + /* + * Device Handling + */ + + VIDEOIO_API FDevice VideoIOCreateDevice(FUInt InDeviceId) + { + return PrivateDeviceCache::GetCache().AquireDevice(InDeviceId); + } + + VIDEOIO_API void VideoIOReleaseDevice(FDevice InDevice) + { + PrivateDevice* Device = reinterpret_cast(InDevice); + Device->Release(); + } + + VIDEOIO_API bool VideoIODeviceIsDeviceReady(FDevice InDevice) + { + return true; + } + + VIDEOIO_API bool VideoIODeviceCanDoCapture(FDevice InDevice) + { + return true; + } + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.cpp new file mode 100644 index 000000000000..7325605499ac --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.cpp @@ -0,0 +1,19 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "stdafx.h" +#include + +#include "VideoIOLog.h" + +/* + * Global logging callbacks + */ + +namespace BlackmagicDevice +{ + + LoggingCallbackPtr GLogInfo = nullptr; + LoggingCallbackPtr GLogWarning = nullptr; + LoggingCallbackPtr GLogError = nullptr; + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.h new file mode 100644 index 000000000000..bb8c3ccc428b --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOLog.h @@ -0,0 +1,49 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "BlackmagicLib.h" +#include "VideoIOLog.h" + +namespace BlackmagicDevice +{ + +extern LoggingCallbackPtr GLogInfo; +extern LoggingCallbackPtr GLogWarning; +extern LoggingCallbackPtr GLogError; + +}; + +#define ENABLE_AJA_LOGGING 1 + +/* + * Wrappers around logging callbacks + */ + +#if ENABLE_AJA_LOGGING == 1 +#define LOG_INFO(Format, ...) \ +{ \ + if(GLogInfo != nullptr) \ + { \ + GLogInfo(Format, ##__VA_ARGS__); \ + } \ +} +#define LOG_WARNING(Format, ...) \ +{ \ + if(GLogWarning != nullptr) \ + { \ + GLogWarning(Format, ##__VA_ARGS__); \ + } \ +} +#define LOG_ERROR(Format, ...) \ +{ \ + if(GLogError != nullptr) \ + { \ + GLogError(Format, ##__VA_ARGS__); \ + } \ +} +#else +#define LOG_INFO(Format, ...) +#define LOG_WARNING(Format, ...) +#define LOG_ERROR(Format, ...) +#endif diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOPrivate.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOPrivate.h new file mode 100644 index 000000000000..c30e2fd1d047 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/VideoIOPrivate.h @@ -0,0 +1,98 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "PrivateDevice.h" +#include "PrivateFrame.h" +#include "PrivatePort.h" +#include "SharedPort.h" + + +struct IDeckLinkInput; + +namespace BlackmagicDevice +{ + + enum ESupportedDirection { + EVIDEOIO_SD_INPUT = 1, + EVIDEOIO_SD_OUTPUT = 2, + }; + + struct FSupportedDescription { + FFrameDesc FrameDesc; + BMDDisplayMode DisplayMode; + BMDPixelFormat PixelFormat; + ESupportedDirection SupportedDirection; + }; + + FSupportedDescription* GetSupportedDescription(BMDDisplayMode InDisplayMode); + FSupportedDescription* GetSupportedDescription(const FFrameDesc& InFrameDesc); + + /* + * Video device scanner + */ + class PrivateDeviceScanner : public RefCount + { + public: + PrivateDeviceScanner() + { + AddRef(); + + RefPointer DeckLinkIterator; + ComCheck(CoCreateInstance(CLSID_CDeckLinkIterator, NULL, CLSCTX_ALL, IID_IDeckLinkIterator, DeckLinkIterator)); + + if (DeckLinkIterator) + { + RefPointer DeckLink; + while (DeckLinkIterator->Next(DeckLink) == S_OK) + { + Devices.push_back(DeckLink); + DeckLink.Reset(); + } + } + } + ~PrivateDeviceScanner() + { + } + FUInt GetDeviceCount(void) + { + return static_cast(Devices.size()); + } + IDeckLink* GetDevice(FUInt InDeviceId) + { + if (InDeviceId < Devices.size()) + { + return Devices[InDeviceId]; + } + return nullptr; + } + protected: + std::vector> Devices; + }; + + /* + * Device scanner info + */ + class PrivateDeviceScannerInfo : public RefCount + { + public: + PrivateDeviceScannerInfo(FUInt InDeviceId, RefPointer& InDeckLink) + : DeviceId(InDeviceId) + , DeckLink(InDeckLink) + { + AddRef(); + } + + ~PrivateDeviceScannerInfo() + { + } + IDeckLink* GetDevice() + { + return DeckLink; + } + protected: + FUInt DeviceId; + RefPointer DeckLink; + }; + +}; \ No newline at end of file diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/dllmain.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/dllmain.cpp new file mode 100644 index 000000000000..5179b64451f0 
--- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/dllmain.cpp @@ -0,0 +1,21 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "stdafx.h" +#include + +BOOL APIENTRY DllMain( HMODULE hModule, + DWORD ul_reason_for_call, + LPVOID lpReserved + ) +{ + switch (ul_reason_for_call) + { + case DLL_PROCESS_ATTACH: + case DLL_THREAD_ATTACH: + case DLL_THREAD_DETACH: + case DLL_PROCESS_DETACH: + break; + } + return TRUE; +} + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.cpp b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.cpp new file mode 100644 index 000000000000..f094ae1d260e --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.cpp @@ -0,0 +1,3 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "stdafx.h" diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.h new file mode 100644 index 000000000000..14ca12cc469b --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/stdafx.h @@ -0,0 +1,37 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "targetver.h" +#include "assert.h" + +#include +#include + +#include +#include +#include + +// Windows Header Files: +#define WIN32_LEAN_AND_MEAN +#include + +#include +#include "../DeckLinkAPI_h.h" + +#ifdef _DEBUG +#define VideoIOCHECK(FUNCTION) if (!FUNCTION) { *reinterpret_cast(0) = 0; } +#define ComCheck(FUNCTION) if ((FUNCTION) != S_OK) { *reinterpret_cast(0) = 0; } +#else +#define VideoIOCHECK(FUNCTION) (FUNCTION) +#define ComCheck(FUNCTION) (FUNCTION) +#endif + +#include "Thread.h" +#include "RefCount.h" + +#include "BlackmagicLib.h" +#include "VideoIOPrivate.h" + +#include "VideoIOLog.h" + diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/targetver.h b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/targetver.h new file mode 100644 index 000000000000..92f6e2571915 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/Source/targetver.h @@ -0,0 +1,10 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +// Including SDKDDKVer.h defines the highest available Windows platform. + +// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and +// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h. 
+ +#include diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.sln b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.sln new file mode 100644 index 000000000000..911a8e40a41e --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.sln @@ -0,0 +1,22 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 14 +VisualStudioVersion = 14.0.25420.1 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "VideoIO", "VideoIO.vcxproj", "{E58C7090-3EBD-459F-B9A3-ED4FC262E0DD}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x64 = Debug|x64 + Release|x64 = Release|x64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {E58C7090-3EBD-459F-B9A3-ED4FC262E0DD}.Debug|x64.ActiveCfg = Debug|x64 + {E58C7090-3EBD-459F-B9A3-ED4FC262E0DD}.Debug|x64.Build.0 = Debug|x64 + {E58C7090-3EBD-459F-B9A3-ED4FC262E0DD}.Release|x64.ActiveCfg = Release|x64 + {E58C7090-3EBD-459F-B9A3-ED4FC262E0DD}.Release|x64.Build.0 = Release|x64 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.vcxproj b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.vcxproj new file mode 100644 index 000000000000..62cd3714f0e7 --- /dev/null +++ b/Engine/Plugins/Media/BlackmagicMedia/Source/ThirdParty/Dll/VideoIO.vcxproj @@ -0,0 +1,134 @@ + + + + + Debug + x64 + + + Release + x64 + + + + {E58C7090-3EBD-459F-B9A3-ED4FC262E0DD} + Win32Proj + VideoIO + 8.1 + + + + DynamicLibrary + true + v140 + Unicode + + + DynamicLibrary + false + v140 + true + Unicode + + + + + + + + + + + + + + + true + ..\..\..\Binaries\ThirdParty\Win64\ + BlackmagicLibd + Intermediate\$(Platform)\$(Configuration)\ + + + false + ..\..\..\Binaries\ThirdParty\Win64\ + 
Intermediate\$(Platform)\$(Configuration)\ + BlackmagicLib + + + + + + Level3 + Disabled + _DEBUG;_WINDOWS;_USRDLL;VIDEOIO_EXPORTS;%(PreprocessorDefinitions) + true + ..\Build\Include;Include;%(AdditionalIncludeDirectories) + + + Windows + true + lib;%(AdditionalLibraryDirectories) + %(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + + + MaxSpeed + true + true + NDEBUG;_WINDOWS;_USRDLL;VIDEOIO_EXPORTS;%(PreprocessorDefinitions) + true + ..\Build\Include;./Include/;%(AdditionalIncludeDirectories) + + + Windows + true + true + true + lib;%(AdditionalLibraryDirectories) + %(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + + + + + + + + + + + + + + + + + + + + + + false + + + false + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaOutput.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaOutput.cpp index 5730612ef8e0..7887da24e4fc 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaOutput.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaOutput.cpp @@ -9,7 +9,8 @@ UProxyMediaOutput::UProxyMediaOutput() - : bValidateGuard(false) + : bLeafMediaOutput(false) + , bValidateGuard(false) , bRequestedSizeGuard(false) , bRequestedPixelFormatGuard(false) , bCreateMediaCaptureImplGuard(false) @@ -96,6 +97,25 @@ UMediaOutput* UProxyMediaOutput::GetMediaOutput() const } +UMediaOutput* UProxyMediaOutput::GetLeafMediaOutput() const +{ + // Guard against reentrant calls. + if (bLeafMediaOutput) + { + UE_LOG(LogMediaFrameworkUtilities, Warning, TEXT("UMediaSourceProxy::GetLeafMediaOutput - Reentrant calls are not supported. 
Asset: %s"), *GetPathName()); + return nullptr; + } + TGuardValue ValidatingGuard(bLeafMediaOutput, true); + + UMediaOutput* MediaOutput = GetMediaOutput(); + if (UProxyMediaOutput* ProxyMediaOutput = Cast(MediaOutput)) + { + MediaOutput = ProxyMediaOutput->GetLeafMediaOutput(); + } + return MediaOutput; +} + + void UProxyMediaOutput::SetDynamicMediaOutput(UMediaOutput* InProxy) { DynamicProxy = (Proxy == InProxy) ? nullptr : InProxy; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaSource.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaSource.cpp index d33bd79c3525..2c9a97933811 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaSource.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaAssets/ProxyMediaSource.cpp @@ -11,6 +11,7 @@ UProxyMediaSource::UProxyMediaSource() : bUrlGuard(false) , bValidateGuard(false) + , bLeafMediaSource(false) , bMediaOptionGuard(false) {} @@ -58,6 +59,25 @@ UMediaSource* UProxyMediaSource::GetMediaSource() const } +UMediaSource* UProxyMediaSource::GetLeafMediaSource() const +{ + // Guard against reentrant calls. + if (bLeafMediaSource) + { + UE_LOG(LogMediaFrameworkUtilities, Warning, TEXT("UMediaSourceProxy::GetLeafMediaSource - Reentrant calls are not supported. Asset: %s"), *GetPathName()); + return nullptr; + } + TGuardValue ValidatingGuard(bLeafMediaSource, true); + + UMediaSource* MediaSource = GetMediaSource(); + if (UProxyMediaSource* ProxyMediaSource = Cast(MediaSource)) + { + MediaSource = ProxyMediaSource->GetLeafMediaSource(); + } + return MediaSource; +} + + void UProxyMediaSource::SetDynamicMediaSource(UMediaSource* InProxy) { DynamicProxy = (Proxy == InProxy) ? 
nullptr : InProxy; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaBundleTimeSynchronizationSource.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaBundleTimeSynchronizationSource.cpp index 2793b29ae9e3..9c46a888f81f 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaBundleTimeSynchronizationSource.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaBundleTimeSynchronizationSource.cpp @@ -17,7 +17,7 @@ void UMediaBundleTimeSynchronizationSource::PostEditChangeProperty(FPropertyChan { if (PropertyChangedEvent.GetPropertyName() == GET_MEMBER_NAME_CHECKED(UMediaBundleTimeSynchronizationSource, MediaBundle)) { - if (bUseForSynchronization && MediaBundle && MediaBundle->GetMediaSource() ) + if (bUseForSynchronization && MediaBundle && MediaBundle->GetMediaSource()) { UTimeSynchronizableMediaSource* SynchronizableMediaSource = Cast(MediaBundle->GetMediaSource()); if (SynchronizableMediaSource == nullptr || SynchronizableMediaSource->bUseTimeSynchronization) @@ -32,9 +32,14 @@ void UMediaBundleTimeSynchronizationSource::PostEditChangeProperty(FPropertyChan } #endif -FFrameTime UMediaBundleTimeSynchronizationSource::GetNextSampleTime() const +static FFrameTime TimeSpanToFrameTime(const FTimespan& Timespan, const FFrameRate& FrameRate) { - FFrameTime NextSampleTime; + return FFrameTime::FromDecimal(Timespan.GetTotalSeconds() * FrameRate.AsDecimal()).RoundToFrame(); +} + +FFrameTime UMediaBundleTimeSynchronizationSource::GetNewestSampleTime() const +{ + TOptional UseTimespan; if (MediaBundle && MediaBundle->GetMediaPlayer() && MediaBundle->GetMediaTexture()) { @@ -44,51 +49,54 @@ FFrameTime UMediaBundleTimeSynchronizationSource::GetNextSampleTime() const //If there is a sample in the Texture, we consider it as the next one to be used/rendered if 
(MediaBundle->GetMediaTexture()->GetAvailableSampleCount() > 0) { - const FTimespan TextureTime = MediaBundle->GetMediaTexture()->GetNextSampleTime(); - NextSampleTime = FFrameTime::FromDecimal(TextureTime.GetTotalSeconds() * GetFrameRate().AsDecimal()).RoundToFrame(); + UseTimespan = MediaBundle->GetMediaTexture()->GetNextSampleTime(); } - else if (Player->GetCache().GetSampleCount(EMediaCacheState::Loaded) > 0) + + if (Player->GetCache().GetSampleCount(EMediaCacheState::Loaded) > 0) { TRangeSet SampleTimes; if (Player->GetCache().QueryCacheState(EMediaCacheState::Loaded, SampleTimes)) { //Fetch the minimum sample time from all ranges queried from the player's cache - TArray> Ranges; - SampleTimes.GetRanges(Ranges); - check(Ranges.Num() > 0); - - TRangeBound MinBound = Ranges[0].GetLowerBound(); - for (const auto& Range : Ranges) - { - MinBound = TRangeBound::MinLower(MinBound, Range.GetLowerBound()); - } - const FTimespan MinSampleTime = MinBound.GetValue(); - - NextSampleTime = FFrameTime::FromDecimal(MinSampleTime.GetTotalSeconds() * GetFrameRate().AsDecimal()).RoundToFrame(); + const FTimespan MinBound = SampleTimes.GetMaxBoundValue(); + UseTimespan = (UseTimespan.IsSet()) ? FMath::Max(MinBound, UseTimespan.GetValue()) : MinBound; } } } } - return NextSampleTime; + return UseTimespan.IsSet() ? 
TimeSpanToFrameTime(UseTimespan.GetValue(), GetFrameRate()) : FFrameTime(); } -int32 UMediaBundleTimeSynchronizationSource::GetAvailableSampleCount() const +FFrameTime UMediaBundleTimeSynchronizationSource::GetOldestSampleTime() const { - int32 AvailableSampleCount = 0; + TOptional UseTimespan; if (MediaBundle && MediaBundle->GetMediaPlayer() && MediaBundle->GetMediaTexture()) { const TSharedPtr& Player = MediaBundle->GetMediaPlayer()->GetPlayerFacade()->GetPlayer(); if (Player.IsValid()) { - const int32 TextureSampleCount = MediaBundle->GetMediaTexture()->GetAvailableSampleCount(); - const int32 PlayerSampleCount = Player->GetCache().GetSampleCount(EMediaCacheState::Loaded); - AvailableSampleCount = TextureSampleCount + PlayerSampleCount; + //If there is a sample in the Texture, we consider it as the next one to be used/rendered + if (MediaBundle->GetMediaTexture()->GetAvailableSampleCount() > 0) + { + UseTimespan = MediaBundle->GetMediaTexture()->GetNextSampleTime(); + } + + if (Player->GetCache().GetSampleCount(EMediaCacheState::Loaded) > 0) + { + TRangeSet SampleTimes; + if (Player->GetCache().QueryCacheState(EMediaCacheState::Loaded, SampleTimes)) + { + //Fetch the minimum sample time from all ranges queried from the player's cache + const FTimespan MinBound = SampleTimes.GetMinBoundValue(); + UseTimespan = (UseTimespan.IsSet()) ? FMath::Min(MinBound, UseTimespan.GetValue()) : MinBound; + } + } } } - return AvailableSampleCount; + return UseTimespan.IsSet() ? 
TimeSpanToFrameTime(UseTimespan.GetValue(), GetFrameRate()) : FFrameTime(); } FFrameRate UMediaBundleTimeSynchronizationSource::GetFrameRate() const @@ -119,8 +127,9 @@ bool UMediaBundleTimeSynchronizationSource::IsReady() const return MediaBundle && MediaBundle->GetMediaPlayer() && MediaBundle->GetMediaPlayer()->IsReady() && MediaBundle->GetMediaSource() && MediaBundle->GetMediaTexture(); } -bool UMediaBundleTimeSynchronizationSource::Open() +bool UMediaBundleTimeSynchronizationSource::Open(const FTimeSynchronizationOpenData& InOpenData) { + OpenData = InOpenData; bool bResult = false; if (MediaBundle) { @@ -155,21 +164,49 @@ bool UMediaBundleTimeSynchronizationSource::Open() return bResult; } -void UMediaBundleTimeSynchronizationSource::Start() +void UMediaBundleTimeSynchronizationSource::Start(const FTimeSynchronizationStartData& InStartData) { + StartData = InStartData; UMediaPlayer* MediaPlayer = MediaBundle ? MediaBundle->GetMediaPlayer() : nullptr; if (MediaPlayer) { - //Once we're on the verge of playing the source, it's time to setup the delay - if (!bUseForSynchronization) + const FFrameRate LocalFrameRate = GetFrameRate(); + const FFrameTime LocalStartFrame = FFrameRate::TransformTime(StartData->StartFrame, OpenData->SynchronizationFrameRate, LocalFrameRate); + const FTimespan StartTimespan = FTimespan::FromSeconds(LocalFrameRate.AsSeconds(LocalStartFrame)); + + // If this source is used for synchronization, then we'll try to seek to the start frame. + if (bUseForSynchronization) { - MediaPlayer->SetTimeDelay(FTimespan::FromSeconds(TimeDelay)); + if (MediaPlayer->SupportsSeeking()) + { + MediaPlayer->Seek(StartTimespan); + } } + + // Otherwise, we'll at least try to set a delay so it sort of lines up. + else + { + const FFrameTime MinimumTime = GetOldestSampleTime(); + + // TODO: Verify this is the correct order. The comments on SetDelay seem confusing. + // TODO: Maybe also do this for sync sources that don't support seeking? Need test cases. 
+ const FFrameTime DelayFrames = LocalStartFrame - MinimumTime; + const double Delay = LocalFrameRate.AsSeconds(DelayFrames); + + if (Delay > 0) + { + MediaPlayer->SetTimeDelay(Delay); + } + } + + MediaPlayer->Play(); } } void UMediaBundleTimeSynchronizationSource::Close() { + StartData.Reset(); + OpenData.Reset(); if (MediaBundle) { MediaBundle->CloseMediaSource(); diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaFrameworkUtilitiesModule.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaFrameworkUtilitiesModule.cpp index fe449c6173a2..6506b139f384 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaFrameworkUtilitiesModule.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/MediaFrameworkUtilitiesModule.cpp @@ -17,13 +17,14 @@ #endif //WITH_EDITOR + DEFINE_LOG_CATEGORY(LogMediaFrameworkUtilities); #define LOCTEXT_NAMESPACE "MediaFrameworkUtilities" /** -* Implements the MediaFrameworkUtilitiesModule module. -*/ + * Implements the MediaFrameworkUtilitiesModule module. 
+ */ class FMediaFrameworkUtilitiesModule : public IMediaFrameworkUtilitiesModule { FMediaProfileManager MediaProfileManager; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfile.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfile.cpp index 806aa3570266..0bc0c2ae2d13 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfile.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfile.cpp @@ -6,6 +6,8 @@ #include "MediaFrameworkUtilitiesModule.h" #include "Engine/Engine.h" +#include "Engine/EngineCustomTimeStep.h" +#include "Engine/TimecodeProvider.h" #include "MediaAssets/ProxyMediaOutput.h" #include "MediaAssets/ProxyMediaSource.h" #include "MediaOutput.h" @@ -33,6 +35,18 @@ UMediaOutput* UMediaProfile::GetMediaOutput(int32 Index) const } +UTimecodeProvider* UMediaProfile::GetTimecodeProvider() const +{ + return bOverrideTimecodeProvider ? TimecodeProvider : nullptr; +} + + +UEngineCustomTimeStep* UMediaProfile::GetCustomTimeStep() const +{ + return bOverrideCustomTimeStep ? 
CustomTimeStep : nullptr; +} + + void UMediaProfile::Apply() { if (GEngine == nullptr) @@ -94,4 +108,79 @@ void UMediaProfile::Apply() } } } + + if (bOverrideTimecodeProvider) + { + if (TimecodeProvider) + { + bool bResult = GEngine->SetTimecodeProvider(TimecodeProvider); + if (!bResult) + { + UE_LOG(LogMediaFrameworkUtilities, Error, TEXT("The TimecodeProvider '%s' could not be initialized."), *TimecodeProvider->GetName()); + } + } + else + { + GEngine->SetTimecodeProvider(nullptr); + } + } + + if (bOverrideCustomTimeStep) + { + if (CustomTimeStep) + { + bool bResult = GEngine->SetCustomTimeStep(CustomTimeStep); + if (!bResult) + { + UE_LOG(LogMediaFrameworkUtilities, Error, TEXT("The Custom Time Step '%s' could not be initialized."), *CustomTimeStep->GetName()); + } + } + else + { + GEngine->SetCustomTimeStep(nullptr); + } + } +} + + +void UMediaProfile::Reset() +{ + if (GEngine == nullptr) + { + UE_LOG(LogMediaFrameworkUtilities, Error, TEXT("The MediaProfile '%s' could not be reset. 
The Engine is not initialized."), *GetName()); + return; + } + + { + // Reset the proxies + TArray SourceProxies = GetDefault()->GetAllMediaSourceProxy(); + for (UProxyMediaSource* Proxy : SourceProxies) + { + if (Proxy) + { + Proxy->SetDynamicMediaSource(nullptr); + } + } + } + + { + TArray OutputProxies = GetDefault()->GetAllMediaOutputProxy(); + for (UProxyMediaOutput* Proxy : OutputProxies) + { + if (Proxy) + { + Proxy->SetDynamicMediaOutput(nullptr); + } + } + } + + if (bOverrideTimecodeProvider) + { + GEngine->SetTimecodeProvider(nullptr); + } + + if (bOverrideCustomTimeStep) + { + GEngine->SetCustomTimeStep(GEngine->GetDefaultCustomTimeStep()); + } } diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfileManager.cpp b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfileManager.cpp index cafb929c6795..027c30706f0c 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfileManager.cpp +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Private/Profile/MediaProfileManager.cpp @@ -20,14 +20,15 @@ void FMediaProfileManager::SetCurrentMediaProfile(UMediaProfile* InMediaProfile) UMediaProfile* Previous = CurrentMediaProfile.Get(); if (InMediaProfile != Previous) { + if (Previous) + { + Previous->Reset(); + } + if (InMediaProfile) { InMediaProfile->Apply(); } - else - { - GetMutableDefault()->Apply(); - } CurrentMediaProfile.Reset(InMediaProfile); MediaProfileChangedDelegate.Broadcast(Previous, InMediaProfile); diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaOutput.h b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaOutput.h index 07f8a30526a7..e14182590518 100644 --- 
a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaOutput.h +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaOutput.h @@ -37,6 +37,13 @@ public: */ UMediaOutput* GetMediaOutput() const; + /** + * Get the last element of the media output chain that is not a proxy media output. + * + * @return The media output, or nullptr if not set. + */ + UMediaOutput* GetLeafMediaOutput() const; + /** * Set the dynamic media output proxy. * @@ -57,6 +64,7 @@ protected: private: + mutable bool bLeafMediaOutput; mutable bool bValidateGuard; mutable bool bRequestedSizeGuard; mutable bool bRequestedPixelFormatGuard; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaSource.h b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaSource.h index 789e6ab9341e..5baa92cc33b0 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaSource.h +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaAssets/ProxyMediaSource.h @@ -37,6 +37,13 @@ public: */ UMediaSource* GetMediaSource() const; + /** + * Get the last element of the media source chain that is not a proxy media source. + * + * @return The media source, or nullptr if not set. + */ + UMediaSource* GetLeafMediaSource() const; + /** * Set the dynamic media source proxy. 
* @@ -66,6 +73,7 @@ private: mutable bool bUrlGuard; mutable bool bValidateGuard; + mutable bool bLeafMediaSource; mutable bool bMediaOptionGuard; }; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaBundleTimeSynchronizationSource.h b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaBundleTimeSynchronizationSource.h index a4d4f215b78c..0313f1d9144e 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaBundleTimeSynchronizationSource.h +++ b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/MediaBundleTimeSynchronizationSource.h @@ -31,13 +31,18 @@ public: //~ End UObject Interface //~ Begin UTimeSynchronizationSource Interface - virtual FFrameTime GetNextSampleTime() const override; - virtual int32 GetAvailableSampleCount() const override; + virtual FFrameTime GetNewestSampleTime() const override; + virtual FFrameTime GetOldestSampleTime() const override; virtual FFrameRate GetFrameRate() const override; virtual bool IsReady() const override; - virtual bool Open() override; - virtual void Start() override; + virtual bool Open(const FTimeSynchronizationOpenData& InOpenData) override; + virtual void Start(const FTimeSynchronizationStartData& InStartData) override; virtual void Close() override; virtual FString GetDisplayName() const override; //~ End UTimeSynchronizationSource Interface + +private: + + TOptional OpenData; + TOptional StartData; }; diff --git a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/Profile/MediaProfile.h b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/Profile/MediaProfile.h index 5748e30ee01f..9a0bc01c7fb0 100644 --- a/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/Profile/MediaProfile.h +++ 
b/Engine/Plugins/Media/MediaFrameworkUtilities/Source/MediaFrameworkUtilities/Public/Profile/MediaProfile.h @@ -8,8 +8,10 @@ #include "MediaProfile.generated.h" +class UEngineCustomTimeStep; class UMediaOutput; class UMediaSource; +class UTimecodeProvider; /** * A media profile that configures the inputs, outputs, timecode provider and custom time step. @@ -29,6 +31,22 @@ private: UPROPERTY(EditAnywhere, Instanced, Category="Outputs") TArray MediaOutputs; + /** Override the Engine's Timecode provider defined in the project settings. */ + UPROPERTY(EditAnywhere, Category="Timecode Provider", meta=(DisplayName="Override Project Settings")) + bool bOverrideTimecodeProvider; + + /** Timecode provider. */ + UPROPERTY(EditAnywhere, Instanced, Category="Timecode Provider", meta=(EditCondition="bOverrideTimecodeProvider")) + UTimecodeProvider* TimecodeProvider; + + /** Override the Engine's Custom time step defined in the project settings. */ + UPROPERTY(EditAnywhere, Category="Genlock", meta=(DisplayName="Override Project Settings")) + bool bOverrideCustomTimeStep; + + /** Custom time step */ + UPROPERTY(EditAnywhere, Instanced, Category="Genlock", meta=(EditCondition="bOverrideCustomTimeStep")) + UEngineCustomTimeStep* CustomTimeStep; + public: /** @@ -45,14 +63,34 @@ public: */ UMediaOutput* GetMediaOutput(int32 Index) const; + /** + * Get the timecode provider. + * + * @return The timecode provider, or nullptr if not set. + */ + UTimecodeProvider* GetTimecodeProvider() const; + + /** + * Get the custom time step. + * + * @return The custom time step, or nullptr if not set. + */ + UEngineCustomTimeStep* GetCustomTimeStep() const; + public: /** - * Apply the media profile as the current profile. + * Apply the media profile. * Will change the engine's timecode provider & custom time step and redirect the media profile source/output proxy for the correct media source/output. */ void Apply(); + /** + * Reset the media profile. 
+ * Will reset the engine's timecode provider & custom time step and redirect the media profile source/output proxy for no media source/output. + */ + void Reset(); + /** * Apply the media profile as the current profile. diff --git a/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/MediaPlayerEditor.Build.cs b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/MediaPlayerEditor.Build.cs index e989d8be6e63..03d3ba4e99cf 100644 --- a/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/MediaPlayerEditor.Build.cs +++ b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/MediaPlayerEditor.Build.cs @@ -26,12 +26,18 @@ public class MediaPlayerEditor : ModuleRules "MediaPlayerEditor/Private/Widgets", }); + PublicDependencyModuleNames.AddRange( + new string[] { + "Core", + "Slate", + "SlateCore", + }); + PrivateDependencyModuleNames.AddRange( new string[] { "AudioMixer", "ComponentVisualizers", "ContentBrowser", - "Core", "CoreUObject", "ApplicationCore", "DesktopPlatform", @@ -46,8 +52,6 @@ public class MediaPlayerEditor : ModuleRules "RenderCore", "RHI", "ShaderCore", - "Slate", - "SlateCore", "TextureEditor", "UnrealEd", }); diff --git a/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.h b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.h new file mode 100644 index 000000000000..d49a86ec8678 --- /dev/null +++ b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.h @@ -0,0 +1,195 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Misc/Optional.h" +#include "Widgets/DeclarativeSyntaxSupport.h" +#include "Widgets/SCompoundWidget.h" + + +/** + * A widget that let you select a single permutation from a list . It groups the values into categories and removes duplicates inside that category. 
+ * + * A trivial use case appears below: + * + * struct FMyItem { int32 A; int32 B; } + * struct FMyBuilder + * { + * static const FName ColumnA = "A"; + * static const FName ColumnB = "B"; + * static bool IdenticalProperty(FName ColumnName, const FMyItem& Left, const FMyItem& Right) { return ColumnName == ColumnA ? Left.A == Right.A : Left.B == Right.B; } + * static bool Less(FName ColumnName, const FMyItem& Left, const FMyItem& Right) { return ColumnName == ColumnA ? Left.A < Right.A : Left.B < Right.B; } + * static FText GetLabel(FName ColumnName, const FMyItem& Item) { return ColumnName == ColumnA ? FText::AsNumber(Item.A) : FText::AsNumber(Item.B); } + * static FText GetTooltip(FName ColumnName, const FMyItem& Item) { return LOCTEXT("Tooltip", "Tooltip"); } + * }; + * + * TArray< FMyItem > Items; + * FMyItem Value1; + * Value1.A = 1; Value1.B = 2; + * Items.Add( Value1 ); + * FMyItem Value2; + * Value2.A = 1; Value2.B = 3; + * Items.Add( Value2 ); + * + * using TSelection = SMediaPermutationsSelector< FMyItem, FMyBuilder >; + * SNew( TSelection ) + * .PermutationsSource( MoveTemp(Items) ) + * .SelectedPermutation( Value2 ) + * + TSelection::Column( FMyBuilder::A ); + * .Label( LOCTEXT("ExampleA", "The A") ) + * + TSelection::Column( FMyBuilder::B ); + * .Label( LOCTEXT("ExampleB", "The B") ) + * + * In the example, we make all 2 columns. One for A and one for B. + * The first column will have 1 element: "1". 
The second column will have 2 elements: "2", "3" + * + */ + +class SHorizontalBox; + +template +class MEDIAPLAYEREDITOR_API TMediaPermutationsSelectorBuilder +{ +public: + static bool IdenticalProperty(FName ColumnName, ItemType Left, ItemType Right) { return Left == Right; } + static bool Less(FName ColumnName, ItemType Left, ItemType Right) { return Left < Right; } + static FText GetLabel(FName ColumnName, ItemType Item) { return FText::FromName(ColumnName); } + static FText GetTooltip(FName ColumnName, ItemType Item) { return FText::FromName(ColumnName); } +}; + +template > +class SMediaPermutationsSelector : public SCompoundWidget +{ +public: + using ThisClass = SMediaPermutationsSelector; + + /** A delegate type invoked when the selection changes. */ + DECLARE_DELEGATE_OneParam(FOnSelectionChanged, ItemType /* NewItemSelected*/); + + /** + * A delegate type invoked when we fill a column and wants to check if it should be visible. + * @param ColumnName the name of the ColumnName + * @param UniqueItemInColumn List of items used to generate the column. Multiples items may be shared the value but only one will be in that list. + * @return true if the column should be visible + */ + DECLARE_DELEGATE_RetVal_TwoParams(bool, FIsColumnVisible, FName /*ColumnName*/, const TArray& /*UniquePermutationsForThisColumn */); + +public: + /** Describes a single column */ + class FColumn + { + public: + SLATE_BEGIN_ARGS(FColumn) + {} + /** A unique ID for this property, so that it can be saved and restored. */ + SLATE_ARGUMENT(FName, ColumnName) + /** Text to use as the Column header. */ + SLATE_ATTRIBUTE(FText, Label) + /** Text to use as the Column tooltip. */ + SLATE_ATTRIBUTE(FText, Tooltip) + /** Delegate to invoke when build the column and check the visibility. 
*/ + SLATE_EVENT(FIsColumnVisible, IsColumnVisible) + + SLATE_END_ARGS() + + FColumn(const FArguments& InArgs) + : ColumnName(InArgs._ColumnName) + , Label(InArgs._Label) + , Tooltip(InArgs._Tooltip) + , IsColumnVisible(InArgs._IsColumnVisible) + { } + + public: + /** A unique ID for this property, so that it can be saved and restored. */ + FName ColumnName; + + /** Text to use as the Column header. */ + TAttribute< FText > Label; + + /** Text to use as the Column tooltip. */ + TAttribute< FText > Tooltip; + + /** Widget created by this menu. */ + TSharedPtr< SWidget > Widget; + + /** Delegate to invoke when build the column and check the visibility. */ + FIsColumnVisible IsColumnVisible; + }; + +public: + SLATE_BEGIN_ARGS(ThisClass) + : _ColumnHeight(200) + {} + /** Array of columns */ + SLATE_SUPPORTS_SLOT_WITH_ARGS(typename FColumn) + /** Array of data items that we are displaying */ + SLATE_ARGUMENT(TArray, PermutationsSource) + /** Default selected item in ItemsSource */ + SLATE_ARGUMENT(TOptional, SelectedPermutation) + /** Desired height of the columns */ + SLATE_ATTRIBUTE(FOptionalSize, ColumnHeight) + /** Override the "apply" button widget */ + SLATE_ARGUMENT(TSharedPtr, OverrideButtonWidget) + /** Delegate to invoke when the button is clicked. */ + SLATE_EVENT(FOnClicked, OnButtonClicked) + /** Delegate to invoke when selection changes. */ + SLATE_EVENT(FOnSelectionChanged, OnSelectionChanged) + SLATE_END_ARGS() + + void Construct(const FArguments& InArgs); + + /** Create a column with a specified ColumnId */ + static typename FColumn::FArguments Column(const FName& InColumnName); + +private: + /** Array of data items that we are displaying. */ + TArray PermutationsSource; + + /** Index of the selected item in ItemsSource. It will always be valid. */ + int32 SelectedPermutationIndex; + + /** Delegate to invoke when selection changes. */ + FOnSelectionChanged OnSelectionChanged; + + /** Columns information. 
*/ + TIndirectArray PropertyColumns; + + /** Box used as container for the radio button menu. */ + TSharedPtr ColumnContainer; + +private: + void BuildColumns(int32 StartIndex); + void ItemSelected(int32 UniqueItemIndexSelected, int32 ColumnSelected); + TArray GenerateItemIndexes() const; + bool IsColumnVisible(const FColumn& Column, const TArray& UniqueItemIndexes) const; + +private: + bool IdenticalProperty(FName ColumnName, int32 LeftItemIndex, int32 RightItemIndex) + { + check(PermutationsSource.IsValidIndex(LeftItemIndex)); + check(PermutationsSource.IsValidIndex(RightItemIndex)); + return ItemBuilder::IdenticalProperty(ColumnName, PermutationsSource[LeftItemIndex], PermutationsSource[RightItemIndex]); + } + + bool Less(FName ColumnName, int32 LeftItemIndex, int32 RightItemIndex) + { + check(PermutationsSource.IsValidIndex(LeftItemIndex)); + check(PermutationsSource.IsValidIndex(RightItemIndex)); + return ItemBuilder::Less(ColumnName, PermutationsSource[LeftItemIndex], PermutationsSource[RightItemIndex]); + } + + FText GetLabel(FName ColumnName, int32 ItemIndex) + { + check(PermutationsSource.IsValidIndex(ItemIndex)); + return ItemBuilder::GetLabel(ColumnName, PermutationsSource[ItemIndex]); + } + + FText GetTooltip(FName ColumnName, int32 ItemIndex) + { + check(PermutationsSource.IsValidIndex(ItemIndex)); + return ItemBuilder::GetTooltip(ColumnName, PermutationsSource[ItemIndex]); + } +}; + +#include "Widgets/SMediaPermutationsSelector.inl" diff --git a/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.inl b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.inl new file mode 100644 index 000000000000..44a2cc3cdf5c --- /dev/null +++ b/Engine/Plugins/Media/MediaPlayerEditor/Source/MediaPlayerEditor/Public/Widgets/SMediaPermutationsSelector.inl @@ -0,0 +1,297 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Framework/MultiBox/MultiBoxBuilder.h" +#include "Widgets/SBoxPanel.h" +#include "Widgets/Input/SButton.h" +#include "Widgets/Layout/SBox.h" + +template +typename SMediaPermutationsSelector::FColumn::FArguments SMediaPermutationsSelector::Column(const FName& InColumnName) +{ + FColumn::FArguments NewArgs; + NewArgs._ColumnName = InColumnName; + return NewArgs; +} + +template +void SMediaPermutationsSelector::Construct(const FArguments& InArgs) +{ + SWidget::Construct(InArgs._ToolTipText, InArgs._ToolTip, InArgs._Cursor, InArgs._IsEnabled, InArgs._Visibility, InArgs._RenderOpacity, InArgs._RenderTransform, InArgs._RenderTransformPivot, InArgs._Tag, InArgs._ForceVolatile, InArgs._Clipping, InArgs.MetaData); + + PermutationsSource = InArgs._PermutationsSource; + SelectedPermutationIndex = INDEX_NONE; + OnSelectionChanged = InArgs._OnSelectionChanged; + + if (InArgs._SelectedPermutation.IsSet()) + { + SelectedPermutationIndex = PermutationsSource.IndexOfByKey(InArgs._SelectedPermutation.GetValue()); + } + + // Copy all the column info from the declaration + PropertyColumns.Empty(InArgs.Slots.Num()); + for (int32 SlotIndex = 0; SlotIndex < InArgs.Slots.Num(); ++SlotIndex) + { + FColumn* const Column = InArgs.Slots[SlotIndex]; + if (Column && Column->ColumnName != NAME_None) + { + PropertyColumns.Add(Column); + } + } + + SAssignNew(ColumnContainer, SHorizontalBox); + + BuildColumns(0); + + ChildSlot + [ + SNew(SBox) + .HeightOverride(InArgs._ColumnHeight) + [ + SNew(SVerticalBox) + + SVerticalBox::Slot() + .FillHeight(1.f) + [ + ColumnContainer.ToSharedRef() + ] + + SVerticalBox::Slot() + .AutoHeight() + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .FillWidth(1.f) + [ + SNullWidget::NullWidget + ] + + SHorizontalBox::Slot() + .Padding(4.f) + .AutoWidth() + [ + InArgs._OverrideButtonWidget.IsValid() ? 
+ InArgs._OverrideButtonWidget.ToSharedRef() + : + SNew(SButton) + .VAlign(VAlign_Center) + .HAlign(HAlign_Center) + .Text(NSLOCTEXT("MediaPlayerEditor", "ApplyLabel", "Apply")) + .OnClicked(InArgs._OnButtonClicked) + ] + ] + ] + ]; +} + +template +void SMediaPermutationsSelector::BuildColumns(int32 StartIndex) +{ + const int32 NumberOfItems = PermutationsSource.Num(); + if (!PropertyColumns.IsValidIndex(StartIndex) || NumberOfItems == 0) + { + return; + } + + if (SelectedPermutationIndex == INDEX_NONE) + { + SelectedPermutationIndex = 0; + OnSelectionChanged.ExecuteIfBound(PermutationsSource[SelectedPermutationIndex]); + } + + // Get only the valid item for that selected input + TArray AllValidItemIndexes = GenerateItemIndexes(); + + // Clean the items list. AllValidItems should contains only the valid entry up to category StartIndex. + for (int32 ColumnIndex = 0; ColumnIndex < StartIndex; ++ColumnIndex) + { + const FColumn& Column = PropertyColumns[ColumnIndex]; + for (int32 ItemIndex = AllValidItemIndexes.Num() - 1; ItemIndex >= 0; --ItemIndex) + { + if (!IdenticalProperty(Column.ColumnName, SelectedPermutationIndex, AllValidItemIndexes[ItemIndex])) + { + AllValidItemIndexes.RemoveAtSwap(ItemIndex); + } + } + } + + // ReBuild the items + for (int32 ColumnIndex = StartIndex; ColumnIndex < PropertyColumns.Num(); ++ColumnIndex) + { + FColumn& Column = PropertyColumns[ColumnIndex]; + if (Column.Widget.IsValid()) + { + ColumnContainer->RemoveSlot(Column.Widget.ToSharedRef()); + Column.Widget.Reset(); + } + + // Gather the unique item and remove all that do not match with the current selected item + TArray UniqueItemsForColumnIndexes; + const FName ColumnName = Column.ColumnName; + + for (int32 ItemIndex = AllValidItemIndexes.Num() - 1; ItemIndex >= 0; --ItemIndex) + { + bool bFound = false; + for (int32 UniqueIndex : UniqueItemsForColumnIndexes) + { + if (IdenticalProperty(ColumnName, UniqueIndex, AllValidItemIndexes[ItemIndex])) + { + bFound = true; + break; + } + } 
+ if (!bFound) + { + UniqueItemsForColumnIndexes.Add(AllValidItemIndexes[ItemIndex]); + } + + if (!IdenticalProperty(ColumnName, SelectedPermutationIndex, AllValidItemIndexes[ItemIndex])) + { + AllValidItemIndexes.RemoveAtSwap(ItemIndex); + } + } + + // Only show the column if the user desire it + if (!IsColumnVisible(Column, UniqueItemsForColumnIndexes)) + { + continue; + } + + // Sort the column items + UniqueItemsForColumnIndexes.Sort([this, ColumnName](int32 Left, int32 Right) { return Less(ColumnName, Left, Right); }); + + // Build the radio buttons + FMenuBuilder MenuBuilder(false, nullptr); + for (int32 UniqueItemIndex : UniqueItemsForColumnIndexes) + { + MenuBuilder.AddMenuEntry( + GetLabel(ColumnName, UniqueItemIndex), + GetTooltip(ColumnName, UniqueItemIndex), + FSlateIcon(), + FUIAction( + FExecuteAction::CreateLambda([this, UniqueItemIndex, ColumnIndex] { ItemSelected(UniqueItemIndex, ColumnIndex); }), + FCanExecuteAction(), + FIsActionChecked::CreateLambda([this, UniqueItemIndex, ColumnName] { return IdenticalProperty(ColumnName, SelectedPermutationIndex, UniqueItemIndex); }) + ), + NAME_None, + EUserInterfaceActionType::RadioButton + ); + } + + // Create the widget + SAssignNew(Column.Widget, SVerticalBox) + + SVerticalBox::Slot() + .AutoHeight() + .HAlign(HAlign_Left) + .Padding(2, 0, 5, 0) + [ + SNew(STextBlock) + .Text(Column.Label) + ] + + SVerticalBox::Slot() + .FillHeight(1.f) + [ + MenuBuilder.MakeWidget() + ]; + + // Attach the widget + ColumnContainer->AddSlot() + .AutoWidth() + [ + Column.Widget.ToSharedRef() + ]; + } +} + +template +void SMediaPermutationsSelector::ItemSelected(int32 UniqueItemIndex, int32 ColumnIndex) +{ + const int32 NumberOfItems = PermutationsSource.Num(); + if (!PropertyColumns.IsValidIndex(ColumnIndex) || NumberOfItems == 0) + { + return; + } + + int32 PreviousSelectedItemIndex = SelectedPermutationIndex; + SelectedPermutationIndex = INDEX_NONE; + + // Get only the valid item for that selected input + TArray 
AllValidItemIndexes = GenerateItemIndexes(); + + int32 MaxColumns = FMath::Min(ColumnIndex + 1, PropertyColumns.Num()); + for (int32 Index = 0; Index < MaxColumns; ++Index) + { + const FColumn& Column = PropertyColumns[Index]; + for (int32 ItemIndex = AllValidItemIndexes.Num() - 1; ItemIndex >= 0; --ItemIndex) + { + if (!IdenticalProperty(Column.ColumnName, UniqueItemIndex, AllValidItemIndexes[ItemIndex])) + { + AllValidItemIndexes.RemoveAtSwap(ItemIndex); + } + } + } + + if (AllValidItemIndexes.Num() > 0) + { + SelectedPermutationIndex = AllValidItemIndexes[0]; + } + + // Try to find something that matches what we used to have. + for (int32 Index = MaxColumns; Index < PropertyColumns.Num(); ++Index) + { + const FColumn& Column = PropertyColumns[Index]; + for (int32 ItemIndex = AllValidItemIndexes.Num() - 1; ItemIndex >= 0; --ItemIndex) + { + if (!IdenticalProperty(Column.ColumnName, PreviousSelectedItemIndex, AllValidItemIndexes[ItemIndex])) + { + AllValidItemIndexes.RemoveAtSwap(ItemIndex); + } + } + + if (AllValidItemIndexes.Num() > 0) + { + SelectedPermutationIndex = AllValidItemIndexes[0]; + } + } + + // There should always be one selected item + if (SelectedPermutationIndex == INDEX_NONE) + { + SelectedPermutationIndex = UniqueItemIndex; + } + + BuildColumns(ColumnIndex + 1); + + if (SelectedPermutationIndex != PreviousSelectedItemIndex) + { + OnSelectionChanged.ExecuteIfBound(PermutationsSource[SelectedPermutationIndex]); + } +} + +template +TArray SMediaPermutationsSelector::GenerateItemIndexes() const +{ + TArray Result; + const int32 NumberOfItems = PermutationsSource.Num(); + Result.Reserve(NumberOfItems); + for (int32 Index = 0; Index < NumberOfItems; ++Index) + { + Result.Add(Index); + } + return MoveTemp(Result); +} + +template +bool SMediaPermutationsSelector::IsColumnVisible(const FColumn& Column, const TArray& UniqueItemIndexes) const +{ + bool bResult = true; + if (Column.IsColumnVisible.IsBound()) + { + TArray UniqueItems; + 
UniqueItems.Empty(UniqueItemIndexes.Num()); + for (int32 ItemIndex : UniqueItemIndexes) + { + UniqueItems.Add(PermutationsSource[ItemIndex]); + } + + bResult = Column.IsColumnVisible.Execute(Column.ColumnName, UniqueItems); + } + + return bResult; +} diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/MediaPlayerTimeSynchronizationSource.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/MediaPlayerTimeSynchronizationSource.cpp index 8a70ebedf934..dcfe5ea1e0a2 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/MediaPlayerTimeSynchronizationSource.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/MediaPlayerTimeSynchronizationSource.cpp @@ -27,63 +27,79 @@ void UMediaPlayerTimeSynchronizationSource::PostEditChangeProperty(FPropertyChan } #endif -FFrameTime UMediaPlayerTimeSynchronizationSource::GetNextSampleTime() const +static FFrameTime TimeSpanToFrameTime(const FTimespan& Timespan, const FFrameRate& FrameRate) { - FFrameTime NextSampleTime; + return FFrameTime::FromDecimal(Timespan.GetTotalSeconds() * FrameRate.AsDecimal()).RoundToFrame(); +} + +FFrameTime UMediaPlayerTimeSynchronizationSource::GetOldestSampleTime() const +{ + TOptional UseTimespan; if (MediaTexture && MediaTexture->GetMediaPlayer()) { + if (MediaTexture->GetAvailableSampleCount() > 0) + { + // Ideally, the MediaTexture (or more likely, the TMediaSampleQueue) would be able to track + // the current span of samples available. However, that's already prone to some threading issues + // and trying to manage more data will only exacerbate that. + + // Therefore, we can only use the next available sample time. 
+ UseTimespan = MediaTexture->GetNextSampleTime(); + } + const TSharedPtr& Player = MediaTexture->GetMediaPlayer()->GetPlayerFacade()->GetPlayer(); if (Player.IsValid()) { - //If there is a sample in the Texture, we consider it as the next one to be used/rendered - if (MediaTexture->GetAvailableSampleCount() > 0) - { - const FTimespan TextureTime = MediaTexture->GetNextSampleTime(); - NextSampleTime = FFrameTime::FromDecimal(TextureTime.GetTotalSeconds() * GetFrameRate().AsDecimal()).RoundToFrame(); - } - else if (Player->GetCache().GetSampleCount(EMediaCacheState::Loaded) > 0) + IMediaCache& Cache = Player->GetCache(); + if (Cache.GetSampleCount(EMediaCacheState::Loaded) > 0) { TRangeSet SampleTimes; - if (Player->GetCache().QueryCacheState(EMediaCacheState::Loaded, SampleTimes)) + if (Cache.QueryCacheState(EMediaCacheState::Loaded, SampleTimes)) { - //Fetch the minimum sample time from all ranges queried from the player's cache - TArray> Ranges; - SampleTimes.GetRanges(Ranges); - check(Ranges.Num() > 0); - - TRangeBound MinBound = Ranges[0].GetLowerBound(); - for (const auto& Range : Ranges) - { - MinBound = TRangeBound::MinLower(MinBound, Range.GetLowerBound()); - } - const FTimespan MinSampleTime = MinBound.GetValue(); - - NextSampleTime = FFrameTime::FromDecimal(MinSampleTime.GetTotalSeconds() * GetFrameRate().AsDecimal()).RoundToFrame(); + const FTimespan MinBound = SampleTimes.GetMinBoundValue(); + UseTimespan = (UseTimespan.IsSet()) ? FMath::Min(MinBound, UseTimespan.GetValue()) : MinBound; } } } } - return NextSampleTime; + return UseTimespan.IsSet() ? 
TimeSpanToFrameTime(UseTimespan.GetValue(), GetFrameRate()) : FFrameTime(0); } -int32 UMediaPlayerTimeSynchronizationSource::GetAvailableSampleCount() const +FFrameTime UMediaPlayerTimeSynchronizationSource::GetNewestSampleTime() const { - int32 AvailableSampleCount = 0; + TOptional UseTimespan; if (MediaTexture && MediaTexture->GetMediaPlayer()) { + if (MediaTexture->GetAvailableSampleCount() > 0) + { + // Ideally, the MediaTexture (or more likely, the TMediaSampleQueue) would be able to track + // the current span of samples available. However, that's already prone to some threading issues + // and trying to manage more data will only exacerbate that. + + // Therefore, we can only use the next available sample time. + UseTimespan = MediaTexture->GetNextSampleTime(); + } + const TSharedPtr& Player = MediaTexture->GetMediaPlayer()->GetPlayerFacade()->GetPlayer(); if (Player.IsValid()) { - const int32 TextureSampleCount = MediaTexture->GetAvailableSampleCount(); - const int32 PlayerSampleCount = Player->GetCache().GetSampleCount(EMediaCacheState::Loaded); - AvailableSampleCount = TextureSampleCount + PlayerSampleCount; + IMediaCache& Cache = Player->GetCache(); + if (Cache.GetSampleCount(EMediaCacheState::Loaded) > 0) + { + TRangeSet SampleTimes; + if (Cache.QueryCacheState(EMediaCacheState::Loaded, SampleTimes)) + { + const FTimespan MaxBound = SampleTimes.GetMaxBoundValue(); + UseTimespan = (UseTimespan.IsSet()) ? FMath::Max(MaxBound, UseTimespan.GetValue()) : MaxBound; + } + } } } - return AvailableSampleCount; + return UseTimespan.IsSet() ? 
TimeSpanToFrameTime(UseTimespan.GetValue(), GetFrameRate()) : FFrameTime(0); } FFrameRate UMediaPlayerTimeSynchronizationSource::GetFrameRate() const @@ -114,8 +130,10 @@ bool UMediaPlayerTimeSynchronizationSource::IsReady() const return MediaTexture && MediaTexture->GetMediaPlayer() && MediaTexture->GetMediaPlayer()->IsReady() && MediaSource; } -bool UMediaPlayerTimeSynchronizationSource::Open() +bool UMediaPlayerTimeSynchronizationSource::Open(const FTimeSynchronizationOpenData& InOpenData) { + OpenData = InOpenData; + bool bResult = false; if (MediaSource && MediaTexture) { @@ -149,15 +167,40 @@ bool UMediaPlayerTimeSynchronizationSource::Open() return bResult; } -void UMediaPlayerTimeSynchronizationSource::Start() +void UMediaPlayerTimeSynchronizationSource::Start(const FTimeSynchronizationStartData& InStartData) { + StartData = InStartData; + UMediaPlayer* MediaPlayer = MediaTexture ? MediaTexture->GetMediaPlayer() : nullptr; if (MediaPlayer) { - //Once we're on the verge of playing the source, it's time to setup the delay - if (!bUseForSynchronization) + const FFrameRate LocalFrameRate = GetFrameRate(); + const FFrameTime LocalStartFrame = FFrameRate::TransformTime(StartData->StartFrame, OpenData->SynchronizationFrameRate, LocalFrameRate); + const FTimespan StartTimespan = FTimespan::FromSeconds(LocalFrameRate.AsSeconds(LocalStartFrame)); + + // If this source is used for synchronization, then we'll try to seek to the start frame. + if (bUseForSynchronization) { - MediaPlayer->SetTimeDelay(FTimespan::FromSeconds(TimeDelay)); + if (MediaPlayer->SupportsSeeking()) + { + MediaPlayer->Seek(StartTimespan); + } + } + + // Otherwise, we'll at least try to set a delay so it sort of lines up. + else + { + const FFrameTime MinimumTime = GetOldestSampleTime(); + + // TODO: Verify this is the correct order. The comments on SetDelay seem confusing. + // TODO: Maybe also do this for sync sources that don't support seeking? Need test cases. 
+ const FFrameTime DelayFrames = LocalStartFrame - MinimumTime; + const double Delay = LocalFrameRate.AsSeconds(DelayFrames); + + if (Delay > 0) + { + MediaPlayer->SetTimeDelay(Delay); + } } MediaPlayer->Play(); @@ -174,6 +217,9 @@ void UMediaPlayerTimeSynchronizationSource::Close() MediaPlayer->Close(); } } + + StartData.Reset(); + OpenData.Reset(); } FString UMediaPlayerTimeSynchronizationSource::GetDisplayName() const diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizer.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizer.cpp index a77d57e2bcb7..45d170daefd8 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizer.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizer.cpp @@ -11,33 +11,178 @@ #define LOCTEXT_NAMESPACE "TimecodeSynchronizer" -/** - * FTimecodeSynchronizerActiveTimecodedInputSource - */ - -void FTimecodeSynchronizerActiveTimecodedInputSource::ConvertToLocalFrameRate(const FFrameRate& InLocalFrameRate) +namespace TimecodeSynchronizerPrivate { - const FFrameTime MaxSampleTime = NextSampleTime + AvailableSampleCount; - NextSampleLocalTime = FFrameRate::TransformTime(NextSampleTime, FrameRate, InLocalFrameRate); - MaxSampleLocalTime = FFrameRate::TransformTime(MaxSampleTime, FrameRate, InLocalFrameRate); + struct FTimecodeInputSourceValidator + { + private: + + const FTimecodeSynchronizerCachedSyncState& SyncState; + + bool bTimecodeErrors = false; + int32 FoundOffset = 0; + + FFrameTime Newest; + FFrameTime Oldest; + + bool bAnySourcesHadRollover = false; + bool bAllSourcesHadRollover = false; + + public: + + FTimecodeInputSourceValidator(const FTimecodeSynchronizerCachedSyncState& InSyncState, const FTimecodeSynchronizerActiveTimecodedInputSource& InitialInputSource) : + SyncState(InSyncState) + { + 
ValidateSource(InitialInputSource); + if (AllSourcesAreValid()) + { + const FTimecodeSourceState& SynchronizerRelativeState = InitialInputSource.GetSynchronizerRelativeState(); + Newest = SynchronizerRelativeState.NewestAvailableSample; + Oldest = SynchronizerRelativeState.OldestAvailableSample; + bAnySourcesHadRollover = (SyncState.RolloverFrame.IsSet() && Newest < Oldest); + bAllSourcesHadRollover = bAnySourcesHadRollover; + } + } + + void UpdateFrameTimes(const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource) + { + ValidateSource(InputSource); + if (AllSourcesAreValid()) + { + const FTimecodeSourceState& SynchronizerRelativeState = InputSource.GetSynchronizerRelativeState(); + Oldest = FMath::Max(SynchronizerRelativeState.OldestAvailableSample, Oldest); + Newest = FMath::Min(SynchronizerRelativeState.NewestAvailableSample, Newest); + } + } + + const bool AllSourcesAreValid() const + { + return !FoundTimecodeErrors() && !FoundFrameRolloverMistmatch(); + } + + const bool FoundFrameRolloverMistmatch() const + { + return bAllSourcesHadRollover != bAnySourcesHadRollover; + } + + const bool FoundTimecodeErrors() const + { + return bTimecodeErrors; + } + + const bool DoAllSourcesContainFrame(const FFrameTime& FrameToCheck) const + { + if (FoundTimecodeErrors() || FoundFrameRolloverMistmatch()) + { + return false; + } + else if (!SyncState.RolloverFrame.IsSet() || !bAnySourcesHadRollover) + { + return (Oldest <= FrameToCheck) && (FrameToCheck <= Newest); + } + else + { + return UTimeSynchronizationSource::IsFrameBetweenWithRolloverModulus(FrameToCheck, Oldest, Newest, SyncState.RolloverFrame.GetValue()); + } + } + + const int32 CalculateOffsetNewest(const FFrameTime& FrameTime) const + { + // These cases should never happen, but they may be recoverable, so don't crash. 
+ ensureAlwaysMsgf(!FoundTimecodeErrors(), TEXT("FTimecodeInputSourceValidator::CalculateOffsetNewest - Called with TimecodeErrors")); + ensureAlwaysMsgf(!FoundFrameRolloverMistmatch(), TEXT("FTimecodeInputSourceValidater::CalculateOffsetNewest - Called with FrameRolloverMismatch")); + + bool bUnused_DidRollover; + return UTimeSynchronizationSource::FindDistanceBetweenFramesWithRolloverModulus(FrameTime, Newest, SyncState.RolloverFrame, bUnused_DidRollover); + } + + const int32 CalculateOffsetOldest(const FFrameTime& FrameTime) const + { + // These cases should never happen, but they may be recoverable, so don't crash. + ensureAlwaysMsgf(!FoundTimecodeErrors(), TEXT("FTimecodeInputSourceValidator::CalculateOffsetOldest - Called with TimecodeErrors")); + ensureAlwaysMsgf(!FoundFrameRolloverMistmatch(), TEXT("FTimecodeInputSourceValidater::CalculateOffsetOldest - Called with FrameRolloverMismatch")); + + bool bUnused_DidRollover; + + // Because we switched order of inputs, we need to flip the output as well. + return -UTimeSynchronizationSource::FindDistanceBetweenFramesWithRolloverModulus(Oldest, FrameTime, SyncState.RolloverFrame, bUnused_DidRollover); + } + + private: + + void ValidateSource(const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource) + { + const FTimecodeSourceState& SynchronizerRelativeState = InputSource.GetSynchronizerRelativeState(); + const FFrameTime& OldestSample = SynchronizerRelativeState.OldestAvailableSample; + const FFrameTime& NewestSample = SynchronizerRelativeState.NewestAvailableSample; + + const bool bUseRollover = SyncState.RolloverFrame.IsSet(); + const bool bSourceBufferHasRolledOver = (bUseRollover && OldestSample > NewestSample); + + if (!bUseRollover) + { + // If we're not using rollover, but Oldest time is later than the Newest time, then the source is + // reporting incorrect values. 
+ if (OldestSample > NewestSample) + { + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source %s reported out of order frame times (Oldest = %d | Newest = %d)"), + *InputSource.GetDisplayName(), OldestSample.GetFrame().Value, NewestSample.GetFrame().Value); + + bTimecodeErrors = true; + } + } + else + { + const FFrameTime& RolloverFrame = SyncState.RolloverFrame.GetValue(); + + // If we're using rollover, and either source has reported a value beyond where we expect to rollover, + // then the source is reporting incorrect values. + if ((OldestSample >= RolloverFrame) || (NewestSample >= RolloverFrame)) + { + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source %s reported frames that go beyond expected rollover point (Oldest = %d | Newest = %d | Rollover = %d"), + *InputSource.GetDisplayName(), OldestSample.GetFrame().Value, NewestSample.GetFrame().Value, RolloverFrame.GetFrame().Value); + + bTimecodeErrors = true; + } + + if (bSourceBufferHasRolledOver) + { + // See CalculateOffset for the justification + + // Since we think a rollover has occurred, then we'd expect the frame values to be relatively + // far apart. 
+ const int32 Offset = (OldestSample - NewestSample).GetFrame().Value; + if (FMath::Abs(Offset) < (RolloverFrame.GetFrame().Value / 2)) + { + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source %s reported out of order frame times (Oldest = %d | Newest = %d)"), + *InputSource.GetDisplayName(), OldestSample.GetFrame().Value, NewestSample.GetFrame().Value); + + bTimecodeErrors = true; + } + } + } + + bAllSourcesHadRollover &= bSourceBufferHasRolledOver; + bAnySourcesHadRollover |= bSourceBufferHasRolledOver; + } + }; + } /** * UTimecodeSynchronizer */ -UTimecodeSynchronizer::UTimecodeSynchronizer(const FObjectInitializer& ObjectInitializer) - : Super(ObjectInitializer) - , bUseCustomTimeStep(false) +UTimecodeSynchronizer::UTimecodeSynchronizer() + : bUseCustomTimeStep(false) , CustomTimeStep(nullptr) , FixedFrameRate(30, 1) - , TimecodeProviderType(ETimecodeSynchronizationTimecodeType::SystemTime) + , TimecodeProviderType(ETimecodeSynchronizationTimecodeType::TimecodeProvider) , TimecodeProvider(nullptr) , MasterSynchronizationSourceIndex(INDEX_NONE) , PreRollingTimecodeMarginOfErrors(4) , PreRollingTimeout(30.f) , State(ESynchronizationState::None) - , CurrentFrameTime(0) , StartPreRollingTime(0.0) , bRegistered(false) , PreviousFixedFrameRate(0.f) @@ -64,14 +209,24 @@ bool UTimecodeSynchronizer::CanEditChange(const UProperty* InProperty) const return false; } - if (InProperty->GetFName() == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, TimecodeProvider)) + const FName PropertyName = InProperty->GetFName(); + if (PropertyName == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, TimecodeProvider)) { return TimecodeProviderType == ETimecodeSynchronizationTimecodeType::TimecodeProvider; } - if (InProperty->GetFName() == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, MasterSynchronizationSourceIndex)) + else if (PropertyName == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, MasterSynchronizationSourceIndex)) { return TimecodeProviderType == 
ETimecodeSynchronizationTimecodeType::InputSource; } + else if (PropertyName == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, FrameOffset)) + { + return SyncMode == ETimecodeSynchronizationSyncMode::UserDefinedOffset; + } + else if (PropertyName == GET_MEMBER_NAME_CHECKED(UTimecodeSynchronizer, AutoFrameOffset)) + { + return (SyncMode == ETimecodeSynchronizationSyncMode::Auto) || + (SyncMode == ETimecodeSynchronizationSyncMode::AutoOldest); + } return true; } @@ -94,44 +249,64 @@ void UTimecodeSynchronizer::PostEditChangeChainProperty(FPropertyChangedChainEve } #endif -FFrameTime UTimecodeSynchronizer::ConvertTimecodeToFrameTime(const FTimecode& InTimecode) const -{ - return InTimecode.ToFrameNumber(GetFrameRate()); -} - -FTimecode UTimecodeSynchronizer::ConvertFrameTimeToTimecode(const FFrameTime& InFFrameTime) const -{ - const bool bIsDropFrame = FTimecode::IsDropFormatTimecodeSupported(GetFrameRate()); - return FTimecode::FromFrameNumber(InFFrameTime.FrameNumber, GetFrameRate(), bIsDropFrame); -} - FTimecode UTimecodeSynchronizer::GetTimecode() const { - if(TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource) + FTimecode Timecode; + if (IsSynchronized()) { - if(ActiveTimecodedInputSources.IsValidIndex(ActiveMasterSynchronizationTimecodedSourceIndex)) - { - FTimecodeSynchronizerActiveTimecodedInputSource TimecodedInputSource = ActiveTimecodedInputSources[ActiveMasterSynchronizationTimecodedSourceIndex]; - FFrameTime NextSampleTime = TimecodedInputSource.InputSource->GetNextSampleTime(); - if (NextSampleTime != 0) - { - TimecodedInputSource.NextSampleTime = NextSampleTime; - TimecodedInputSource.AvailableSampleCount = TimecodedInputSource.InputSource->GetAvailableSampleCount(); - TimecodedInputSource.ConvertToLocalFrameRate(GetFrameRate()); - } - return ConvertFrameTimeToTimecode(TimecodedInputSource.MaxSampleLocalTime); - } + Timecode = UTimeSynchronizationSource::ConvertFrameTimeToTimecode(CurrentSystemFrameTime.GetValue(), 
CachedSyncState.FrameRate); } - else if(TimecodeProviderType == ETimecodeSynchronizationTimecodeType::TimecodeProvider) + else if (IsSynchronizing()) { - if (RegisteredTimecodeProvider) - { - return RegisteredTimecodeProvider->GetTimecode(); - } + Timecode = UTimeSynchronizationSource::ConvertFrameTimeToTimecode(CurrentProviderFrameTime, CachedSyncState.FrameRate); + } + else + { + Timecode = UTimeSynchronizationSource::ConvertFrameTimeToTimecode(GetProviderFrameTime(), GetFrameRate()); } - FTimecode Result = UTimecodeProvider::GetSystemTimeTimecode(GetFrameRate()); - return Result; + return Timecode; +} + +FFrameTime UTimecodeSynchronizer::GetProviderFrameTime() const +{ + FFrameTime ProviderFrameTime; + + if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource) + { + if (SynchronizedSources.IsValidIndex(ActiveMasterSynchronizationTimecodedSourceIndex)) + { + const FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource = SynchronizedSources[ActiveMasterSynchronizationTimecodedSourceIndex]; + + if (GFrameCounter != LastUpdatedSources) + { + const_cast(TimecodedInputSource).UpdateSourceState(GetFrameRate()); + } + + if (TimecodedInputSource.IsReady()) + { + ProviderFrameTime = TimecodedInputSource.GetSynchronizerRelativeState().NewestAvailableSample; + } + else + { + UE_LOG(LogTimecodeSynchronizer, Log, TEXT("Unable to get frame time - Specified source was not ready.")); + } + } + else + { + UE_LOG(LogTimecodeSynchronizer, Log, TEXT("Unable to get frame time - Invalid source specified.")); + } + } + else + { + // In the case where we aren't registered, or we've registered ourselves, we'll use the engine default provider. + const bool bIsProviderValid = (RegisteredTimecodeProvider != nullptr && RegisteredTimecodeProvider != this); + const UTimecodeProvider* Provider = bIsProviderValid ? 
RegisteredTimecodeProvider : GEngine->GetDefaultTimecodeProvider(); + + ProviderFrameTime = FFrameTime(Provider->GetTimecode().ToFrameNumber(GetFrameRate())); + } + + return ProviderFrameTime; } FFrameRate UTimecodeSynchronizer::GetFrameRate() const @@ -141,18 +316,17 @@ FFrameRate UTimecodeSynchronizer::GetFrameRate() const ETimecodeProviderSynchronizationState UTimecodeSynchronizer::GetSynchronizationState() const { - switch(State) + switch (State) { - case ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider: - case ESynchronizationState::PreRolling_WaitReadiness: - case ESynchronizationState::PreRolling_Synchronizing: - case ESynchronizationState::PreRolling_Buffering: - return ETimecodeProviderSynchronizationState::Synchronizing; - case ESynchronizationState::Synchronized: - case ESynchronizationState::Rolling: - return ETimecodeProviderSynchronizationState::Synchronized; - case ESynchronizationState::Error: - return ETimecodeProviderSynchronizationState::Error; + case ESynchronizationState::Initializing: + case ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider: + case ESynchronizationState::PreRolling_WaitReadiness: + case ESynchronizationState::PreRolling_Synchronizing: + return ETimecodeProviderSynchronizationState::Synchronizing; + case ESynchronizationState::Synchronized: + return ETimecodeProviderSynchronizationState::Synchronized; + case ESynchronizationState::Error: + return ETimecodeProviderSynchronizationState::Error; } return ETimecodeProviderSynchronizationState::Closed; } @@ -162,112 +336,117 @@ bool UTimecodeSynchronizer::IsSynchronizing() const return State == ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider || State == ESynchronizationState::PreRolling_WaitReadiness || State == ESynchronizationState::PreRolling_Synchronizing - || State == ESynchronizationState::PreRolling_Buffering; + || State == ESynchronizationState::Initializing; } bool UTimecodeSynchronizer::IsSynchronized() const { - return State == 
ESynchronizationState::Synchronized - || State == ESynchronizationState::Rolling; + return State == ESynchronizationState::Synchronized; +} + +bool UTimecodeSynchronizer::IsError() const +{ + return State == ESynchronizationState::Error; } void UTimecodeSynchronizer::Register() { - // Set CustomTimeStep - bRegistered = false; - - if (bUseCustomTimeStep) + if (!bRegistered) { - if (GEngine->GetCustomTimeStep()) + bRegistered = true; + + if (bUseCustomTimeStep) { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Genlock source is already in place.")); - SwitchState(ESynchronizationState::Error); + if (GEngine->GetCustomTimeStep()) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Genlock source is already in place.")); + SwitchState(ESynchronizationState::Error); + return; + } + else if (!CustomTimeStep) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The Genlock source is not set.")); + SwitchState(ESynchronizationState::Error); + return; + } + else if (!GEngine->SetCustomTimeStep(CustomTimeStep)) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The Genlock source failed to be set on Engine.")); + SwitchState(ESynchronizationState::Error); + return; + } + + RegisteredCustomTimeStep = CustomTimeStep; + } + else + { + PreviousFixedFrameRate = GEngine->FixedFrameRate; + bPreviousUseFixedFrameRate = GEngine->bUseFixedFrameRate; + GEngine->FixedFrameRate = FixedFrameRate.AsDecimal(); + GEngine->bUseFixedFrameRate = true; } - if (!CustomTimeStep) + // Set TimecodeProvider + if (GEngine->GetTimecodeProvider() != GEngine->GetDefaultTimecodeProvider()) { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The Genlock source is not set.")); + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("A Timecode Provider is already in place.")); SwitchState(ESynchronizationState::Error); return; } - - if (!GEngine->SetCustomTimeStep(CustomTimeStep)) + else if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::TimecodeProvider && TimecodeProvider) { - 
UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The Genlock source failed to be set on Engine.")); - SwitchState(ESynchronizationState::Error); - return; + if (!GEngine->SetTimecodeProvider(TimecodeProvider)) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("TimecodeProvider failed to be set on Engine.")); + SwitchState(ESynchronizationState::Error); + return; + } + + RegisteredTimecodeProvider = TimecodeProvider; } - RegisteredCustomTimeStep = CustomTimeStep; - } - else - { - PreviousFixedFrameRate = GEngine->FixedFrameRate; - bPreviousUseFixedFrameRate = GEngine->bUseFixedFrameRate; - GEngine->FixedFrameRate = FixedFrameRate.AsDecimal(); - GEngine->bUseFixedFrameRate = true; - } - - // Set TimecodeProvider - if (GEngine->GetTimecodeProvider()) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("A Timecode Provider is already in place.")); - SwitchState(ESynchronizationState::Error); - } - else if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::TimecodeProvider) - { - if (!TimecodeProvider) + else { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("TimecodeProvider is not set.")); - SwitchState(ESynchronizationState::Error); - return; + if (!GEngine->SetTimecodeProvider(this)) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("TimecodeSynchronizer failed to be set as the TimecodeProvider for the Engine.")); + SwitchState(ESynchronizationState::Error); + return; + } + + RegisteredTimecodeProvider = this; } - if (!GEngine->SetTimecodeProvider(TimecodeProvider)) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("TimecodeProvider failed to be set on Engine.")); - SwitchState(ESynchronizationState::Error); - return; - } - RegisteredTimecodeProvider = TimecodeProvider; + SetTickEnabled(true); } - else - { - if (!GEngine->SetTimecodeProvider(this)) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("TimecodeSynchronizer failed to be set as the TimecodeProvider for the Engine.")); - SwitchState(ESynchronizationState::Error); - return; - } - 
RegisteredTimecodeProvider = this; - } - - bRegistered = true; - SetTickEnabled(bRegistered); } void UTimecodeSynchronizer::Unregister() { - UTimecodeProvider* Provider = GEngine->GetTimecodeProvider(); - if (Provider == RegisteredTimecodeProvider) + if (bRegistered) { - GEngine->SetTimecodeProvider(nullptr); - } - RegisteredTimecodeProvider = nullptr; + bRegistered = false; - UEngineCustomTimeStep* TimeStep = GEngine->GetCustomTimeStep(); - if (TimeStep == RegisteredCustomTimeStep) - { - GEngine->SetCustomTimeStep(nullptr); - } - else if (RegisteredCustomTimeStep == nullptr) - { - GEngine->FixedFrameRate = PreviousFixedFrameRate; - GEngine->bUseFixedFrameRate = bPreviousUseFixedFrameRate; - } - RegisteredCustomTimeStep = nullptr; + const UTimecodeProvider* Provider = GEngine->GetTimecodeProvider(); + if (Provider == RegisteredTimecodeProvider) + { + GEngine->SetTimecodeProvider(nullptr); + } + RegisteredTimecodeProvider = nullptr; - bRegistered = false; - SetTickEnabled(bRegistered); + UEngineCustomTimeStep* TimeStep = GEngine->GetCustomTimeStep(); + if (TimeStep == RegisteredCustomTimeStep) + { + GEngine->SetCustomTimeStep(nullptr); + } + else if (RegisteredCustomTimeStep == nullptr) + { + GEngine->FixedFrameRate = PreviousFixedFrameRate; + GEngine->bUseFixedFrameRate = bPreviousUseFixedFrameRate; + } + RegisteredCustomTimeStep = nullptr; + + SetTickEnabled(false); + } } void UTimecodeSynchronizer::SetTickEnabled(bool bEnabled) @@ -289,6 +468,9 @@ void UTimecodeSynchronizer::SetTickEnabled(bool bEnabled) void UTimecodeSynchronizer::Tick() { + UpdateSourceStates(); + CurrentProviderFrameTime = GetProviderFrameTime(); + Tick_Switch(); if (IsSynchronizing() && bUsePreRollingTimeout) @@ -302,201 +484,151 @@ void UTimecodeSynchronizer::Tick() } } -bool UTimecodeSynchronizer::StartPreRoll() +bool UTimecodeSynchronizer::StartSynchronization() { if (IsSynchronizing() || IsSynchronized()) { - UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Already synchronizing or 
synchronized.")); - return false; + UE_LOG(LogTimecodeSynchronizer, Log, TEXT("Already synchronizing or synchronized.")); + return true; } else { - StopInputSources(); - - ActiveMasterSynchronizationTimecodedSourceIndex = INDEX_NONE; - - // Go through all sources and select usable ones - for (int32 Index = 0; Index < TimeSynchronizationInputSources.Num(); ++Index) + if (!ensure(SynchronizedSources.Num() == 0) || !ensure(NonSynchronizedSources.Num() == 0) || !ensure(ActiveMasterSynchronizationTimecodedSourceIndex)) { - UTimeSynchronizationSource* InputSource = TimeSynchronizationInputSources[Index]; - if (InputSource) - { - if (InputSource->bUseForSynchronization && InputSource->Open()) - { - const int32 NewItemIndex = ActiveTimecodedInputSources.AddDefaulted(); - FTimecodeSynchronizerActiveTimecodedInputSource& NewSource = ActiveTimecodedInputSources[NewItemIndex]; - NewSource.InputSource = InputSource; - - if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource && Index == MasterSynchronizationSourceIndex) - { - ActiveMasterSynchronizationTimecodedSourceIndex = NewItemIndex; - } - } - else if (!InputSource->bUseForSynchronization && InputSource->Open()) - { - const int32 NewItemIndex = ActiveSynchronizedSources.AddDefaulted(); - FTimecodeSynchronizerActiveTimecodedInputSource& NewSource = ActiveSynchronizedSources[NewItemIndex]; - NewSource.InputSource = InputSource; - - //Stamp source FrameRate for time conversion - NewSource.FrameRate = InputSource->GetFrameRate(); - NewSource.bCanBeSynchronized = false; - } - } + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("StartSynchronization called without properly closing sources")); + CloseSources(); } - if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource && ActiveMasterSynchronizationTimecodedSourceIndex == INDEX_NONE) + SwitchState(ESynchronizationState::Initializing); + OpenSources(); + + if (SynchronizedSources.Num() == 0) + { + UE_LOG(LogTimecodeSynchronizer, Warning, 
TEXT("No sources available to synchronize.")); + SwitchState(ESynchronizationState::Error); + } + else if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource && ActiveMasterSynchronizationTimecodedSourceIndex == INDEX_NONE) { UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("The Master Synchronization Source could not be found.")); - } - - if (ActiveTimecodedInputSources.Num() > 0) - { - Register(); - } - - //Engage synchronization procedure only if we've successfully - if (bRegistered) - { - const bool bDoTick = true; - SwitchState(ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider, bDoTick); + SwitchState(ESynchronizationState::Error); } else { - StopInputSources(); + Register(); - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Couldn't start preroll. TimecodeSynchronizer is not registered. (Maybe there is no input sources)")); - SwitchState(ESynchronizationState::Error); + if (bRegistered) + { + SwitchState(ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider); + } } return bRegistered; } } -void UTimecodeSynchronizer::StopInputSources() +void UTimecodeSynchronizer::StopSynchronization() { - Unregister(); - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) + if (IsSynchronizing() || IsSynchronized() || IsError()) { - if (TimecodedInputSource.InputSource) - { - TimecodedInputSource.InputSource->Close(); - } - } + Unregister(); + CloseSources(); - for (FTimecodeSynchronizerActiveTimecodedInputSource& SynchronizedInputSource : ActiveSynchronizedSources) - { - if (SynchronizedInputSource.InputSource) - { - SynchronizedInputSource.InputSource->Close(); - } - } + LastUpdatedSources = 0; + CurrentSystemFrameTime.Reset(); + CurrentProviderFrameTime = FFrameTime(0); + StartPreRollingTime = 0.f; - CurrentFrameTime = FFrameTime(0); - ActiveTimecodedInputSources.Reset(); - ActiveSynchronizedSources.Reset(); - SwitchState(ESynchronizationState::None); - 
ActiveMasterSynchronizationTimecodedSourceIndex = INDEX_NONE; + SwitchState(ESynchronizationState::None); + } } -void UTimecodeSynchronizer::SwitchState(const ESynchronizationState NewState, const bool bDoTick) +void UTimecodeSynchronizer::SwitchState(const ESynchronizationState NewState) { if (NewState != State) { State = NewState; - //Do State entering procedure and tick if required + // Do any setup that needs to happen to "enter" the state. switch (NewState) { - case ESynchronizationState::None: - { + case ESynchronizationState::Initializing: + CachedSyncState.FrameRate = GetFrameRate(); + CachedSyncState.SyncMode = SyncMode; + CachedSyncState.FrameOffset = FrameOffset; + + // System time inherently has rollover. + if (bWithRollover) + { + // In most cases, rollover occurs on 24 periods. + // TODO: Make this configurable + CachedSyncState.RolloverFrame = FTimecode(24, 0, 0, 0, false).ToFrameNumber(CachedSyncState.FrameRate); + } + else + { + CachedSyncState.RolloverFrame.Reset(); + } + break; - } + case ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider: - { StartPreRollingTime = FApp::GetCurrentTime(); SynchronizationEvent.Broadcast(ETimecodeSynchronizationEvent::SynchronizationStarted); - if (bDoTick) - { - TickPreRolling_WaitGenlockTimecodeProvider(); - } break; - } - case ESynchronizationState::PreRolling_WaitReadiness: - { - if (bDoTick) - { - TickPreRolling_WaitReadiness(); - } - break; - } - case ESynchronizationState::PreRolling_Synchronizing: - { - if (bDoTick) - { - TickPreRolling_Synchronizing(); - } - break; - } - case ESynchronizationState::PreRolling_Buffering: - { - if (bDoTick) - { - TickPreRolling_Buffering(); - } - break; - } + case ESynchronizationState::Synchronized: - { - bSourceStarted = false; + StartSources(); SynchronizationEvent.Broadcast(ETimecodeSynchronizationEvent::SynchronizationSucceeded); - if (bDoTick) - { - TickSynchronized(); - } break; - } + case ESynchronizationState::Error: - { - EnterStateError(); - if 
(bDoTick) - { - TickError(); - } + StopSynchronization(); + SynchronizationEvent.Broadcast(ETimecodeSynchronizationEvent::SynchronizationFailed); break; - } + default: - { - SetTickEnabled(false); break; - } }; + + Tick_Switch(); } } void UTimecodeSynchronizer::Tick_Switch() { +#define CONDITIONALLY_CALL_TICK(TickFunc) {if (ShouldTick()) {TickFunc();}} + switch (State) { + case ESynchronizationState::Initializing: + break; + case ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider: - TickPreRolling_WaitGenlockTimecodeProvider(); + CONDITIONALLY_CALL_TICK(TickPreRolling_WaitGenlockTimecodeProvider); break; + case ESynchronizationState::PreRolling_WaitReadiness: - TickPreRolling_WaitReadiness(); + CONDITIONALLY_CALL_TICK(TickPreRolling_WaitReadiness); break; + case ESynchronizationState::PreRolling_Synchronizing: - TickPreRolling_Synchronizing(); - break; - case ESynchronizationState::PreRolling_Buffering: - TickPreRolling_Buffering(); + CONDITIONALLY_CALL_TICK(TickPreRolling_Synchronizing); break; + case ESynchronizationState::Synchronized: - TickSynchronized(); + CONDITIONALLY_CALL_TICK(Tick_Synchronized); break; + default: SetTickEnabled(false); break; } + +#undef CONDITIONALLY_CALL_TICK +} + +bool UTimecodeSynchronizer::ShouldTick() +{ + return Tick_TestGenlock() && Tick_TestTimecode(); } bool UTimecodeSynchronizer::Tick_TestGenlock() @@ -518,7 +650,6 @@ bool UTimecodeSynchronizer::Tick_TestGenlock() } const ECustomTimeStepSynchronizationState SynchronizationState = RegisteredCustomTimeStep->GetSynchronizationState(); - if (SynchronizationState != ECustomTimeStepSynchronizationState::Synchronized && SynchronizationState != ECustomTimeStepSynchronizationState::Synchronizing) { UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The Genlock source stopped while synchronizing.")); @@ -550,7 +681,6 @@ bool UTimecodeSynchronizer::Tick_TestTimecode() } const ETimecodeProviderSynchronizationState SynchronizationState = 
RegisteredTimecodeProvider->GetSynchronizationState(); - if (SynchronizationState != ETimecodeProviderSynchronizationState::Synchronized && SynchronizationState != ETimecodeProviderSynchronizationState::Synchronizing) { UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The TimecodeProvider stopped while synchronizing.")); @@ -558,6 +688,11 @@ bool UTimecodeSynchronizer::Tick_TestTimecode() return false; } + if (RegisteredTimecodeProvider == this) + { + return true; + } + if (SynchronizationState == ETimecodeProviderSynchronizationState::Synchronized) { if (RegisteredTimecodeProvider->GetFrameRate() != GetFrameRate()) @@ -571,294 +706,270 @@ bool UTimecodeSynchronizer::Tick_TestTimecode() } else if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource) { - if (!ActiveTimecodedInputSources.IsValidIndex(ActiveMasterSynchronizationTimecodedSourceIndex)) + if (!SynchronizedSources.IsValidIndex(ActiveMasterSynchronizationTimecodedSourceIndex)) { UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The InputSource '%d' that we try to synchronize on is not valid."), ActiveMasterSynchronizationTimecodedSourceIndex); SwitchState(ESynchronizationState::Error); return false; } - if (ActiveTimecodedInputSources[ActiveMasterSynchronizationTimecodedSourceIndex].InputSource == nullptr) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("The InputSource '%d' doesn't have an input source."), ActiveMasterSynchronizationTimecodedSourceIndex); - SwitchState(ESynchronizationState::Error); - return false; - } - - return ActiveTimecodedInputSources[ActiveMasterSynchronizationTimecodedSourceIndex].InputSource->IsReady(); + return SynchronizedSources[ActiveMasterSynchronizationTimecodedSourceIndex].IsReady(); } return true; } void UTimecodeSynchronizer::TickPreRolling_WaitGenlockTimecodeProvider() { - const bool bCustomTimeStepReady = Tick_TestGenlock(); - const bool bTimecodeProvider = Tick_TestTimecode(); - - if (bCustomTimeStepReady && bTimecodeProvider) - { - const bool bDoTick = 
true; - SwitchState(ESynchronizationState::PreRolling_WaitReadiness, bDoTick); - } + SwitchState(ESynchronizationState::PreRolling_WaitReadiness); } void UTimecodeSynchronizer::TickPreRolling_WaitReadiness() { - const bool bCustomTimeStepReady = Tick_TestGenlock(); - const bool bTimecodeProvider = Tick_TestTimecode(); - if (!bCustomTimeStepReady || !bTimecodeProvider) - { - return; - } - bool bAllSourceAreReady = true; - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) + + for (const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : SynchronizedSources) { - check(TimecodedInputSource.InputSource); - - bool bIsReady = TimecodedInputSource.InputSource->IsReady(); - if (bIsReady != TimecodedInputSource.bIsReady) + if (InputSource.IsReady()) { - if (bIsReady) + const FFrameRate SourceFrameRate = InputSource.GetFrameRate(); + if (!SourceFrameRate.IsMultipleOf(CachedSyncState.FrameRate) && !SourceFrameRate.IsFactorOf(CachedSyncState.FrameRate)) { - check(TimecodedInputSource.InputSource); - - TimecodedInputSource.AvailableSampleCount = TimecodedInputSource.InputSource->GetAvailableSampleCount(); - bIsReady = bIsReady && TimecodedInputSource.AvailableSampleCount > 0; - - if (!TimecodedInputSource.bIsReady && bIsReady) - { - //Stamp source FrameRate for time conversion - TimecodedInputSource.FrameRate = TimecodedInputSource.InputSource->GetFrameRate(); - if (!TimecodedInputSource.FrameRate.IsMultipleOf(GetFrameRate()) && !TimecodedInputSource.FrameRate.IsFactorOf(GetFrameRate())) - { - UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source %s doesn't have a frame rate common to TimecodeSynchronizer frame rate."), *TimecodedInputSource.InputSource->GetDisplayName()) - } - } - - TimecodedInputSource.bIsReady = bIsReady; + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source %s doesn't have a frame rate common to TimecodeSynchronizer frame rate."), *InputSource.GetDisplayName()) } } - - bAllSourceAreReady = 
bAllSourceAreReady && bIsReady; + else + { + bAllSourceAreReady = false; + } } if (bAllSourceAreReady) { - const bool bDoTick = true; - SwitchState(ESynchronizationState::PreRolling_Synchronizing, bDoTick); + SwitchState(ESynchronizationState::PreRolling_Synchronizing); } } void UTimecodeSynchronizer::TickPreRolling_Synchronizing() { - const bool bCustomTimeStepReady = Tick_TestGenlock(); - const bool bTimecodeProvider = Tick_TestTimecode(); - if (!bCustomTimeStepReady || !bTimecodeProvider) + TimecodeSynchronizerPrivate::FTimecodeInputSourceValidator Validator(CachedSyncState, SynchronizedSources[0]); + for (int32 i = 1; i < SynchronizedSources.Num(); ++i) { - return; + Validator.UpdateFrameTimes(SynchronizedSources[i]); } - // Fetch each sources samples time and early exit if a source isn`t ready - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) + if (Validator.AllSourcesAreValid()) { - check(TimecodedInputSource.InputSource); - - FFrameTime NextSampleTime = TimecodedInputSource.InputSource->GetNextSampleTime(); - if (NextSampleTime != 0) + switch (CachedSyncState.SyncMode) { - TimecodedInputSource.NextSampleTime = NextSampleTime; - TimecodedInputSource.AvailableSampleCount = TimecodedInputSource.InputSource->GetAvailableSampleCount(); - TimecodedInputSource.ConvertToLocalFrameRate(GetFrameRate()); - } + case ETimecodeSynchronizationSyncMode::Auto: + ActualFrameOffset = Validator.CalculateOffsetNewest(CurrentProviderFrameTime) - AutoFrameOffset; + break; - const bool bIsReady = TimecodedInputSource.InputSource->IsReady(); - if (!bIsReady) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Source '%s' stopped while synchronizing."), *TimecodedInputSource.InputSource->GetDisplayName()); - SwitchState(ESynchronizationState::Error); - return; - } - } + case ETimecodeSynchronizationSyncMode::AutoOldest: + ActualFrameOffset = Validator.CalculateOffsetOldest(CurrentProviderFrameTime) + AutoFrameOffset; + break; - 
FFrameTime NewSynchronizedTime = ConvertTimecodeToFrameTime(GetTimecode()); - - // Check if all inputs have that valid FrameTime - bool bDoContains = true; - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) - { - TimecodedInputSource.bCanBeSynchronized = TimecodedInputSource.NextSampleLocalTime <= NewSynchronizedTime && NewSynchronizedTime <= TimecodedInputSource.MaxSampleLocalTime; - if (!TimecodedInputSource.bCanBeSynchronized) - { - bDoContains = false; + default: + ActualFrameOffset = CachedSyncState.FrameOffset; break; } - if (bUsePreRollingTimecodeMarginOfErrors) + if (Validator.DoAllSourcesContainFrame(CalculateSyncTime())) { - const FFrameTime Difference = TimecodedInputSource.MaxSampleLocalTime - NewSynchronizedTime; - if (Difference.FrameNumber.Value > PreRollingTimecodeMarginOfErrors) - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("PreRollingTimecodeMarginOfErrors '%s'."), *TimecodedInputSource.InputSource->GetDisplayName()); - SwitchState(ESynchronizationState::Error); - return; - } - } - } - - if (bDoContains) - { - CurrentFrameTime = NewSynchronizedTime; - - const bool bDoTick = true; - SwitchState(ESynchronizationState::PreRolling_Buffering, bDoTick); - } -} - -void UTimecodeSynchronizer::TickPreRolling_Buffering() -{ - const bool bCustomTimeStepReady = Tick_TestGenlock(); - const bool bTimecodeProvider = Tick_TestTimecode(); - if (!bCustomTimeStepReady || !bTimecodeProvider) - { - return; - } - - // Wait for all the NumberOfExtraBufferedFrame - bool bAllBuffered = true; - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) - { - check(TimecodedInputSource.InputSource); - if (TimecodedInputSource.InputSource->NumberOfExtraBufferedFrame > 0) - { - FFrameTime NextSampleTime = TimecodedInputSource.InputSource->GetNextSampleTime(); - if (NextSampleTime != 0) - { - TimecodedInputSource.NextSampleTime = NextSampleTime; - 
TimecodedInputSource.AvailableSampleCount = TimecodedInputSource.InputSource->GetAvailableSampleCount(); - TimecodedInputSource.ConvertToLocalFrameRate(GetFrameRate()); - } - - bool bIsReady = TimecodedInputSource.InputSource->IsReady(); - if (bIsReady && TimecodedInputSource.AvailableSampleCount > 0) - { - //Count buffered frame from the selected start time and not from this source next sample time - const FFrameTime NextSampleDelta = TimecodedInputSource.NextSampleLocalTime - CurrentFrameTime; - const int32 FrameCountAfterStartTime = TimecodedInputSource.AvailableSampleCount - NextSampleDelta.AsDecimal(); - if (FrameCountAfterStartTime < TimecodedInputSource.InputSource->NumberOfExtraBufferedFrame) - { - bAllBuffered = false; - break; - } - } - else - { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Source '%s' stopped while buffering."), *TimecodedInputSource.InputSource->GetDisplayName()); - SwitchState(ESynchronizationState::Error); - break; - } - } - } - - if (bAllBuffered) - { - const bool bCanProceed = AreSourcesReady(); - if (bCanProceed) - { - const bool bDoTick = false; - SwitchState(ESynchronizationState::Synchronized, bDoTick); + SwitchState(ESynchronizationState::Synchronized); } } } -void UTimecodeSynchronizer::TickSynchronized() +void UTimecodeSynchronizer::Tick_Synchronized() { - const bool bCustomTimeStepReady = Tick_TestGenlock(); - const bool bTimecodeProvider = Tick_TestTimecode(); - if (!bCustomTimeStepReady || !bTimecodeProvider) - { - return; - } + // Sanity check to make sure all sources still have valid frames. 
+ CurrentSystemFrameTime = CalculateSyncTime(); + const FFrameTime& UseFrameTime = CurrentSystemFrameTime.GetValue(); - if (!bSourceStarted) + if (CachedSyncState.RolloverFrame.IsSet()) { - StartSources(); - bSourceStarted = true; - UE_LOG(LogTimecodeSynchronizer, Log, TEXT("TimecodeProvider synchronized at %s"), *FApp::GetTimecode().ToString()); - } - - CurrentFrameTime = ConvertTimecodeToFrameTime(FApp::GetTimecode()); - - // Test if all sources have the frame - for (FTimecodeSynchronizerActiveTimecodedInputSource& TimecodedInputSource : ActiveTimecodedInputSources) - { - FFrameTime NextSampleTime = TimecodedInputSource.InputSource->GetNextSampleTime(); - if (NextSampleTime != 0) + for (const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : SynchronizedSources) { - TimecodedInputSource.NextSampleTime = NextSampleTime; - TimecodedInputSource.AvailableSampleCount = TimecodedInputSource.InputSource->GetAvailableSampleCount(); - TimecodedInputSource.ConvertToLocalFrameRate(GetFrameRate()); - } - - const bool bIsReady = TimecodedInputSource.InputSource->IsReady(); - if (bIsReady) - { - const bool bDoContains = TimecodedInputSource.NextSampleLocalTime <= CurrentFrameTime && CurrentFrameTime <= TimecodedInputSource.MaxSampleLocalTime; - if (!bDoContains) + const FTimecodeSourceState& SynchronizerRelativeState = InputSource.GetSynchronizerRelativeState(); + if (!UTimeSynchronizationSource::IsFrameBetweenWithRolloverModulus(UseFrameTime, SynchronizerRelativeState.OldestAvailableSample, SynchronizerRelativeState.NewestAvailableSample, CachedSyncState.RolloverFrame.GetValue())) { - UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source '%s' doesn't have the timecode ready."), *TimecodedInputSource.InputSource->GetDisplayName()); + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source '%s' doesn't have the timecode ready."), *InputSource.GetDisplayName()); } } + } + else + { + for (const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : 
SynchronizedSources) + { + const FTimecodeSourceState& SynchronizerRelativeState = InputSource.GetSynchronizerRelativeState(); + if (SynchronizerRelativeState.OldestAvailableSample > UseFrameTime || UseFrameTime > SynchronizerRelativeState.NewestAvailableSample) + { + UE_LOG(LogTimecodeSynchronizer, Warning, TEXT("Source '%s' doesn't have the timecode ready."), *InputSource.GetDisplayName()); + } + } + } +} + +const bool FTimecodeSynchronizerActiveTimecodedInputSource::UpdateSourceState(const FFrameRate& SynchronizerFrameRate) +{ + check(InputSource); + + bIsReady = InputSource->IsReady(); + + if (bIsReady) + { + FrameRate = InputSource->GetFrameRate(); + + InputSourceState.NewestAvailableSample = InputSource->GetNewestSampleTime(); + InputSourceState.OldestAvailableSample = InputSource->GetOldestSampleTime(); + + if (FrameRate != SynchronizerFrameRate) + { + SynchronizerRelativeState.NewestAvailableSample = FFrameRate::TransformTime(InputSourceState.NewestAvailableSample, FrameRate, SynchronizerFrameRate); + SynchronizerRelativeState.OldestAvailableSample = FFrameRate::TransformTime(InputSourceState.OldestAvailableSample, FrameRate, SynchronizerFrameRate); + } else { - UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Source '%s' stopped when all sources were synchronized."), *TimecodedInputSource.InputSource->GetDisplayName()); - SwitchState(ESynchronizationState::Error); - } - } -} - -void UTimecodeSynchronizer::EnterStateError() -{ - StopInputSources(); - SynchronizationEvent.Broadcast(ETimecodeSynchronizationEvent::SynchronizationFailed); -} - -void UTimecodeSynchronizer::TickError() -{ - -} - -bool UTimecodeSynchronizer::AreSourcesReady() const -{ - for (const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : ActiveTimecodedInputSources) - { - if (!InputSource.InputSource->IsReady()) - { - return false; + SynchronizerRelativeState = InputSourceState; } } - for (const FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : 
ActiveSynchronizedSources) - { - if (!InputSource.InputSource->IsReady()) - { - return false; - } - } - - return true; + return bIsReady; } void UTimecodeSynchronizer::StartSources() { - for (FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : ActiveTimecodedInputSources) + FTimeSynchronizationStartData StartData; + CurrentSystemFrameTime = StartData.StartFrame = CalculateSyncTime(); + + FApp::SetTimecodeAndFrameRate(GetTimecode(), GetFrameRate()); + + for (UTimeSynchronizationSource* InputSource : TimeSynchronizationInputSources) { - InputSource.InputSource->Start(); + if (InputSource != nullptr) + { + InputSource->Start(StartData); + } + } +} + +void UTimecodeSynchronizer::OpenSources() +{ + FTimeSynchronizationOpenData OpenData; + OpenData.RolloverFrame = CachedSyncState.RolloverFrame; + OpenData.SynchronizationFrameRate = CachedSyncState.FrameRate; + for (int32 Index = 0; Index < TimeSynchronizationInputSources.Num(); ++Index) + { + if (UTimeSynchronizationSource* InputSource = TimeSynchronizationInputSources[Index]) + { + if (InputSource->Open(OpenData)) + { + if (InputSource->bUseForSynchronization) + { + FTimecodeSynchronizerActiveTimecodedInputSource& NewSource = SynchronizedSources.Emplace_GetRef(InputSource); + if (TimecodeProviderType == ETimecodeSynchronizationTimecodeType::InputSource && Index == MasterSynchronizationSourceIndex) + { + ActiveMasterSynchronizationTimecodedSourceIndex = SynchronizedSources.Num() - 1; + } + } + else + { + NonSynchronizedSources.Emplace(InputSource); + } + } + } + } +} + +void UTimecodeSynchronizer::CloseSources() +{ + for (UTimeSynchronizationSource* InputSource : TimeSynchronizationInputSources) + { + if (InputSource != nullptr) + { + InputSource->Close(); + } } - for (FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : ActiveSynchronizedSources) + SynchronizedSources.Reset(); + NonSynchronizedSources.Reset(); + ActiveMasterSynchronizationTimecodedSourceIndex = INDEX_NONE; +} + +void 
UTimecodeSynchronizer::UpdateSourceStates() +{ + // Update all of our source states. + if (GFrameCounter != LastUpdatedSources) { - InputSource.InputSource->Start(); + LastUpdatedSources = GFrameCounter; + + // If we're in the process of synchronizing, or have already achieved synchronization, + // we don't expect sources to become unready. If they do, that's an error. + // This is only relevant to + const bool bTreatUnreadyAsError = (State > ESynchronizationState::PreRolling_WaitReadiness); + TArray UnreadySources; + TArray InvalidSources; + + const FFrameRate FrameRate = GetFrameRate(); + for (FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : SynchronizedSources) + { + InputSource.UpdateSourceState(FrameRate); + if (!InputSource.IsInputSourceValid()) + { + InvalidSources.Add(&InputSource); + } + else if (!InputSource.IsReady()) + { + UnreadySources.Add(&InputSource); + } + } + + // Don't track readiness for these sources, they are not actively being used. + for (FTimecodeSynchronizerActiveTimecodedInputSource& InputSource : NonSynchronizedSources) + { + InputSource.UpdateSourceState(FrameRate); + if (!InputSource.IsInputSourceValid()) + { + InvalidSources.Add(&InputSource); + } + } + + const FString StateString = SynchronizationStateToString(State); + if (InvalidSources.Num() > 0) + { + for (const FTimecodeSynchronizerActiveTimecodedInputSource* UnreadySource : UnreadySources) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Invalid source found unready during State '%s'"), *StateString); + } + } + + // Process our unready sources. + // This is done here to keep the loops above fairly clean. 
+ if (bTreatUnreadyAsError && UnreadySources.Num() > 0) + { + for (const FTimecodeSynchronizerActiveTimecodedInputSource* UnreadySource : UnreadySources) + { + UE_LOG(LogTimecodeSynchronizer, Error, TEXT("Source '%s' became unready during State '%s'"), *(UnreadySource->GetDisplayName()), *StateString); + } + } + + if (InvalidSources.Num() > 0 || (bTreatUnreadyAsError && UnreadySources.Num() > 0)) + { + SwitchState(ESynchronizationState::Error); + } + } +} + +FFrameTime UTimecodeSynchronizer::CalculateSyncTime() +{ + if (CachedSyncState.RolloverFrame.IsSet()) + { + return UTimeSynchronizationSource::AddOffsetWithRolloverModulus(CurrentProviderFrameTime, ActualFrameOffset, CachedSyncState.RolloverFrame.GetValue()); + } + else + { + return CurrentProviderFrameTime + ActualFrameOffset; } } diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizerModule.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizerModule.cpp index 7a24f0cadfe4..e0575157507f 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizerModule.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Private/TimecodeSynchronizerModule.cpp @@ -45,7 +45,7 @@ public: { if (UTimecodeSynchronizer* Asset = MediaAsset.LoadSynchronous()) { - Asset->StartPreRoll(); + Asset->StartSynchronization(); } } return true; @@ -58,7 +58,7 @@ public: { if (UTimecodeSynchronizer* Asset = MediaAsset.LoadSynchronous()) { - Asset->StopInputSources(); + Asset->StopSynchronization(); } } return true; diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/MediaPlayerTimeSynchronizationSource.h b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/MediaPlayerTimeSynchronizationSource.h index 798318833c10..745273d6ef7c 100644 --- 
a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/MediaPlayerTimeSynchronizationSource.h +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/MediaPlayerTimeSynchronizationSource.h @@ -36,13 +36,18 @@ public: //~ End UObject Interface //~ Begin UTimeSynchronizationSource Interface - virtual FFrameTime GetNextSampleTime() const override; - virtual int32 GetAvailableSampleCount() const override; + virtual FFrameTime GetNewestSampleTime() const override; + virtual FFrameTime GetOldestSampleTime() const override; virtual FFrameRate GetFrameRate() const override; virtual bool IsReady() const override; - virtual bool Open() override; - virtual void Start() override; + virtual bool Open(const FTimeSynchronizationOpenData& InOpenData) override; + virtual void Start(const FTimeSynchronizationStartData& InStartData) override; virtual void Close() override; virtual FString GetDisplayName() const override; //~ End UTimeSynchronizationSource Interface + +private: + + TOptional OpenData; + TOptional StartData; }; diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/TimecodeSynchronizer.h b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/TimecodeSynchronizer.h index c6da278e2c6c..1fb413eaddf4 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/TimecodeSynchronizer.h +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizer/Public/TimecodeSynchronizer.h @@ -2,66 +2,50 @@ #pragma once +#include "CoreMinimal.h" #include "Engine/TimecodeProvider.h" #include "Delegates/Delegate.h" +#include "Delegates/DelegateCombinations.h" #include "TimeSynchronizationSource.h" +#include "Misc/QualifiedFrameTime.h" + #include "TimecodeSynchronizer.generated.h" - class UFixedFrameRateCustomTimeStep; -class UTimeSynchronizationSource; - -USTRUCT() -struct FTimecodeSynchronizerActiveTimecodedInputSource +/** + * Defines the 
various modes that the synchronizer can use to try and achieve synchronization. + */ +UENUM() +enum class ETimecodeSynchronizationSyncMode { - GENERATED_BODY() + /** + * User will specify an offset (number of frames) from the Timecode Source (see ETimecodeSycnrhonizationTimecodeType). + * This offset may be positive or negative depending on the latency of the source. + * Synchronization will be achieved once the synchronizer detects all input sources have frames that correspond + * with the offset timecode. + * + * This is suitable for applications trying to keep multiple UE4 instances in sync while using nDisplay / genlock. + */ + UserDefinedOffset, - FTimecodeSynchronizerActiveTimecodedInputSource() - : InputSource(nullptr) - , bIsReady(false) - , bCanBeSynchronized(false) - , NextSampleTime(0) - , AvailableSampleCount(0) - , FrameRate(30, 1) - , NextSampleLocalTime(0) - , MaxSampleLocalTime(0) - {} + /** + * Engine will try and automatically determine an appropriate offset based on what frames are available + * on the given sources. + * + * This is suitable for running a single UE4 instance that just wants to synchronize its inputs. 
+ */ + Auto, - /* Associated source pointers */ - UPROPERTY(VisibleAnywhere, Transient, Category=Debug) - UTimeSynchronizationSource* InputSource; - - /* Flag stating if the source is ready */ - UPROPERTY(VisibleAnywhere, Transient, Category=Debug) - bool bIsReady; - - /* Flag stating if this source can be synchronized */ - UPROPERTY(VisibleAnywhere, Transient, Category=Debug) - bool bCanBeSynchronized; - - /* Next sample FrameTime */ - FFrameTime NextSampleTime; - - /* Available sample count in the source */ - int32 AvailableSampleCount; - - /* The source FrameRate */ - FFrameRate FrameRate; - - /* Local Time of the next sample of the source */ - FFrameTime NextSampleLocalTime; - - /* Local Time of the maximum sample of the source */ - FFrameTime MaxSampleLocalTime; - - /* Convert source Timecode to FrameTime based on TimecodeSynchronizer FrameRate */ - void ConvertToLocalFrameRate(const FFrameRate& InLocalFrameRate); + /** + * The same as Auto except that instead of trying to find a suitable timecode nearest to the + * newest common frame, we try to find a suitable timecode nearest to the oldest common frame. + */ + AutoOldest, }; - /** * Enumerates Timecode source type. */ @@ -73,9 +57,6 @@ enum class ETimecodeSynchronizationTimecodeType /** Use one of the InputSource as the Timecode Provider. */ InputSource, - - /** Use one of the SystemTime as the Timecode Provider. */ - SystemTime, }; /** @@ -93,15 +74,170 @@ enum class ETimecodeSynchronizationEvent SynchronizationSucceeded, }; +/** Cached values to use during synchronization / while synchronized */ +struct FTimecodeSynchronizerCachedSyncState +{ + /** If we're using rollover, the frame time that represents the rollover point (e.g., the modulus). */ + TOptional RolloverFrame; + + /** The FrameRate of the synchronizer. */ + FFrameRate FrameRate; + + /** Synchronization mode that's being used. 
*/ + ETimecodeSynchronizationSyncMode SyncMode; + + /** Frame offset that will be used if SyncMode != Auto; */ + int32 FrameOffset; +}; + +/** Cached frame values for a given source. */ +struct FTimecodeSourceState +{ + /** Frame time of the newest available sample. */ + FFrameTime NewestAvailableSample; + + /** Frame time of the oldest available sample. */ + FFrameTime OldestAvailableSample; +}; + /** - * + * Provides a wrapper around a UTimeSynchronizerSource, and caches data necessary + * to provide synchronization. + * + * The values are typically updated once per frame. + */ +USTRUCT() +struct FTimecodeSynchronizerActiveTimecodedInputSource +{ + GENERATED_BODY() + +public: + + FTimecodeSynchronizerActiveTimecodedInputSource() + : bIsReady(false) + , bCanBeSynchronized(false) + , TotalNumberOfSamples(0) + , FrameRate(60, 1) + , InputSource(nullptr) + { + } + + FTimecodeSynchronizerActiveTimecodedInputSource(UTimeSynchronizationSource* Source) + : bIsReady(false) + , bCanBeSynchronized(Source->bUseForSynchronization) + , TotalNumberOfSamples(0) + , FrameRate(60, 1) + , InputSource(Source) + { + } + + /** Updates the internal state of this source, returning whether or not the source is ready (e.g. IsReady() == true). */ + const bool UpdateSourceState(const FFrameRate& SynchronizerFrameRate); + + FORCEINLINE const UTimeSynchronizationSource* GetInputSource() const + { + return InputSource; + } + + FORCEINLINE bool IsInputSourceValid() const + { + return nullptr != InputSource; + } + + FORCEINLINE FString GetDisplayName() const + { + return InputSource->GetDisplayName(); + } + + /** Whether or not this source is ready. */ + FORCEINLINE bool IsReady() const + { + return bIsReady; + } + + /** Whether or not this source can be synchronized. */ + FORCEINLINE bool CanBeSynchronized() const + { + return bCanBeSynchronized; + } + + /** Gets the FrameRate of the source. 
*/ + FORCEINLINE const FFrameRate& GetFrameRate() const + { + return FrameRate; + } + + /** Gets the state of the Source relative to its own frame rate. */ + FORCEINLINE const FTimecodeSourceState& GetInputSourceState() const + { + return InputSourceState; + } + + /** Gets the state of the Source relative to the Synchronizer's frame rate. */ + FORCEINLINE const FTimecodeSourceState& GetSynchronizerRelativeState() const + { + return SynchronizerRelativeState; + } + +private: + + /* Flag stating if the source is ready */ + UPROPERTY(VisibleAnywhere, Transient, Category=Debug, Meta=(DisplayName = "Is Ready")) + bool bIsReady; + + /* Flag stating if this source can be synchronized */ + UPROPERTY(VisibleAnywhere, Transient, Category=Debug, Meta=(DisplayName = "Can Be Synchronized")) + bool bCanBeSynchronized; + + UPROPERTY(VisibleAnywhere, Transient, Category=Debug) + int32 TotalNumberOfSamples; + + FFrameRate FrameRate; + + FTimecodeSourceState InputSourceState; + FTimecodeSourceState SynchronizerRelativeState; + + /* Associated source pointers */ + UPROPERTY(VisibleAnywhere, Transient, Category = Debug, Meta=(DisplayName="Input Source")) + UTimeSynchronizationSource* InputSource; + + FTimecodeSynchronizerActiveTimecodedInputSource(const FTimecodeSynchronizerActiveTimecodedInputSource&) = delete; + FTimecodeSynchronizerActiveTimecodedInputSource& operator=(const FTimecodeSynchronizerActiveTimecodedInputSource&) = delete; + FTimecodeSynchronizerActiveTimecodedInputSource(FTimecodeSynchronizerActiveTimecodedInputSource&&) = delete; + FTimecodeSynchronizerActiveTimecodedInputSource& operator=(FTimecodeSynchronizerActiveTimecodedInputSource&&) = delete; +}; + +template<> +struct TStructOpsTypeTraits : public TStructOpsTypeTraitsBase2 +{ + enum + { + WithCopy = false + }; +}; + +/** + * Timecode Synchronizer is intended to correlate multiple timecode sources to help ensure + * that all sources can produce data that is frame aligned. 
+ * + * This typically works by having sources buffer data until we have enough frames that + * such that we can find an overlap. Once that process is finished, the Synchronizer will + * provide the appropriate timecode to the engine (which can be retrieved via FApp::GetTimecode + * and FApp::GetTimecodeFrameRate). + * + * Note, the Synchronizer doesn't perform any buffering of data itself (that is left up to + * TimeSynchronizationSources). Instead, the synchronizer simply acts as a coordinator + * making sure all sources are ready, determining if sync is possible, etc. */ UCLASS() class TIMECODESYNCHRONIZER_API UTimecodeSynchronizer : public UTimecodeProvider { - GENERATED_UCLASS_BODY() + GENERATED_BODY() public: + + UTimecodeSynchronizer(); + //~ Begin UObject Interface virtual void BeginDestroy() override; #if WITH_EDITOR @@ -110,21 +246,40 @@ public: #endif //~ End UObject Interface + //~ Begin TimecodeProvider Interface + virtual FTimecode GetTimecode() const override; + virtual FFrameRate GetFrameRate() const override; + virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override; + virtual bool Initialize(class UEngine* InEngine) override { return true; } + virtual void Shutdown(class UEngine* InEngine) override {} + //~ End TimecodeProvider Interface + public: - /** Start, or stop if already started. preroll mechanism*/ - bool StartPreRoll(); + + /** + * Starts the synchronization process. Does nothing if we're already synchronized, or attempting to synchronize. + * + * @return True if the synchronization process was successfully started (or was previously started). + */ + bool StartSynchronization(); + + /** Stops the synchronization process. Does nothing if we're not synchronized, or attempting to synchronize. 
*/ + void StopSynchronization(); + + DEPRECATED(4.21, "Please use GetSynchronizedSources.") + const TArray& GetTimecodedSources() const { return GetSynchronizedSources(); } + + DEPRECATED(4.21, "Please use GetNonSynchronizedSources.") + const TArray& GetSynchronizationSources() const { return GetNonSynchronizedSources(); } + + /** Returns the list of sources that are used to perform synchronization. */ + const TArray& GetSynchronizedSources() const { return SynchronizedSources; } + + /** Returns the list of sources that are not actively being used in synchronization. */ + const TArray& GetNonSynchronizedSources() const { return NonSynchronizedSources; } - /** Stops sources and resets internals */ - void StopInputSources(); - - /** Gets the sources used for synchronization */ - const TArray& GetTimecodedSources() const { return ActiveTimecodedInputSources; } - - /** Gets the sources that want to be synchronized */ - const TArray& GetSynchronizationSources() const { return ActiveSynchronizedSources; } - - /** Gets the master synchronization source index */ - int32 GetActiveMasterSynchronizationTimecodedSourceIndex() const { return ActiveMasterSynchronizationTimecodedSourceIndex; } + /** Returns the index of the Master Synchronization Source in the Synchronized Sources list. */ + int32 GetActiveMasterSynchronizationTimecodedSourceIndex() const { return MasterSynchronizationSourceIndex; } /** * Get an event delegate that is invoked when a Asset synchronization event occurred. 
@@ -137,41 +292,52 @@ public: return SynchronizationEvent; } -public: - //~ Begin TimecodeProvider Interface - virtual FTimecode GetTimecode() const override; - virtual FFrameRate GetFrameRate() const override; - virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override; - virtual bool Initialize(class UEngine* InEngine) override { return true; } - virtual void Shutdown(class UEngine* InEngine) override {} - //~ End TimecodeProvider Interface - private: - + /** Synchronization states */ enum class ESynchronizationState : uint8 { None, - PreRolling_WaitGenlockTimecodeProvider, // wait for the TimecodeProvider & CustomTimeStep to be Ready - PreRolling_WaitReadiness, // wait for all source to be Ready - PreRolling_Synchronizing, // wait and find a valid Timecode to start with - PreRolling_Buffering, // make sure each source have a big enough buffer - Synchronized, // all sources are running and synchronized - Rolling, Error, + Initializing, // Kicking off the initialization process. 
+ PreRolling_WaitGenlockTimecodeProvider, // wait for the TimecodeProvider & CustomTimeStep to be Ready + PreRolling_WaitReadiness, // wait for all source to be Ready + PreRolling_Synchronizing, // wait and find a valid Timecode to start with + Synchronized, // all sources are running and synchronized }; - /** Is the TimecodeSynchronizer synchronizing */ - bool IsSynchronizing() const; + FORCEINLINE static FString SynchronizationStateToString(ESynchronizationState InState) + { + switch (InState) + { + case ESynchronizationState::None: + return FString(TEXT("None")); - /** Is the TimecodeSynchronizer synchronized */ - bool IsSynchronized() const; + case ESynchronizationState::Initializing: + return FString(TEXT("Initializing")); + + case ESynchronizationState::PreRolling_WaitGenlockTimecodeProvider: + return FString(TEXT("WaitGenlockTimecodeProvider")); + + case ESynchronizationState::PreRolling_WaitReadiness: + return FString(TEXT("WaitReadiness")); + + case ESynchronizationState::PreRolling_Synchronizing: + return FString(TEXT("Synchronizing")); + + case ESynchronizationState::Synchronized: + return FString(TEXT("Synchronized")); + + case ESynchronizationState::Error: + return FString(TEXT("Error")); + + default: + return FString::Printf(TEXT("Invalid State %d"), static_cast(InState)); + } + } /** Registers asset to MediaModule tick */ void SetTickEnabled(bool bEnabled); - - /** Changes internal state and execute it if required */ - void SwitchState(const ESynchronizationState NewState, const bool bDoTick = false); /** Tick method of the asset */ void Tick(); @@ -179,30 +345,23 @@ private: /** Switches on current state and ticks it */ void Tick_Switch(); + bool ShouldTick(); + /** Test if the genlock & timecode provider are properly setup */ bool Tick_TestGenlock(); bool Tick_TestTimecode(); - /** Process PreRolling_WaitGenlockTimecodeProvider state */ + /** Process PreRolling_WaitGenlockTimecodeProvider state. 
*/ void TickPreRolling_WaitGenlockTimecodeProvider(); - /** Process PreRolling_WaitReadiness state */ + /** Process PreRolling_WaitReadiness state. */ void TickPreRolling_WaitReadiness(); - /** Process PreRolling_Synchronizing state */ + /** Process PreRolling_Synchronizing state. */ void TickPreRolling_Synchronizing(); - - /** Process PreRolling_Buffering state */ - void TickPreRolling_Buffering(); - - /** Process Synchronized state */ - void TickSynchronized(); - - /** Prepare for ErrorState to be engaged */ - void EnterStateError(); - - /** Process Error state */ - void TickError(); + + /** Process Synchronized state. */ + void Tick_Synchronized(); /** Register TimecodeSynchronizer as the TimecodeProvider */ void Register(); @@ -210,44 +369,50 @@ private: /** Unregister TimecodeSynchronizer as the TimecodeProvider */ void Unregister(); - /** Convert Timecode to a FrameTime */ - FFrameTime ConvertTimecodeToFrameTime(const FTimecode& InTimecode) const; - FTimecode ConvertFrameTimeToTimecode(const FFrameTime& InFFrameTime) const; - - /** Verify if all sources are ready */ - bool AreSourcesReady() const; - - /** Start all sources once we're ready to advance time */ + void OpenSources(); void StartSources(); + void CloseSources(); + /** Updates and caches the state of the sources. */ + void UpdateSourceStates(); + FFrameTime CalculateSyncTime(); + + bool IsSynchronizing() const; + bool IsSynchronized() const; + bool IsError() const; + + /** Changes internal state and execute it if required */ + void SwitchState(const ESynchronizationState NewState); + + FFrameTime GetProviderFrameTime() const; public: /** The fixed framerate to use. */ - UPROPERTY(EditAnywhere, Category="Genlock", meta=(DisplayName="Enable")) + UPROPERTY(EditAnywhere, Category="Genlock", Meta=(DisplayName="Enable")) bool bUseCustomTimeStep; /** Custom strategy to tick in a interval. 
*/ - UPROPERTY(EditAnywhere, Instanced, Category="Genlock", meta=(EditCondition="bUseCustomTimeStep", DisplayName="Genlock Source")) + UPROPERTY(EditAnywhere, Instanced, Category="Genlock", Meta=(EditCondition="bUseCustomTimeStep", DisplayName="Genlock Source")) UFixedFrameRateCustomTimeStep* CustomTimeStep; /** The fixed framerate to use. */ - UPROPERTY(EditAnywhere, Category="Genlock", meta=(EditCondition="!bUseCustomTimeStep", ClampMin="15.0")) + UPROPERTY(EditAnywhere, Category="Genlock", Meta=(EditCondition="!bUseCustomTimeStep", ClampMin="15.0")) FFrameRate FixedFrameRate; public: /** Use a Timecode Provider. */ - UPROPERTY(EditAnywhere, Category="Timecode Provider", meta=(DisplayName="Select")) + UPROPERTY(EditAnywhere, Category="Timecode Provider", Meta=(DisplayName="Select")) ETimecodeSynchronizationTimecodeType TimecodeProviderType; /** Custom strategy to tick in a interval. */ - UPROPERTY(EditAnywhere, Instanced, Category="Timecode Provider", meta=(EditCondition="IN_CPP", DisplayName="Timecode Source")) + UPROPERTY(EditAnywhere, Instanced, Category="Timecode Provider", Meta=(EditCondition="IN_CPP", DisplayName="Timecode Source")) UTimecodeProvider* TimecodeProvider; /** * Index of the source that drives the synchronized Timecode. 
* The source need to be timecoded and flag as bUseForSynchronization */ - UPROPERTY(EditAnywhere, Category="Timecode Provider", meta=(EditCondition="IN_CPP")) + UPROPERTY(EditAnywhere, Category="Timecode Provider", Meta=(EditCondition="IN_CPP")) int32 MasterSynchronizationSourceIndex; public: @@ -256,7 +421,7 @@ public: bool bUsePreRollingTimecodeMarginOfErrors; /** Maximum gap size between synchronized time and source time */ - UPROPERTY(EditAnywhere, Category="Synchronization", meta=(EditCondition="bUsePreRollingTimecodeMarginOfErrors", ClampMin="0")) + UPROPERTY(EditAnywhere, Category="Synchronization", Meta=(EditCondition="bUsePreRollingTimecodeMarginOfErrors", ClampMin="0")) int32 PreRollingTimecodeMarginOfErrors; /** Enable PreRoll timeout */ @@ -264,38 +429,68 @@ public: bool bUsePreRollingTimeout; /** How long to wait for all source to be ready */ - UPROPERTY(EditAnywhere, Category="Synchronization", meta=(EditCondition="bUsePreRollingTimeout", ClampMin="0.0")) + UPROPERTY(EditAnywhere, Category="Synchronization", Meta=(EditCondition="bUsePreRollingTimeout", ClampMin="0.0")) float PreRollingTimeout; public: - /** Array of all the sources that wants to be synchronized*/ + UPROPERTY(EditAnywhere, Instanced, Category="Input") TArray TimeSynchronizationInputSources; private: - + + /** What mode will be used for synchronization. */ + UPROPERTY(EditAnywhere, Category = "Synchronization", Meta=(EditCondition="IN_CPP")) + ETimecodeSynchronizationSyncMode SyncMode; + + /** + * When UserDefined mode is used, the number of frames delayed from the Provider's timecode. + * Negative values indicate the used timecode will be ahead of the Provider's. + */ + UPROPERTY(EditAnywhere, Category = "Synchronization", Meta=(EditCondition = "IN_CPP", ClampMin="-640", ClampMax="640")) + int32 FrameOffset; + + /** + * Similar to FrameOffset. + * For Auto mode, this represents the number of frames behind the newest synced frame. 
+ * For AutoModeOldest, the is the of frames ahead of the last synced frame. + */ + UPROPERTY(EditAnywhere, Category = "Synchronization", Meta = (EditCondition = "IN_CPP", ClampMin = "0", ClampMax = "640")) + int32 AutoFrameOffset = 3; + + /** Whether or not the specified Provider's timecode rolls over. (Rollover is expected to occur at Timecode 24:00:00:00). */ + UPROPERTY(EditAnywhere, Category = "Synchronization", Meta=(EditCondition="IN_CPP")) + bool bWithRollover = false; + /** Sources used for synchronization */ - UPROPERTY(VisibleAnywhere, Category=Debug) - TArray ActiveTimecodedInputSources; + UPROPERTY(Transient, DuplicateTransient, VisibleAnywhere, Category=Debug) + TArray SynchronizedSources; /* Sources that wants to be synchronized */ - UPROPERTY(VisibleAnywhere, Category=Debug) - TArray ActiveSynchronizedSources; + UPROPERTY(Transient, DuplicateTransient, VisibleAnywhere, Category=Debug) + TArray NonSynchronizedSources; UPROPERTY(Transient) UFixedFrameRateCustomTimeStep* RegisteredCustomTimeStep; UPROPERTY(Transient) - UTimecodeProvider* RegisteredTimecodeProvider; + const UTimecodeProvider* RegisteredTimecodeProvider; + + UPROPERTY(Transient, DuplicateTransient, VisibleAnywhere, Category = "Synchronization") + int32 ActualFrameOffset; private: + int64 LastUpdatedSources = 0; + /** The actual synchronization state */ ESynchronizationState State; - bool bSourceStarted; - /** Current FrameTime of the system */ - FFrameTime CurrentFrameTime; + /** Frame time that we'll use for the system */ + TOptional CurrentSystemFrameTime; + + /** The current frame from our specified provider. */ + FFrameTime CurrentProviderFrameTime; /** Timestamp when PreRolling has started */ double StartPreRollingTime; @@ -310,4 +505,6 @@ private: /** An event delegate that is invoked when a synchronization event occurred. 
*/ FOnTimecodeSynchronizationEvent SynchronizationEvent; + + FTimecodeSynchronizerCachedSyncState CachedSyncState; }; diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/AssetEditor/TimecodeSynchronizerEditorToolkit.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/AssetEditor/TimecodeSynchronizerEditorToolkit.cpp index c3a41b2cba56..9f3c26a0b357 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/AssetEditor/TimecodeSynchronizerEditorToolkit.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/AssetEditor/TimecodeSynchronizerEditorToolkit.cpp @@ -296,7 +296,7 @@ void FTimecodeSynchronizerEditorToolkit::ExtendToolBar() { if (UTimecodeSynchronizer* Asset = GetTimecodeSynchronizer()) { - Asset->StartPreRoll(); + Asset->StartSynchronization(); } }), FCanExecuteAction::CreateLambda([&]() @@ -322,7 +322,7 @@ void FTimecodeSynchronizerEditorToolkit::ExtendToolBar() { if (UTimecodeSynchronizer* Asset = GetTimecodeSynchronizer()) { - Asset->StopInputSources(); + Asset->StopSynchronization(); } }), FCanExecuteAction::CreateLambda([&]() diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewer.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewer.cpp index 30392c710cc0..cb0c9d4ea60a 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewer.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewer.cpp @@ -66,53 +66,38 @@ void STimecodeSynchronizerSourceViewer::PopulateActiveSources() if (TimecodeSynchronizer.IsValid()) { - const TArray& TimecodedSources = 
TimecodeSynchronizer->GetTimecodedSources(); - for (int32 Index = 0; Index < TimecodedSources.Num(); ++Index) + struct FLocal { - const FTimecodeSynchronizerActiveTimecodedInputSource& Source = TimecodedSources[Index]; - if (Source.InputSource) + static void BuildViewportsForSources(UTimecodeSynchronizer* Synchronizer, TSharedPtr Owner, const bool bSynchronizedSources) { - UMediaPlayerTimeSynchronizationSource* MediaPlayerSource = Cast(Source.InputSource); - UMediaTexture* TextureArg = MediaPlayerSource ? MediaPlayerSource->MediaTexture : nullptr; - - //Add a Viewport Widget for each active Source - ViewportVerticalBox->AddSlot() - .Padding(1.0f, 1.0f, 1.0f, 1.0f) - [ - SNew(SBorder) - .BorderImage(FCoreStyle::Get().GetBrush("GreenBrush")) - .Padding(0.0f) - [ - //Source area - SNew(STimecodeSynchronizerSourceViewport, TimecodeSynchronizer.Get(), Index, true, TextureArg) - ] - ]; - } - } + const TArray& TimecodedSources = bSynchronizedSources ? Synchronizer->GetSynchronizedSources() : Synchronizer->GetNonSynchronizedSources(); + for (int32 Index = 0; Index < TimecodedSources.Num(); ++Index) + { + const FTimecodeSynchronizerActiveTimecodedInputSource& Source = TimecodedSources[Index]; + if (const UTimeSynchronizationSource* SyncSource = Source.GetInputSource()) + { + const UMediaPlayerTimeSynchronizationSource* MediaPlayerSource = Cast(SyncSource); + const UMediaTexture* TextureArg = MediaPlayerSource ? MediaPlayerSource->MediaTexture : nullptr; - const TArray& SynchronizationSources = TimecodeSynchronizer->GetSynchronizationSources(); - for (int32 Index = 0; Index < SynchronizationSources.Num(); ++Index) - { - const FTimecodeSynchronizerActiveTimecodedInputSource& Source = SynchronizationSources[Index]; - if (Source.InputSource) - { - UMediaPlayerTimeSynchronizationSource* MediaPlayerSource = Cast(Source.InputSource); - UMediaTexture* TextureArg = MediaPlayerSource ? 
MediaPlayerSource->MediaTexture : nullptr; - - //Add a Viewport Widget for each active Source - ViewportVerticalBox->AddSlot() - .Padding(1.0f, 1.0f, 1.0f, 1.0f) - [ - SNew(SBorder) - .BorderImage(FCoreStyle::Get().GetBrush("GreenBrush")) - .Padding(0.0f) - [ - //Source area - SNew(STimecodeSynchronizerSourceViewport, TimecodeSynchronizer.Get(), Index, false, TextureArg) - ] - ]; + //Add a Viewport Widget for each active Source + Owner->AddSlot() + .Padding(1.0f, 1.0f, 1.0f, 1.0f) + [ + SNew(SBorder) + .BorderImage(FCoreStyle::Get().GetBrush("GreenBrush")) + .Padding(0.0f) + [ + //Source area + SNew(STimecodeSynchronizerSourceViewport, Synchronizer, Index, bSynchronizedSources, const_cast(TextureArg)) + ] + ]; + } + } } - } + }; + + FLocal::BuildViewportsForSources(TimecodeSynchronizer.Get(), ViewportVerticalBox, true); + FLocal::BuildViewportsForSources(TimecodeSynchronizer.Get(), ViewportVerticalBox, false); } } diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.cpp b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.cpp index 1fc3dd8b8099..f4619b553e48 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.cpp +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.cpp @@ -28,7 +28,7 @@ STimecodeSynchronizerSourceViewport::STimecodeSynchronizerSourceViewport() , SourceTextBox(nullptr) , TimecodeSynchronization(nullptr) , AttachedSourceIndex(INDEX_NONE) - , bIsTimecodedSource(false) + , bIsSynchronizedSource(false) , Material(nullptr) , MaterialBrush(nullptr) , TextureSampler(nullptr) @@ -37,11 +37,11 @@ STimecodeSynchronizerSourceViewport::STimecodeSynchronizerSourceViewport() /* STimecodeSynchronizerSourceViewport interface 
*****************************************************************************/ -void STimecodeSynchronizerSourceViewport::Construct(const FArguments& InArgs, UTimecodeSynchronizer* InTimecodeSynchronizer, int32 InAttachedSourceIndex, bool InTimecodedSource, UTexture* InTexture) +void STimecodeSynchronizerSourceViewport::Construct(const FArguments& InArgs, UTimecodeSynchronizer* InTimecodeSynchronizer, int32 InAttachedSourceIndex, bool bInIsSynchronizedSource, UTexture* InTexture) { TimecodeSynchronization = InTimecodeSynchronizer; AttachedSourceIndex = InAttachedSourceIndex; - bIsTimecodedSource = InTimecodedSource; + bIsSynchronizedSource = bInIsSynchronizedSource; //If no texture is attached, a default static texture will be used (for non live viewable sources) MaterialBrush = nullptr; @@ -105,9 +105,9 @@ void STimecodeSynchronizerSourceViewport::Construct(const FArguments& InArgs, UT .Text_Lambda([&]() -> FText { const FTimecodeSynchronizerActiveTimecodedInputSource* AttachedSource = GetAttachedSource(); - if (AttachedSource && AttachedSource->bIsReady) + if (AttachedSource && AttachedSource->IsReady()) { - return FText::FromString(GetAttachedSource()->InputSource->GetDisplayName()); + return FText::FromString(GetAttachedSource()->GetDisplayName()); } return FText(); }) @@ -211,10 +211,11 @@ FText STimecodeSynchronizerSourceViewport::HandleIntervalMinTimecodeText() const { FTimecode Timecode; const FTimecodeSynchronizerActiveTimecodedInputSource* AttachedSource = GetAttachedSource(); - if (AttachedSource && AttachedSource->bIsReady) + if (AttachedSource && AttachedSource->IsReady()) { - const bool bIsDropFrame = FTimecode::IsDropFormatTimecodeSupported(AttachedSource->FrameRate); - Timecode = FTimecode::FromFrameNumber(AttachedSource->NextSampleTime.FrameNumber, AttachedSource->FrameRate, bIsDropFrame); + const FFrameNumber OldestFrame = AttachedSource->GetInputSourceState().OldestAvailableSample.GetFrame(); + const bool bIsDropFrame = 
FTimecode::IsDropFormatTimecodeSupported(AttachedSource->GetFrameRate()); + Timecode = FTimecode::FromFrameNumber(OldestFrame, AttachedSource->GetFrameRate(), bIsDropFrame); } return FText::FromString(Timecode.ToString()); @@ -224,11 +225,11 @@ FText STimecodeSynchronizerSourceViewport::HandleIntervalMaxTimecodeText() const { FTimecode Timecode; const FTimecodeSynchronizerActiveTimecodedInputSource* AttachedSource = GetAttachedSource(); - if (AttachedSource && AttachedSource->bIsReady) + if (AttachedSource && AttachedSource->IsReady()) { - const FFrameNumber NextFrame = AttachedSource->NextSampleTime.FrameNumber + AttachedSource->AvailableSampleCount; - const bool bIsDropFrame = FTimecode::IsDropFormatTimecodeSupported(AttachedSource->FrameRate); - Timecode = FTimecode::FromFrameNumber(NextFrame, AttachedSource->FrameRate, bIsDropFrame); + const FFrameNumber NewestFrame = AttachedSource->GetInputSourceState().NewestAvailableSample.GetFrame(); + const bool bIsDropFrame = FTimecode::IsDropFormatTimecodeSupported(AttachedSource->GetFrameRate()); + Timecode = FTimecode::FromFrameNumber(NewestFrame, AttachedSource->GetFrameRate(), bIsDropFrame); } return FText::FromString(Timecode.ToString()); @@ -242,7 +243,7 @@ FText STimecodeSynchronizerSourceViewport::HandleCurrentTimecodeText() const FText STimecodeSynchronizerSourceViewport::HandleIsSourceMasterText() const { FString Role; - if (TimecodeSynchronization && AttachedSourceIndex != INDEX_NONE && bIsTimecodedSource && TimecodeSynchronization->GetActiveMasterSynchronizationTimecodedSourceIndex() == AttachedSourceIndex) + if (TimecodeSynchronization && AttachedSourceIndex != INDEX_NONE && bIsSynchronizedSource && TimecodeSynchronization->GetActiveMasterSynchronizationTimecodedSourceIndex() == AttachedSourceIndex) { Role = "Master"; } @@ -256,7 +257,7 @@ const FTimecodeSynchronizerActiveTimecodedInputSource* STimecodeSynchronizerSour if (TimecodeSynchronization && AttachedSourceIndex != INDEX_NONE) { - const TArray& 
Sources = bIsTimecodedSource ? TimecodeSynchronization->GetTimecodedSources() : TimecodeSynchronization->GetSynchronizationSources(); + const TArray& Sources = bIsSynchronizedSource ? TimecodeSynchronization->GetSynchronizedSources() : TimecodeSynchronization->GetNonSynchronizedSources(); if (Sources.IsValidIndex(AttachedSourceIndex)) { Result = &Sources[AttachedSourceIndex]; diff --git a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.h b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.h index 6c7d8b4cc0de..63658cd9dbbc 100644 --- a/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.h +++ b/Engine/Plugins/Media/TimecodeSynchronizer/Source/TimecodeSynchronizerEditor/Private/Widgets/STimecodeSynchronizerSourceViewport.h @@ -89,11 +89,11 @@ private: /** Current TimecodeSynchronization being used. */ UTimecodeSynchronizer* TimecodeSynchronization; - /** Attached Input source index in TimecodeSynchronization.GetActiveSources(). */ + /** Attached Input source index in either TimecodeSynchronization.GetSynchronizedSources() or GetNonSynchronizedSources(). */ int32 AttachedSourceIndex; - /** Attached Input source index in TimecodeSynchronization.GetActiveSources(). */ - bool bIsTimecodedSource; + /** Whether or not this source is used for synchronization. */ + bool bIsSynchronizedSource; /** The material that wraps the video texture for display in an SImage. 
*/ UMaterial* Material; diff --git a/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.cpp b/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.cpp index 519c3f81e182..de39e89f09d1 100644 --- a/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.cpp +++ b/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.cpp @@ -178,6 +178,11 @@ EMessageScope FTcpDeserializedMessage::GetScope() const return Scope; } +EMessageFlags FTcpDeserializedMessage::GetFlags() const +{ + return EMessageFlags::None; +} + const FMessageAddress& FTcpDeserializedMessage::GetSender() const { diff --git a/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.h b/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.h index 296d7af261e7..0db25a6cc8f9 100644 --- a/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.h +++ b/Engine/Plugins/Messaging/TcpMessaging/Source/TcpMessaging/Private/Transport/TcpDeserializedMessage.h @@ -47,6 +47,7 @@ public: virtual TSharedPtr GetOriginalContext() const override; virtual const TArray& GetRecipients() const override; virtual EMessageScope GetScope() const override; + virtual EMessageFlags GetFlags() const override; virtual const FMessageAddress& GetSender() const override; virtual ENamedThreads::Type GetSenderThread() const override; virtual const FDateTime& GetTimeForwarded() const override; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Shared/UdpMessageSegment.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Shared/UdpMessageSegment.h index 3a035e34a2f9..5cda750e6a83 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Shared/UdpMessageSegment.h +++ 
b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Shared/UdpMessageSegment.h @@ -7,6 +7,8 @@ #include "Misc/Guid.h" #include "Serialization/Archive.h" +// IMessageContext forward declaration +enum class EMessageFlags : uint32; /** * Enumerates message segment types. @@ -35,7 +37,18 @@ enum class EUdpMessageSegments : uint8 Retransmit, /** Notification that an inbound message timed out. */ - Timeout + Timeout, + + /** Acknowledges that message segments were received successfully */ + AcknowledgeSegments, + + /** Announces existence to static endpoints. */ + Ping, + + /** Answers back to ping segment. */ + Pong, + + // New segment type needs to be added at the end }; @@ -68,7 +81,7 @@ namespace FUdpMessageSegment * Serializes the given header from or into the specified archive. * * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. + * @param Header The header to serialize. * @return The archive. */ friend FArchive& operator<<(FArchive& Ar, FHeader& Header) @@ -77,7 +90,6 @@ namespace FUdpMessageSegment } }; - /** * Structure for the sub-header of Abort segments. */ @@ -89,15 +101,14 @@ namespace FUdpMessageSegment public: /** - * Serializes the given header from or to the specified archive. + * Serializes the given header from or to the specified archive for the specified version. * * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. - * @return The archive. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. */ - friend FArchive& operator<<(FArchive& Ar, FAbortChunk& Header) + void Serialize(FArchive& Ar, uint8 /*ProtocolVersion*/) { - return Ar << Header.MessageId; + Ar << MessageId; } }; @@ -111,17 +122,41 @@ namespace FUdpMessageSegment int32 MessageId; public: - /** - * Serializes the given header from or into the specified archive. + * Serializes the given header from or to the specified archive for the specified version. 
* * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. - * @return The archive. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. */ - friend FArchive& operator<<(FArchive& Ar, FAcknowledgeChunk& Header) + void Serialize(FArchive& Ar, uint8 /*ProtocolVersion*/) { - return Ar << Header.MessageId; + Ar << MessageId; + } + }; + + + /** + * Structure for the header of AcknowledgeSegments segments. + */ + struct FAcknowledgeSegmentsChunk + { + /** Holds the identifier of the message that received segments successfully. */ + int32 MessageId; + + /** List of Acknowledged segments */ + TArray Segments; + + public: + + /** + * Serializes the given header from or to the specified archive for the specified version. + * + * @param Ar The archive to serialize from or into. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. + */ + void Serialize(FArchive& Ar, uint8 /*ProtocolVersion*/) + { + Ar << MessageId << Segments; } }; @@ -137,6 +172,9 @@ namespace FUdpMessageSegment /** Holds the total size of the message. */ int32 MessageSize; + /** Holds the message flags. */ + EMessageFlags MessageFlags; + /** Holds the sequence number of this segment. */ uint16 SegmentNumber; @@ -153,24 +191,28 @@ namespace FUdpMessageSegment TArray Data; public: + FDataChunk() = default; /** - * Serializes the given header from or into the specified archive. + * Serializes the given header from or to the specified archive for the specified version. * * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. - * @return The archive. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. 
*/ - friend FArchive& operator<<(FArchive& Ar, FDataChunk& Chunk) + void Serialize(FArchive& Ar, uint8 ProtocolVersion) { - return Ar - << Chunk.MessageId - << Chunk.MessageSize - << Chunk.SegmentNumber - << Chunk.SegmentOffset - << Chunk.Sequence - << Chunk.TotalSegments - << Chunk.Data; + Ar << MessageId + << MessageSize + << SegmentNumber + << SegmentOffset + << Sequence + << TotalSegments + << Data; + // if the protocol version is 11 onward + if (ProtocolVersion > 10) + { + Ar << MessageFlags; + } } }; @@ -193,15 +235,14 @@ namespace FUdpMessageSegment public: /** - * Serializes the given header from or into the specified archive. + * Serializes the given header from or to the specified archive for the specified version. * * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. - * @return The archive. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. */ - friend FArchive& operator<<(FArchive& Ar, FRetransmitChunk& Header) + void Serialize(FArchive& Ar, uint8 /*ProtocolVersion*/) { - return Ar << Header.MessageId << Header.Segments; + Ar << MessageId << Segments; } }; @@ -217,15 +258,14 @@ namespace FUdpMessageSegment public: /** - * Serializes the given header from or into the specified archive. + * Serializes the given header from or to the specified archive for the specified version. * * @param Ar The archive to serialize from or into. - * @param DateTime The header to serialize. - * @return The archive. + * @param ProtocolVersion The protocol version we want to serialize the Chunk in. 
*/ - friend FArchive& operator<<(FArchive& Ar, FTimeoutChunk& Header) + void Serialize(FArchive& Ar, uint8 /*ProtocolVersion*/) { - return Ar << Header.MessageId; + Ar << MessageId; } }; }; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessageSegmenterTest.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessageSegmenterTest.cpp index 5eff8e4b5fbc..d0abf8de62c0 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessageSegmenterTest.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessageSegmenterTest.cpp @@ -2,6 +2,9 @@ #include "CoreMinimal.h" #include "Misc/AutomationTest.h" +#include "Containers/ArrayBuilder.h" + +#include "UdpMessagingPrivate.h" #include "Transport/UdpSerializedMessage.h" #include "Transport/UdpMessageSegmenter.h" @@ -17,7 +20,7 @@ void RunSegmentationTest(FAutomationTestBase& Test, uint32 MessageSize, uint16 S Test.AddInfo(FString::Printf(TEXT("Segmenting message of size %i with %i segments of size %i..."), MessageSize, NumSegments, SegmentSize)); // create a large message to segment - TSharedRef Message = MakeShareable(new FUdpSerializedMessage()); + TSharedRef Message = MakeShareable(new FUdpSerializedMessage(UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION, EMessageFlags::None)); for (uint8 SegmentIndex = 0; SegmentIndex < NumSegments; ++SegmentIndex) { @@ -66,7 +69,7 @@ void RunSegmentationTest(FAutomationTestBase& Test, uint32 MessageSize, uint16 S while (Segmenter.GetNextPendingSegment(OutData, OutSegmentNumber)) { - Segmenter.MarkAsSent(OutSegmentNumber); + Segmenter.MarkAsAcknowledged(TArrayBuilder().Add(OutSegmentNumber)); ++GeneratedSegmentCount; // verify segment size diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessagingTestTypes.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessagingTestTypes.h index 
2dcc081ca0aa..c1b7a922137a 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessagingTestTypes.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpMessagingTestTypes.h @@ -37,6 +37,7 @@ public: : Expiration(FDateTime::MaxValue()) , Message(InMessage) , Scope(EMessageScope::Network) + , Flags(EMessageFlags::None) , SenderThread(ENamedThreads::AnyThread) , TimeSent(InTimeSent) , TypeInfo(FUdpMockMessage::StaticStruct()) @@ -69,6 +70,8 @@ public: virtual TSharedPtr GetOriginalContext() const override { return OriginalContext; } virtual const TArray& GetRecipients() const override { return Recipients; } virtual EMessageScope GetScope() const override { return Scope; } + virtual EMessageFlags GetFlags() const override { return Flags; } + virtual const FMessageAddress& GetSender() const override { return Sender; } virtual ENamedThreads::Type GetSenderThread() const override { return SenderThread; } virtual const FDateTime& GetTimeForwarded() const override { return TimeSent; } @@ -83,6 +86,7 @@ private: TSharedPtr OriginalContext; TArray Recipients; EMessageScope Scope; + EMessageFlags Flags; FMessageAddress Sender; ENamedThreads::Type SenderThread; FDateTime TimeSent; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpSerializeMessageTaskTest.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpSerializeMessageTaskTest.cpp index 69af193b8e6d..5a9b4628da53 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpSerializeMessageTaskTest.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tests/UdpSerializeMessageTaskTest.cpp @@ -4,13 +4,14 @@ #include "HAL/Event.h" #include "Misc/AutomationTest.h" #include "Async/TaskGraphInterfaces.h" + +#include "UdpMessagingPrivate.h" #include "Transport/UdpSerializedMessage.h" #include "IMessageContext.h" #include 
"Transport/UdpSerializeMessageTask.h" #include "Tests/UdpMessagingTestTypes.h" - IMPLEMENT_SIMPLE_AUTOMATION_TEST(FUdpSerializeMessageTaskTest, "System.Core.Messaging.Transports.Udp.UdpSerializedMessage", EAutomationTestFlags::ApplicationContextMask | EAutomationTestFlags::EngineFilter) @@ -28,7 +29,7 @@ bool FUdpSerializeMessageTaskTest::RunTest(const FString& Parameters) const auto Context = MakeShared(new FUdpMockMessage, TimeSent); // synchronous reference serialization - const auto Message1 = MakeShared(); + const auto Message1 = MakeShared(UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION, Context->GetFlags()); FUdpSerializeMessageTask Task1(Context, Message1, nullptr); { @@ -41,7 +42,7 @@ bool FUdpSerializeMessageTaskTest::RunTest(const FString& Parameters) FPlatformProcess::ReturnSynchEventToPool(EventToDelete); }); - TSharedRef Message2 = MakeShareable(new FUdpSerializedMessage); + TSharedRef Message2 = MakeShared(UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION, Context->GetFlags()); TGraphTask::CreateTask().ConstructAndDispatchWhenReady(Context, Message2, CompletionEvent); const bool Completed = CompletionEvent->Wait(MaxWaitTime); diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.cpp index bf092051fa9d..66f9be17a6dc 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.cpp @@ -3,9 +3,10 @@ #include "Transport/UdpDeserializedMessage.h" #include "Backends/JsonStructDeserializerBackend.h" -#include "IMessageAttachment.h" -#include "Serialization/MemoryReader.h" +#include "Backends/CborStructDeserializerBackend.h" #include "StructDeserializer.h" +#include "Serialization/MemoryReader.h" +#include "IMessageAttachment.h" #include 
"Transport/UdpReassembledMessage.h" #include "UObject/Class.h" #include "UObject/Package.h" @@ -18,6 +19,7 @@ FUdpDeserializedMessage::FUdpDeserializedMessage(const TSharedPtr& InAttachment) : Attachment(InAttachment) , MessageData(nullptr) + , Flags(EMessageFlags::None) { } @@ -35,104 +37,23 @@ FUdpDeserializedMessage::~FUdpDeserializedMessage() } } +/** Helper class for protocol deserialization dispatching */ +class FUdpDeserializedMessageDetails +{ +public: + static bool DeserializeV10(FUdpDeserializedMessage& DeserializedMessage, FMemoryReader& MessageReader); + static bool DeserializeV11(FUdpDeserializedMessage& DeserializedMessage, FMemoryReader& MessageReader); + static bool Deserialize(FUdpDeserializedMessage& DeserializedMessage, const FUdpReassembledMessage& ReassembledMessage); +}; /* FUdpDeserializedMessage interface *****************************************************************************/ bool FUdpDeserializedMessage::Deserialize(const FUdpReassembledMessage& ReassembledMessage) { - // Note that some complex values are deserialized manually here, so that we - // can sanity check their values. 
@see FUdpSerializeMessageTask::DoTask() - - FMemoryReader MessageReader(ReassembledMessage.GetData()); - MessageReader.ArMaxSerializeSize = NAME_SIZE; - - // message type info - { - FName MessageType; - MessageReader << MessageType; - - // @todo gmp: cache message types for faster lookup - TypeInfo = FindObjectSafe(ANY_PACKAGE, *MessageType.ToString()); - - if (!TypeInfo.IsValid(false, true)) - { - return false; - } - } - - // sender address - { - MessageReader << Sender; - } - - // recipient addresses - { - int32 NumRecipients = 0; - MessageReader << NumRecipients; - - if ((NumRecipients < 0) || (NumRecipients > UDP_MESSAGING_MAX_RECIPIENTS)) - { - return false; - } - - Recipients.Empty(NumRecipients); - - while (0 < NumRecipients--) - { - MessageReader << *::new(Recipients) FMessageAddress; - } - } - - // message scope - { - MessageReader << Scope; - - if (Scope > EMessageScope::All) - { - return false; - } - } - - // time sent & expiration - { - MessageReader << TimeSent; - MessageReader << Expiration; - } - - // annotations - { - int32 NumAnnotations = 0; - MessageReader << NumAnnotations; - - if (NumAnnotations > UDP_MESSAGING_MAX_ANNOTATIONS) - { - return false; - } - - while (0 < NumAnnotations--) - { - FName Key; - FString Value; - - MessageReader << Key; - MessageReader << Value; - - Annotations.Add(Key, Value); - } - } - - // create message body - MessageData = FMemory::Malloc(TypeInfo->GetStructureSize()); - TypeInfo->InitializeStruct(MessageData); - - // deserialize message body - FJsonStructDeserializerBackend Backend(MessageReader); - - return FStructDeserializer::Deserialize(MessageData, *TypeInfo, Backend); + return FUdpDeserializedMessageDetails::Deserialize(*this, ReassembledMessage); } - /* IMessageContext interface *****************************************************************************/ @@ -184,6 +105,12 @@ EMessageScope FUdpDeserializedMessage::GetScope() const } +EMessageFlags FUdpDeserializedMessage::GetFlags() const +{ + return Flags; 
+} + + const FMessageAddress& FUdpDeserializedMessage::GetSender() const { return Sender; @@ -206,3 +133,241 @@ const FDateTime& FUdpDeserializedMessage::GetTimeSent() const { return TimeSent; } + + +/* FUdpDeserializedMessageDetails implementation +*****************************************************************************/ + + +bool FUdpDeserializedMessageDetails::DeserializeV10(FUdpDeserializedMessage& DeserializedMessage, FMemoryReader& MessageReader) +{ + // message type info + { + FName MessageType; + MessageReader << MessageType; + + // @todo gmp: cache message types for faster lookup + DeserializedMessage.TypeInfo = FindObjectSafe(ANY_PACKAGE, *MessageType.ToString()); + + if (!DeserializedMessage.TypeInfo.IsValid(false, true)) + { + return false; + } + } + + // sender address + { + MessageReader << DeserializedMessage.Sender; + } + + // recipient addresses + { + int32 NumRecipients = 0; + MessageReader << NumRecipients; + + if ((NumRecipients < 0) || (NumRecipients > UDP_MESSAGING_MAX_RECIPIENTS)) + { + return false; + } + + DeserializedMessage.Recipients.Empty(NumRecipients); + + while (0 < NumRecipients--) + { + MessageReader << DeserializedMessage.Recipients.AddDefaulted_GetRef(); + } + } + + // message scope + { + MessageReader << DeserializedMessage.Scope; + + if (DeserializedMessage.Scope > EMessageScope::All) + { + return false; + } + } + + // time sent & expiration + { + MessageReader << DeserializedMessage.TimeSent; + MessageReader << DeserializedMessage.Expiration; + } + + // annotations + { + int32 NumAnnotations = 0; + MessageReader << NumAnnotations; + + if (NumAnnotations > UDP_MESSAGING_MAX_ANNOTATIONS) + { + return false; + } + + while (0 < NumAnnotations--) + { + FName Key; + FString Value; + + MessageReader << Key; + MessageReader << Value; + + DeserializedMessage.Annotations.Add(Key, Value); + } + } + + // create message body + DeserializedMessage.MessageData = FMemory::Malloc(DeserializedMessage.TypeInfo->GetStructureSize()); + 
DeserializedMessage.TypeInfo->InitializeStruct(DeserializedMessage.MessageData); + + // deserialize message body + FJsonStructDeserializerBackend Backend(MessageReader); + return FStructDeserializer::Deserialize(DeserializedMessage.MessageData, *DeserializedMessage.TypeInfo, Backend); +} + +bool FUdpDeserializedMessageDetails::DeserializeV11(FUdpDeserializedMessage& DeserializedMessage, FMemoryReader& MessageReader) +{ + // message type info + { + FName MessageType; + MessageReader << MessageType; + + // @todo gmp: cache message types for faster lookup + DeserializedMessage.TypeInfo = FindObjectSafe(ANY_PACKAGE, *MessageType.ToString()); + + if (!DeserializedMessage.TypeInfo.IsValid(false, true)) + { + return false; + } + } + + // sender address + { + MessageReader << DeserializedMessage.Sender; + } + + // recipient addresses + { + int32 NumRecipients = 0; + MessageReader << NumRecipients; + + if ((NumRecipients < 0) || (NumRecipients > UDP_MESSAGING_MAX_RECIPIENTS)) + { + return false; + } + + DeserializedMessage.Recipients.Empty(NumRecipients); + + while (0 < NumRecipients--) + { + MessageReader << DeserializedMessage.Recipients.AddDefaulted_GetRef(); + } + } + + // message scope + { + MessageReader << DeserializedMessage.Scope; + + if (DeserializedMessage.Scope > EMessageScope::All) + { + return false; + } + } + + // message flags + { + MessageReader << DeserializedMessage.Flags; + } + + // time sent & expiration + { + MessageReader << DeserializedMessage.TimeSent; + MessageReader << DeserializedMessage.Expiration; + } + + // annotations + { + int32 NumAnnotations = 0; + MessageReader << NumAnnotations; + + if (NumAnnotations > UDP_MESSAGING_MAX_ANNOTATIONS) + { + return false; + } + + while (0 < NumAnnotations--) + { + FName Key; + FString Value; + + MessageReader << Key; + MessageReader << Value; + + DeserializedMessage.Annotations.Add(Key, Value); + } + } + + // wire format + uint8 FormatId; + MessageReader << FormatId; + EUdpMessageFormat MessageFormat = 
(EUdpMessageFormat)FormatId; + + // create message body + DeserializedMessage.MessageData = FMemory::Malloc(DeserializedMessage.TypeInfo->GetStructureSize()); + DeserializedMessage.TypeInfo->InitializeStruct(DeserializedMessage.MessageData); + + switch (MessageFormat) + { + case EUdpMessageFormat::Json: + { + // deserialize json + FJsonStructDeserializerBackend Backend(MessageReader); + return FStructDeserializer::Deserialize(DeserializedMessage.MessageData, *DeserializedMessage.TypeInfo, Backend); + } + break; + case EUdpMessageFormat::Cbor: + { + // deserialize cbor + FCborStructDeserializerBackend Backend(MessageReader); + return FStructDeserializer::Deserialize(DeserializedMessage.MessageData, *DeserializedMessage.TypeInfo, Backend); + } + break; + case EUdpMessageFormat::TaggedProperty: + { + // deserialize message body using tagged property + // Hack : this binary serialization should use a more standard protocol, should use cbor + DeserializedMessage.TypeInfo->SerializeItem(MessageReader, DeserializedMessage.MessageData, nullptr); + return !MessageReader.GetError(); + } + break; + default: + // Unsupported format + return false; + } +} + +bool FUdpDeserializedMessageDetails::Deserialize(FUdpDeserializedMessage& DeserializedMessage, const FUdpReassembledMessage& ReassembledMessage) +{ + // Note that some complex values are deserialized manually here, so that we + // can sanity check their values. 
@see FUdpSerializeMessageTask::DoTask() + + FMemoryReader MessageReader(ReassembledMessage.GetData()); + MessageReader.ArMaxSerializeSize = NAME_SIZE; + + switch (ReassembledMessage.GetProtocolVersion()) + { + case 10: + return DeserializeV10(DeserializedMessage, MessageReader); + break; + + case 11: + return DeserializeV11(DeserializedMessage, MessageReader); + break; + + default: + UE_LOG(LogUdpMessaging, Error, TEXT("Unsupported Protocol Version message tasked for deserialization, discarding...")); + break; + } + return false; +} + diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.h index d01965efd3e5..011289f718de 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpDeserializedMessage.h @@ -12,7 +12,7 @@ class FUdpReassembledMessage; class IMessageAttachment; class UScriptStruct; - +class FUdpDeserializedMessageDetails; /** * Holds a deserialized message. @@ -54,12 +54,15 @@ public: virtual TSharedPtr GetOriginalContext() const override; virtual const TArray& GetRecipients() const override; virtual EMessageScope GetScope() const override; + virtual EMessageFlags GetFlags() const override; + virtual const FMessageAddress& GetSender() const override; virtual ENamedThreads::Type GetSenderThread() const override; virtual const FDateTime& GetTimeForwarded() const override; virtual const FDateTime& GetTimeSent() const override; private: + friend class FUdpDeserializedMessageDetails; /** Holds the optional message annotations. */ TMap Annotations; @@ -79,6 +82,9 @@ private: /** Holds the message's scope. */ EMessageScope Scope; + /** Holds the message's flags. */ + EMessageFlags Flags; + /** Holds the sender's identifier. 
*/ FMessageAddress Sender; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.cpp index ab4d297f3ab4..42bba0c9ca2e 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.cpp @@ -21,7 +21,7 @@ const FTimespan FUdpMessageBeacon::MinimumInterval = FTimespan::FromMilliseconds /* FUdpMessageHelloSender structors *****************************************************************************/ -FUdpMessageBeacon::FUdpMessageBeacon(FSocket* InSocket, const FGuid& InSocketId, const FIPv4Endpoint& InMulticastEndpoint) +FUdpMessageBeacon::FUdpMessageBeacon(FSocket* InSocket, const FGuid& InSocketId, const FIPv4Endpoint& InMulticastEndpoint, const TArray& InStaticEndpoints) : BeaconInterval(MinimumInterval) , LastEndpointCount(1) , LastHelloSent(FDateTime::MinValue()) @@ -32,6 +32,10 @@ FUdpMessageBeacon::FUdpMessageBeacon(FSocket* InSocket, const FGuid& InSocketId, { EndpointLeftEvent = FPlatformProcess::GetSynchEventFromPool(false); MulticastAddress = InMulticastEndpoint.ToInternetAddr(); + for (const FIPv4Endpoint& Endpoint : InStaticEndpoints) + { + StaticAddresses.Add(Endpoint.ToInternetAddr()); + } Thread = FRunnableThread::Create(this, TEXT("FUdpMessageBeacon"), 128 * 1024, TPri_AboveNormal, FPlatformAffinity::GetPoolThreadMask()); } @@ -46,6 +50,7 @@ FUdpMessageBeacon::~FUdpMessageBeacon() } MulticastAddress = nullptr; + StaticAddresses.Empty(); FPlatformProcess::ReturnSynchEventToPool(EndpointLeftEvent); EndpointLeftEvent = nullptr; @@ -143,6 +148,43 @@ bool FUdpMessageBeacon::SendSegment(EUdpMessageSegments SegmentType, const FTime } +bool FUdpMessageBeacon::SendPing(const FTimespan& SocketWaitTime) +{ + FUdpMessageSegment::FHeader Header; + { + Header.SenderNodeId = 
NodeId; + // Pings were introduced at ProtocolVersion 11 and those messages needs to be send with that header to allow backward and forward discoverability + Header.ProtocolVersion = 11; + Header.SegmentType = EUdpMessageSegments::Ping; + } + uint8 ActualProtocolVersion = UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION; + + FArrayWriter Writer; + { + Writer << Header; + Writer << NodeId; + Writer << ActualProtocolVersion; // Send our actual Protocol version as part of the ping message + } + + + if (!Socket->Wait(ESocketWaitConditions::WaitForWrite, SocketWaitTime)) + { + return false; // socket not ready for sending + } + + int32 Sent; + for (const auto& StaticAddress : StaticAddresses) + { + if (!Socket->SendTo(Writer.GetData(), Writer.Num(), Sent, *StaticAddress)) + { + return false; // send failed + } + + } + return true; +} + + void FUdpMessageBeacon::Update(const FDateTime& CurrentTime, const FTimespan& SocketWaitTime) { if (CurrentTime < NextHelloTime) @@ -156,6 +198,7 @@ void FUdpMessageBeacon::Update(const FDateTime& CurrentTime, const FTimespan& So { NextHelloTime = CurrentTime + BeaconInterval; } + SendPing(SocketWaitTime); } diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.h index 93913b27788d..ddd9719138c5 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageBeacon.h @@ -34,8 +34,9 @@ public: * @param InSocket The network socket used to send Hello segments. * @param InSocketId The network socket identifier (used to detect unicast endpoint). * @param InMulticastEndpoint The multicast group endpoint to transport messages to. + * @param InStaticEndpoints The static nodes to broadcast to alongside the multicast. 
*/ - FUdpMessageBeacon(FSocket* InSocket, const FGuid& InSocketId, const FIPv4Endpoint& InMulticastEndpoint); + FUdpMessageBeacon(FSocket* InSocket, const FGuid& InSocketId, const FIPv4Endpoint& InMulticastEndpoint, const TArray& InStaticEndpoints); /** Virtual destructor. */ virtual ~FUdpMessageBeacon(); @@ -80,6 +81,12 @@ protected: */ bool SendSegment(EUdpMessageSegments SegmentType, const FTimespan& SocketWaitTime); + /** + * Sends a ping segment to static addresses. + * @return true on success, false otherwise. + */ + bool SendPing(const FTimespan& SocketWaitTime); + /** * Update the beacon sender. * @@ -111,6 +118,9 @@ private: /** Holds the multicast address and port number to send to. */ TSharedPtr MulticastAddress; + /** Holds the static addresses to broadcast ping to. */ + TArray> StaticAddresses; + /** Holds the time at which the next Hello segment must be sent. */ FDateTime NextHelloTime; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.cpp index 940f25e2f5e7..2734a1345e87 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.cpp @@ -10,6 +10,7 @@ #include "Serialization/ArrayReader.h" #include "Serialization/ArrayWriter.h" #include "Sockets.h" +#include "Math/UnrealMathUtility.h" #include "UObject/Class.h" #include "Shared/UdpMessagingSettings.h" @@ -17,6 +18,7 @@ #include "Transport/UdpMessageSegmenter.h" #include "Transport/UdpReassembledMessage.h" #include "Transport/UdpSerializedMessage.h" +#include "Transport/UdpSerializeMessageTask.h" /* FUdpMessageHelloSender static initialization @@ -30,8 +32,8 @@ const int32 FUdpMessageProcessor::DeadHelloIntervals = 5; FUdpMessageProcessor::FUdpMessageProcessor(FSocket& InSocket, const FGuid& 
InNodeId, const FIPv4Endpoint& InMulticastEndpoint) : Beacon(nullptr) - , LastSentMessage(-1) , LocalNodeId(InNodeId) + , LastSentMessage(-1) , MulticastEndpoint(InMulticastEndpoint) , Socket(&InSocket) , SocketSender(nullptr) @@ -42,8 +44,6 @@ FUdpMessageProcessor::FUdpMessageProcessor(FSocket& InSocket, const FGuid& InNod FPlatformProcess::ReturnSynchEventToPool(EventToDelete); }); - Thread = FRunnableThread::Create(this, TEXT("FUdpMessageProcessor"), 128 * 1024, TPri_AboveNormal, FPlatformAffinity::GetPoolThreadMask()); - const UUdpMessagingSettings& Settings = *GetDefault(); for (auto& StaticEndpoint : Settings.StaticEndpoints) @@ -60,6 +60,8 @@ FUdpMessageProcessor::FUdpMessageProcessor(FSocket& InSocket, const FGuid& InNod UE_LOG(LogUdpMessaging, Warning, TEXT("Invalid UDP Messaging Static Endpoint '%s'"), *StaticEndpoint); } } + + Thread = FRunnableThread::Create(this, TEXT("FUdpMessageProcessor"), 128 * 1024, TPri_AboveNormal, FPlatformAffinity::GetPoolThreadMask()); } @@ -86,7 +88,37 @@ FUdpMessageProcessor::~FUdpMessageProcessor() /* FUdpMessageProcessor interface *****************************************************************************/ -bool FUdpMessageProcessor::EnqueueInboundSegment(const TSharedPtr& Data, const FIPv4Endpoint& InSender) +TMap> FUdpMessageProcessor::GetRecipientsPerProtocolVersion(const TArray& Recipients) +{ + TMap> NodesPerVersion; + { + FScopeLock NodeVersionsLock(&NodeVersionCS); + + // No recipients means a publish, so broadcast to all known nodes (static nodes are in known nodes.) 
+ // We used to broadcast on the multicast endpoint, but the discovery of nodes should have found available nodes using multicast already + if (Recipients.Num() == 0) + { + for (auto& NodePair : NodeVersions) + { + NodesPerVersion.FindOrAdd(NodePair.Value).Add(NodePair.Key); + } + } + else + { + for (const FGuid& Recipient : Recipients) + { + uint8* Version = NodeVersions.Find(Recipient); + if (Version) + { + NodesPerVersion.FindOrAdd(*Version).Add(Recipient); + } + } + } + } + return NodesPerVersion; +} + +bool FUdpMessageProcessor::EnqueueInboundSegment(const TSharedPtr& Data, const FIPv4Endpoint& InSender) { if (!InboundSegments.Enqueue(FInboundSegment(Data, InSender))) { @@ -98,12 +130,26 @@ bool FUdpMessageProcessor::EnqueueInboundSegment(const TSharedPtr& SerializedMessage, const FGuid& Recipient) +bool FUdpMessageProcessor::EnqueueOutboundMessage(const TSharedRef& MessageContext, const TArray& Recipients) { - return OutboundMessages.Enqueue(FOutboundMessage(SerializedMessage, Recipient)); -} + TMap> RecipientPerVersions = GetRecipientsPerProtocolVersion(Recipients); + for (const auto& RecipientVersion : RecipientPerVersions) + { + // Create a message to serialize using that protocol version + TSharedRef SerializedMessage = MakeShared(RecipientVersion.Key, MessageContext->GetFlags()); + // Kick off the serialization task + TGraphTask::CreateTask().ConstructAndDispatchWhenReady(MessageContext, SerializedMessage, WorkEvent); + + // Enqueue the message + if (!OutboundMessages.Enqueue(FOutboundMessage(SerializedMessage, RecipientVersion.Value))) + { + return false; + } + } + + return true; +} /* FRunnable interface *****************************************************************************/ @@ -116,9 +162,16 @@ FSingleThreadRunnable* FUdpMessageProcessor::GetSingleThreadInterface() bool FUdpMessageProcessor::Init() { - Beacon = new FUdpMessageBeacon(Socket, LocalNodeId, MulticastEndpoint); + TArray StaticEndpoints; + 
StaticNodes.GenerateKeyArray(StaticEndpoints); + + Beacon = new FUdpMessageBeacon(Socket, LocalNodeId, MulticastEndpoint, StaticEndpoints); SocketSender = new FUdpSocketSender(Socket, TEXT("FUdpMessageProcessor.Sender")); + SupportedProtocolVersions.Add(UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION); + // Support Protocol version 10 + SupportedProtocolVersions.Add(10); + return true; } @@ -127,10 +180,15 @@ uint32 FUdpMessageProcessor::Run() { while (!Stopping) { + CurrentTime = FDateTime::UtcNow(); + if (WorkEvent->Wait(CalculateWaitTime())) { - Update(); + ConsumeInboundSegments(); + ConsumeOutboundMessages(); } + UpdateKnownNodes(); + UpdateStaticNodes(); } delete Beacon; @@ -150,6 +208,19 @@ void FUdpMessageProcessor::Stop() } +/* FSingleThreadRunnable interface +*****************************************************************************/ + +void FUdpMessageProcessor::Tick() +{ + CurrentTime = FDateTime::UtcNow(); + + ConsumeInboundSegments(); + ConsumeOutboundMessages(); + UpdateKnownNodes(); + UpdateStaticNodes(); +} + /* FUdpMessageProcessor implementation *****************************************************************************/ @@ -159,7 +230,7 @@ void FUdpMessageProcessor::AcknowledgeReceipt(int32 MessageId, const FNodeInfo& { Header.RecipientNodeId = NodeInfo.NodeId; Header.SenderNodeId = LocalNodeId; - Header.ProtocolVersion = UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION; + Header.ProtocolVersion = NodeInfo.ProtocolVersion; Header.SegmentType = EUdpMessageSegments::Acknowledge; } @@ -171,7 +242,7 @@ void FUdpMessageProcessor::AcknowledgeReceipt(int32 MessageId, const FNodeInfo& FArrayWriter Writer; { Writer << Header; - Writer << AcknowledgeChunk; + AcknowledgeChunk.Serialize(Writer, NodeInfo.ProtocolVersion); } int32 OutSent; @@ -201,16 +272,18 @@ void FUdpMessageProcessor::ConsumeInboundSegments() *Segment.Data << Header; - if (FilterSegment(Header, Segment.Data, Segment.Sender)) + if (FilterSegment(Header)) { FNodeInfo& NodeInfo = 
KnownNodes.FindOrAdd(Header.SenderNodeId); if (!NodeInfo.NodeId.IsValid()) { NodeInfo.NodeId = Header.SenderNodeId; + NodeInfo.ProtocolVersion = Header.ProtocolVersion; NodeDiscoveredDelegate.ExecuteIfBound(NodeInfo.NodeId); } + check(NodeInfo.ProtocolVersion == Header.ProtocolVersion); NodeInfo.Endpoint = Segment.Sender; switch (Header.SegmentType) @@ -223,6 +296,10 @@ void FUdpMessageProcessor::ConsumeInboundSegments() ProcessAcknowledgeSegment(Segment, NodeInfo); break; + case EUdpMessageSegments::AcknowledgeSegments: + ProcessAcknowledgeSegmentsSegment(Segment, NodeInfo); + break; + case EUdpMessageSegments::Bye: ProcessByeSegment(Segment, NodeInfo); break; @@ -235,6 +312,14 @@ void FUdpMessageProcessor::ConsumeInboundSegments() ProcessHelloSegment(Segment, NodeInfo); break; + case EUdpMessageSegments::Ping: + ProcessPingSegment(Segment, NodeInfo); + break; + + case EUdpMessageSegments::Pong: + ProcessPongSegment(Segment, NodeInfo); + break; + case EUdpMessageSegments::Retransmit: ProcessRetransmitSegment(Segment, NodeInfo); break; @@ -259,46 +344,34 @@ void FUdpMessageProcessor::ConsumeOutboundMessages() while (OutboundMessages.Dequeue(OutboundMessage)) { - if (OutboundMessage.SerializedMessage->TotalSize() > 1024 * 65536) - { - continue; - } - ++LastSentMessage; - FNodeInfo& RecipientNodeInfo = KnownNodes.FindOrAdd(OutboundMessage.RecipientId); - - if (!OutboundMessage.RecipientId.IsValid()) + for (const FGuid& RecipientId : OutboundMessage.RecipientIds) { - RecipientNodeInfo.Endpoint = MulticastEndpoint; - - for (auto& StaticNodeInfoPair : StaticNodes) + FNodeInfo* RecipientNodeInfo = KnownNodes.Find(RecipientId); + // Queue segmenters to the nodes we are dispatching to + if (RecipientNodeInfo) { - StaticNodeInfoPair.Value.Segmenters.Add( + RecipientNodeInfo->Segmenters.Add( LastSentMessage, - MakeShareable(new FUdpMessageSegmenter(OutboundMessage.SerializedMessage.ToSharedRef(), 1024)) + MakeShared(OutboundMessage.SerializedMessage.ToSharedRef(), 
UDP_MESSAGING_SEGMENT_SIZE) ); } } - - RecipientNodeInfo.Segmenters.Add( - LastSentMessage, - MakeShareable(new FUdpMessageSegmenter(OutboundMessage.SerializedMessage.ToSharedRef(), 1024)) - ); } } -bool FUdpMessageProcessor::FilterSegment(const FUdpMessageSegment::FHeader& Header, const TSharedPtr& Data, const FIPv4Endpoint& InSender) +bool FUdpMessageProcessor::FilterSegment(const FUdpMessageSegment::FHeader& Header) { - // filter unsupported protocol versions - if (Header.ProtocolVersion != UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) + // filter locally generated segments + if (Header.SenderNodeId == LocalNodeId) { return false; } - // filter locally generated segments - if (Header.SenderNodeId == LocalNodeId) + // filter unsupported protocol versions + if (!SupportedProtocolVersions.Contains(Header.ProtocolVersion)) { return false; } @@ -310,7 +383,7 @@ bool FUdpMessageProcessor::FilterSegment(const FUdpMessageSegment::FHeader& Head void FUdpMessageProcessor::ProcessAbortSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FUdpMessageSegment::FAbortChunk AbortChunk; - *Segment.Data << AbortChunk; + AbortChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); NodeInfo.Segmenters.Remove(AbortChunk.MessageId); } @@ -319,12 +392,29 @@ void FUdpMessageProcessor::ProcessAbortSegment(FInboundSegment& Segment, FNodeIn void FUdpMessageProcessor::ProcessAcknowledgeSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FUdpMessageSegment::FAcknowledgeChunk AcknowledgeChunk; - *Segment.Data << AcknowledgeChunk; + AcknowledgeChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); NodeInfo.Segmenters.Remove(AcknowledgeChunk.MessageId); } +void FUdpMessageProcessor::ProcessAcknowledgeSegmentsSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) // TODO: Rename function +{ + FUdpMessageSegment::FAcknowledgeSegmentsChunk AcknowledgeChunk; + AcknowledgeChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); + + TSharedPtr Segmenter = 
NodeInfo.Segmenters.FindRef(AcknowledgeChunk.MessageId); + if (Segmenter.IsValid()) + { + Segmenter->MarkAsAcknowledged(AcknowledgeChunk.Segments); + if (Segmenter->IsComplete()) + { + NodeInfo.Segmenters.Remove(AcknowledgeChunk.MessageId); + } + } +} + + void FUdpMessageProcessor::ProcessByeSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FGuid RemoteNodeId; @@ -340,8 +430,8 @@ void FUdpMessageProcessor::ProcessByeSegment(FInboundSegment& Segment, FNodeInfo void FUdpMessageProcessor::ProcessDataSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FUdpMessageSegment::FDataChunk DataChunk; - *Segment.Data << DataChunk; - + DataChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); + // Discard late segments for sequenced messages if ((DataChunk.Sequence != 0) && (DataChunk.Sequence < NodeInfo.Resequencer.GetNextSequence())) { @@ -353,13 +443,13 @@ void FUdpMessageProcessor::ProcessDataSegment(FInboundSegment& Segment, FNodeInf // Reassemble message if (!ReassembledMessage.IsValid()) { - ReassembledMessage = MakeShareable(new FUdpReassembledMessage(DataChunk.MessageSize, DataChunk.TotalSegments, DataChunk.Sequence, Segment.Sender)); + ReassembledMessage = MakeShared(NodeInfo.ProtocolVersion, DataChunk.MessageFlags, DataChunk.MessageSize, DataChunk.TotalSegments, DataChunk.Sequence, Segment.Sender); } ReassembledMessage->Reassemble(DataChunk.SegmentNumber, DataChunk.SegmentOffset, DataChunk.Data, CurrentTime); // Deliver or re-sequence message - if (!ReassembledMessage->IsComplete()) + if (!ReassembledMessage->IsComplete() || ReassembledMessage->IsDelivered()) { return; } @@ -385,8 +475,8 @@ void FUdpMessageProcessor::ProcessDataSegment(FInboundSegment& Segment, FNodeInf } } } - - NodeInfo.ReassembledMessages.Remove(DataChunk.MessageId); + // Mark the message delivered but do not remove it from the list yet, this is to prevent the double delivery of reliable message + ReassembledMessage->MarkDelivered(); } @@ -401,11 +491,68 @@ void 
FUdpMessageProcessor::ProcessHelloSegment(FInboundSegment& Segment, FNodeIn } } +void FUdpMessageProcessor::ProcessPingSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) +{ + FGuid RemoteNodeId; + *Segment.Data << RemoteNodeId; + uint8 NodeProtocolVersion; + *Segment.Data << NodeProtocolVersion; + + if (RemoteNodeId.IsValid()) + { + NodeInfo.ResetIfRestarted(RemoteNodeId); + } + + // The protocol version we are going to use to communicate to this node is the smallest between its version and our own + uint8 ProtocolVersion = FMath::Min(NodeProtocolVersion, UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION); + + // if that protocol isn't in our supported protocols we do not reply to the pong and remove this node since we don't support its version + if (!SupportedProtocolVersions.Contains(ProtocolVersion)) + { + KnownNodes.Remove(NodeInfo.NodeId); + return; + } + + // Set this node protocol to our agreed protocol + NodeInfo.ProtocolVersion = ProtocolVersion; + + // Send the pong + FUdpMessageSegment::FHeader Header; + { + Header.RecipientNodeId = NodeInfo.NodeId; + Header.SenderNodeId = LocalNodeId; + // Reply to the ping using the agreed protocol + Header.ProtocolVersion = ProtocolVersion; + Header.SegmentType = EUdpMessageSegments::Pong; + } + + FArrayWriter Writer; + { + Writer << Header; + Writer << LocalNodeId; + } + + int32 OutSent; + Socket->SendTo(Writer.GetData(), Writer.Num(), OutSent, *NodeInfo.Endpoint.ToInternetAddr()); +} + + +void FUdpMessageProcessor::ProcessPongSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) +{ + FGuid RemoteNodeId; + *Segment.Data << RemoteNodeId; + + if (RemoteNodeId.IsValid()) + { + NodeInfo.ResetIfRestarted(RemoteNodeId); + } +} + void FUdpMessageProcessor::ProcessRetransmitSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FUdpMessageSegment::FRetransmitChunk RetransmitChunk; - *Segment.Data << RetransmitChunk; + RetransmitChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); TSharedPtr Segmenter = 
NodeInfo.Segmenters.FindRef(RetransmitChunk.MessageId); @@ -419,7 +566,7 @@ void FUdpMessageProcessor::ProcessRetransmitSegment(FInboundSegment& Segment, FN void FUdpMessageProcessor::ProcessTimeoutSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo) { FUdpMessageSegment::FTimeoutChunk TimeoutChunk; - *Segment.Data << TimeoutChunk; + TimeoutChunk.Serialize(*Segment.Data, NodeInfo.ProtocolVersion); TSharedPtr Segmenter = NodeInfo.Segmenters.FindRef(TimeoutChunk.MessageId); @@ -442,18 +589,6 @@ void FUdpMessageProcessor::RemoveKnownNode(const FGuid& NodeId) KnownNodes.Remove(NodeId); } - -void FUdpMessageProcessor::Update() -{ - CurrentTime = FDateTime::UtcNow(); - - ConsumeInboundSegments(); - ConsumeOutboundMessages(); - UpdateKnownNodes(); - UpdateStaticNodes(); -} - - void FUdpMessageProcessor::UpdateKnownNodes() { // remove dead remote endpoints @@ -472,6 +607,7 @@ void FUdpMessageProcessor::UpdateKnownNodes() else { UpdateSegmenters(NodeInfo); + UpdateReassemblers(NodeInfo); } } @@ -480,19 +616,21 @@ void FUdpMessageProcessor::UpdateKnownNodes() RemoveKnownNode(Node); } + UpdateNodesPerVersion(); + Beacon->SetEndpointCount(KnownNodes.Num() + 1); } void FUdpMessageProcessor::UpdateSegmenters(FNodeInfo& NodeInfo) { - FUdpMessageSegment::FHeader Header; + FUdpMessageSegment::FHeader Header { - Header.RecipientNodeId = NodeInfo.NodeId; - Header.SenderNodeId = LocalNodeId; - Header.ProtocolVersion = UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION; - Header.SegmentType = EUdpMessageSegments::Data; - } + NodeInfo.ProtocolVersion, // Header.ProtocolVersion - Send data segment using the node protocol version + NodeInfo.NodeId, // Header.RecipientNodeId + LocalNodeId, // Header.SenderNodeId + EUdpMessageSegments::Data // Header.SegmentType + }; for (TMap >::TIterator It(NodeInfo.Segmenters); It; ++It) { @@ -500,33 +638,47 @@ void FUdpMessageProcessor::UpdateSegmenters(FNodeInfo& NodeInfo) Segmenter->Initialize(); - if (Segmenter->IsInitialized()) + if 
(Segmenter->IsInitialized() && Segmenter->NeedSending(CurrentTime)) { FUdpMessageSegment::FDataChunk DataChunk; - while (Segmenter->GetNextPendingSegment(DataChunk.Data, DataChunk.SegmentNumber)) + for (TConstSetBitIterator<> BIt(Segmenter->GetPendingSegments()); BIt; ++BIt) { + Segmenter->GetPendingSegment(BIt.GetIndex(), DataChunk.Data); + DataChunk.SegmentNumber = BIt.GetIndex(); + DataChunk.MessageId = It.Key(); + DataChunk.MessageFlags = Segmenter->GetMessageFlags(); DataChunk.MessageSize = Segmenter->GetMessageSize(); - DataChunk.SegmentOffset = 1024 * DataChunk.SegmentNumber; + DataChunk.SegmentOffset = UDP_MESSAGING_SEGMENT_SIZE * DataChunk.SegmentNumber; DataChunk.Sequence = 0; // @todo gmp: implement message sequencing DataChunk.TotalSegments = Segmenter->GetSegmentCount(); - TSharedRef Writer = MakeShareable(new FArrayWriter); + // validate with are sending message in the proper protocol version + check(Header.ProtocolVersion == Segmenter->GetProtocolVersion()); + + TSharedRef Writer = MakeShared(); { *Writer << Header; - *Writer << DataChunk; + DataChunk.Serialize(*Writer, Header.ProtocolVersion); } if (!SocketSender->Send(Writer, NodeInfo.Endpoint)) { return; } - - Segmenter->MarkAsSent(DataChunk.SegmentNumber); } - It.RemoveCurrent(); + // update sent time for reliable messages + if (EnumHasAnyFlags(Segmenter->GetMessageFlags(), EMessageFlags::Reliable)) + { + Segmenter->UpdateSentTime(CurrentTime); + } + // if message isn't reliable no need to keep track + else + { + It.RemoveCurrent(); + } } else if (Segmenter->IsInvalid()) { @@ -536,6 +688,52 @@ void FUdpMessageProcessor::UpdateSegmenters(FNodeInfo& NodeInfo) } +const FTimespan FUdpMessageProcessor::StaleReassemblyInterval = FTimespan::FromSeconds(30); + +void FUdpMessageProcessor::UpdateReassemblers(FNodeInfo& NodeInfo) +{ + FUdpMessageSegment::FHeader Header + { + FMath::Max(NodeInfo.ProtocolVersion, (uint8)11), // Header.ProtocolVersion, AcknowledgeSegments are version 11 and onward segment + 
NodeInfo.NodeId, // Header.RecipientNodeId + LocalNodeId, // Header.SenderNodeId + EUdpMessageSegments::AcknowledgeSegments // Header.SegmentType + }; + + for (TMap>::TIterator It(NodeInfo.ReassembledMessages); It; ++It) + { + TSharedPtr& ReassembledMessage = It.Value(); + FUdpMessageSegment::FAcknowledgeSegmentsChunk AcknowledgeChunk + { + It.Key(), // MessageId + ReassembledMessage->GetPendingAcknowledgments() // Segments + }; + + // Send pending acknowledgments + if (AcknowledgeChunk.Segments.Num() > 0) + { + TSharedRef Writer = MakeShared(); + { + *Writer << Header; + AcknowledgeChunk.Serialize(*Writer, Header.ProtocolVersion); + } + + if (!SocketSender->Send(Writer, NodeInfo.Endpoint)) + { + return; + } + } + + // Remove stale reassembled message if they aren't reliable or are marked delivered + if (ReassembledMessage->GetLastSegmentTime() + StaleReassemblyInterval <= CurrentTime && + (!EnumHasAnyFlags(ReassembledMessage->GetFlags(), EMessageFlags::Reliable) || ReassembledMessage->IsDelivered())) + { + It.RemoveCurrent(); + } + } +} + + void FUdpMessageProcessor::UpdateStaticNodes() { for (auto& StaticNodePair : StaticNodes) @@ -544,11 +742,12 @@ void FUdpMessageProcessor::UpdateStaticNodes() } } - -/* FSingleThreadRunnable interface - *****************************************************************************/ - -void FUdpMessageProcessor::Tick() +void FUdpMessageProcessor::UpdateNodesPerVersion() { - Update(); + FScopeLock NodeVersionLock(&NodeVersionCS); + NodeVersions.Empty(); + for (auto& NodePair : KnownNodes) + { + NodeVersions.Add(NodePair.Key, NodePair.Value.ProtocolVersion); + } } diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.h index 7e0f310c9a7c..523d422c161a 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.h +++ 
b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageProcessor.h @@ -15,6 +15,7 @@ #include "Misc/Timespan.h" #include "Templates/SharedPointer.h" +#include "UdpMessagingPrivate.h" #include "Shared/UdpMessageSegment.h" #include "Transport/UdpMessageResequencer.h" @@ -49,6 +50,9 @@ class FUdpMessageProcessor /** Holds the endpoint's node identifier. */ FGuid NodeId; + /** Holds the protocol version this node is communicating with */ + uint8 ProtocolVersion; + /** Holds the collection of reassembled messages. */ TMap> ReassembledMessages; @@ -56,12 +60,13 @@ class FUdpMessageProcessor FUdpMessageResequencer Resequencer; /** Holds the collection of message segmenters. */ - TMap > Segmenters; + TMap> Segmenters; /** Default constructor. */ FNodeInfo() : LastSegmentReceivedTime(FDateTime::MinValue()) , NodeId() + , ProtocolVersion(UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) { } /** Resets the endpoint info. */ @@ -104,16 +109,16 @@ class FUdpMessageProcessor /** Holds the serialized message. */ TSharedPtr SerializedMessage; - /** Holds the recipient. */ - FGuid RecipientId; + /** Holds the recipients. */ + TArray RecipientIds; /** Default constructor. */ FOutboundMessage() { } /** Creates and initializes a new instance. */ - FOutboundMessage(const TSharedRef& InSerializedMessage, const FGuid& InRecipientId) + FOutboundMessage(TSharedPtr InSerializedMessage, const TArray& InRecipientIds) : SerializedMessage(InSerializedMessage) - , RecipientId(InRecipientId) + , RecipientIds(InRecipientIds) { } }; @@ -133,6 +138,14 @@ public: public: + /** + * Get a list of Nodes Ids split by supported Protocol version + * + * @param Recipients The list of recipients Ids + * @return A map of protocol version -> list of node ids for that protocol + */ + TMap> GetRecipientsPerProtocolVersion(const TArray& Recipients); + /** * Queues up an inbound message segment. * @@ -145,11 +158,11 @@ public: /** * Queues up an outbound message. 
* - * @param SerializedMessage The serialized message to send. - * @param Recipient The recipient's IPv4 endpoint. + * @param MessageContext The message to serialize and send. + * @param Recipients The recipients ids to send to. * @return true if the message was queued up, false otherwise. */ - bool EnqueueOutboundMessage(const TSharedRef& SerializedMessage, const FGuid& Recipient); + bool EnqueueOutboundMessage(const TSharedRef& MessageContext, const TArray& Recipients); /** * Get the event used to signal the message processor that work is available. @@ -243,7 +256,7 @@ protected: * @param Sender The segment sender. * @return true if the segment passed the filter, false otherwise. */ - bool FilterSegment(const FUdpMessageSegment::FHeader& Header, const TSharedPtr& Data, const FIPv4Endpoint& Sender); + bool FilterSegment(const FUdpMessageSegment::FHeader& Header); /** * Processes an Abort segment. @@ -254,13 +267,21 @@ protected: void ProcessAbortSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); /** - * Processes a Success segment. + * Processes an Acknowledgement segment. * * @param Segment The segment to process. * @param NodeInfo Details for the node that sent the segment. */ void ProcessAcknowledgeSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); + /** + * Processes an AcknowledgmentSegments segment. + * + * @param Segment The segment to process. + * @param NodeInfo Details for the node that sent the segment. + */ + void ProcessAcknowledgeSegmentsSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); + /** * Processes a Bye segment. * @@ -291,6 +312,22 @@ protected: * @param Segment The segment to process. * @param NodeInfo Details for the node that sent the segment. */ + void ProcessPingSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); + + /** + * Processes a Pong segment. + * + * @param Segment The segment to process. + * @param NodeInfo Details for the node that sent the segment. 
+ */ + void ProcessPongSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); + + /** + * Processes a Retransmit segment. + * + * @param Segment The segment to process. + * @param NodeInfo Details for the node that sent the segment. + */ void ProcessRetransmitSegment(FInboundSegment& Segment, FNodeInfo& NodeInfo); /** @@ -317,8 +354,6 @@ protected: */ void RemoveKnownNode(const FGuid& NodeId); - /** Update the message processor. */ - void Update(); /** Updates all known remote nodes. */ void UpdateKnownNodes(); @@ -330,9 +365,20 @@ protected: */ void UpdateSegmenters(FNodeInfo& NodeInfo); + /** + * Updates all reassemblers of the specified node. + * + * @param NodeInfo Details for the node to update. + */ + void UpdateReassemblers(FNodeInfo& NodeInfo); + + /** Updates all static remote nodes. */ void UpdateStaticNodes(); + /** Updates nodes per protocol version map */ + void UpdateNodesPerVersion(); + protected: //~ FSingleThreadRunnable interface @@ -355,15 +401,27 @@ private: /** Holds the current time. */ FDateTime CurrentTime; + /** Holds the protocol version that can be communicated in. */ + TArray SupportedProtocolVersions; + + /** Mutex protecting access to the NodeVersions map. */ + mutable FCriticalSection NodeVersionCS; + + /** Holds the protocol version of each nodes separately for safe access (NodeId -> Protocol Version). */ + TMap NodeVersions; + /** Holds the collection of known remote nodes. */ TMap KnownNodes; - /** Holds the last sent message number. */ - int32 LastSentMessage; + /** Holds the collection of static remote nodes. */ + TMap StaticNodes; /** Holds the local node identifier. */ FGuid LocalNodeId; + /** Holds the last sent message number. */ + int32 LastSentMessage; + /** Holds the multicast endpoint. */ FIPv4Endpoint MulticastEndpoint; @@ -373,9 +431,6 @@ private: /** Holds the socket sender. */ FUdpSocketSender* SocketSender; - /** Holds the collection of static remote nodes. 
*/ - TMap StaticNodes; - /** Holds a flag indicating that the thread is stopping. */ bool Stopping; @@ -400,4 +455,7 @@ private: /** Defines the maximum number of Hello segments that can be dropped before a remote endpoint is considered dead. */ static const int32 DeadHelloIntervals; + + /** Defines a timespan after which non fully reassembled messages that have stopped receiving segments are dropped. */ + static const FTimespan StaleReassemblyInterval; }; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.cpp index 31db8ee71281..a5eceefdf323 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.cpp @@ -67,6 +67,36 @@ bool FUdpMessageSegmenter::GetNextPendingSegment(TArray& OutData, uint16& } +bool FUdpMessageSegmenter::GetPendingSegment(uint16 InSegment, TArray& OutData) const +{ + if (MessageReader == nullptr) + { + return false; + } + + if (InSegment < PendingSegments.Num() && PendingSegments[InSegment]) + { + uint32 SegmentOffset = InSegment * SegmentSize; + int32 ActualSegmentSize = MessageReader->TotalSize() - SegmentOffset; + + if (ActualSegmentSize > SegmentSize) + { + ActualSegmentSize = SegmentSize; + } + + OutData.Reset(ActualSegmentSize); + OutData.AddUninitialized(ActualSegmentSize); + + MessageReader->Seek(SegmentOffset); + MessageReader->Serialize(OutData.GetData(), ActualSegmentSize); + + return true; + } + + return false; +} + + void FUdpMessageSegmenter::Initialize() { if (MessageReader != nullptr) @@ -89,12 +119,27 @@ bool FUdpMessageSegmenter::IsInvalid() const } -void FUdpMessageSegmenter::MarkAsSent(uint16 Segment) +uint8 FUdpMessageSegmenter::GetProtocolVersion() const { - if (Segment < PendingSegments.Num()) + return 
SerializedMessage->GetProtocolVersion(); +} + + +EMessageFlags FUdpMessageSegmenter::GetMessageFlags() const +{ + return SerializedMessage->GetFlags(); +} + + +void FUdpMessageSegmenter::MarkAsAcknowledged(const TArray& Segments) +{ + for (const auto& Segment : Segments) { - PendingSegments[Segment] = false; - --PendingSegmentsCount; + if (Segment < PendingSegments.Num()) + { + PendingSegments[Segment] = false; + --PendingSegmentsCount; + } } } @@ -109,3 +154,16 @@ void FUdpMessageSegmenter::MarkForRetransmission(const TArray& Segments) } } } + +const FTimespan FUdpMessageSegmenter::SendInterval = FTimespan::FromMilliseconds(100); + +bool FUdpMessageSegmenter::NeedSending(const FDateTime& CurrentTime) +{ + return LastSentTime + SendInterval <= CurrentTime; +} + +void FUdpMessageSegmenter::UpdateSentTime(const FDateTime& CurrentTime) +{ + LastSentTime = CurrentTime; + ++SentNumber; +} diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.h index ece55fe11c41..38e8524351ae 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageSegmenter.h @@ -6,6 +6,10 @@ #include "Containers/Array.h" #include "Containers/BitArray.h" #include "Templates/SharedPointer.h" +#include "Misc/DateTime.h" + +// IMessageContext forward declaration +enum class EMessageFlags : uint32; class FArchive; class FUdpSerializedMessage; @@ -24,6 +28,7 @@ public: /** Default constructor. 
*/ FUdpMessageSegmenter() : MessageReader(nullptr) + , SentNumber(0) { } /** @@ -34,6 +39,8 @@ public: FUdpMessageSegmenter(const TSharedRef& InSerializedMessage, uint16 InSegmentSize) : MessageReader(nullptr) , SegmentSize(InSegmentSize) + , SentNumber(0) + , LastSentTime(0) , SerializedMessage(InSerializedMessage) { } @@ -58,6 +65,25 @@ public: */ bool GetNextPendingSegment(TArray& OutData, uint16& OutSegment) const; + /** + * Gets the pending segment at. + * + * @param InSegment the segment number we are requesting the data for. + * @param OutData Will hold the segment data. + * @return true if a segment was returned, false if that segment is no longer pending or the segment number is invalid. + */ + bool GetPendingSegment(uint16 InSegment, TArray& OutData) const; + + + /** + * Get the pending segments array. + * @return the list of pending segments flags. + */ + const TBitArray<>& GetPendingSegments() const + { + return PendingSegments; + } + /** * Gets the number of segments that haven't been received yet. * @@ -108,12 +134,18 @@ public: */ bool IsInvalid() const; + /** Return the Protocol Version for this segmenter. */ + uint8 GetProtocolVersion() const; + + /** @return the message flags. */ + EMessageFlags GetMessageFlags() const; + /** - * Marks the specified segment as sent. - * - * @param Segment The sent segment. - */ - void MarkAsSent(uint16 Segment); + * Marks the specified segments as acknowledged. + * + * @param Segments The acknowledged segments. + */ + void MarkAsAcknowledged(const TArray& Segments); /** * Marks the entire message for retransmission. @@ -130,7 +162,23 @@ public: */ void MarkForRetransmission(const TArray& Segments); + /** + * Checks if this segmenter needs to send segments + * + * @return true if the segmenter needs to send + */ + bool NeedSending(const FDateTime& CurrentTime); + + /** + * Update the last sent time and increment the sent number for this segmenter + * + * @param CurrentTime the time to update to. 
+ */ + void UpdateSentTime(const FDateTime& CurrentTime); + private: + /** Defines the time interval for sending. */ + static const FTimespan SendInterval; /** temp hack to support new transport API. */ FArchive* MessageReader; @@ -144,6 +192,12 @@ private: /** Holds the segment size. */ uint16 SegmentSize; + /** Holds the number of time we sent the segments */ + uint16 SentNumber; + + /** Holds the time at which we last sent */ + FDateTime LastSentTime; + /** Holds the message. */ TSharedPtr SerializedMessage; }; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageTransport.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageTransport.cpp index 19836933a92a..d49cc8e97497 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageTransport.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpMessageTransport.cpp @@ -17,7 +17,6 @@ #include "Transport/UdpDeserializedMessage.h" #include "Transport/UdpSerializedMessage.h" #include "Transport/UdpMessageProcessor.h" -#include "Transport/UdpSerializeMessageTask.h" /* FUdpMessageTransport structors @@ -180,25 +179,7 @@ bool FUdpMessageTransport::TransportMessage(const TSharedRef SerializedMessage = MakeShared(); - - if (Recipients.Num() == 0) - { - // publish the message - MessageProcessor->EnqueueOutboundMessage(SerializedMessage, FGuid()); - } - else - { - // send the message - for (const auto& Recipient : Recipients) - { - MessageProcessor->EnqueueOutboundMessage(SerializedMessage, Recipient); - } - } - - TGraphTask::CreateTask().ConstructAndDispatchWhenReady(Context, SerializedMessage, MessageProcessor->GetWorkEvent()); - - return true; + return MessageProcessor->EnqueueOutboundMessage(Context, Recipients); } @@ -208,7 +189,7 @@ bool FUdpMessageTransport::TransportMessage(const TSharedRef& Attachment, const FGuid& NodeId) { // @todo gmp: move message 
deserialization into an async task - TSharedRef DeserializedMessage = MakeShareable(new FUdpDeserializedMessage(Attachment)); + TSharedRef DeserializedMessage = MakeShared(Attachment); if (DeserializedMessage->Deserialize(ReassembledMessage)) { diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpReassembledMessage.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpReassembledMessage.h index bb4cd3d9839f..06650a1afc49 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpReassembledMessage.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpReassembledMessage.h @@ -8,6 +8,8 @@ #include "Interfaces/IPv4/IPv4Endpoint.h" #include "Misc/DateTime.h" +// IMessageContext forward declaration +enum class EMessageFlags : uint32; /** * Implements a reassembled message. @@ -17,37 +19,62 @@ class FUdpReassembledMessage public: /** Default constructor. */ - FUdpReassembledMessage() { } + FUdpReassembledMessage() = default; /** * Creates and initializes a new inbound message info. * + * @param ProtocolVersion The protocol version the message is serialized in. + * @param MessageFlags The message flags of the reassembled message. * @param MessageSize The total size of the message in bytes. * @param SegmentCount The total number of segments that need to be received for this message. * @param InSequence The message sequence number. * @param InSender The IPv4 endpoint of the sender. 
*/ - FUdpReassembledMessage(int32 MessageSize, int32 SegmentCount, uint64 InSequence, const FIPv4Endpoint& InSender) - : PendingSegments(true, SegmentCount) + FUdpReassembledMessage(uint8 InProtocolVersion, EMessageFlags InFlags, int32 MessageSize, int32 SegmentCount, uint64 InSequence, const FIPv4Endpoint& InSender) + : ProtocolVersion(InProtocolVersion) + , MessageFlags(InFlags) + , PendingSegments(true, SegmentCount) , PendingSegmentsCount(SegmentCount) , ReceivedBytes(0) + , bIsDelivered(false) + , RetransmitRequestsCount(0) , Sender(InSender) , Sequence(InSequence) { Data.AddUninitialized(MessageSize); } - /** Virtual destructor. */ - virtual ~FUdpReassembledMessage() { } + ~FUdpReassembledMessage() = default; public: + /** + * Gets the message protocol version. + * + * @return The Message protocol version. + */ + uint8 GetProtocolVersion() const + { + return ProtocolVersion; + } + + /** + * Gets the message flags + * + * @return The Message flags. + */ + EMessageFlags GetFlags() const + { + return MessageFlags; + } + /** * Gets the message data. * * @return Message data. */ - virtual const TArray& GetData() const + const TArray& GetData() const { return Data; } @@ -82,6 +109,17 @@ public: return Result; } + + /** + * Gets the total number of segments. + * + * @return Number of total segments. + */ + uint16 GetTotalSegmentsCount() const + { + return PendingSegments.Num(); + } + /** * Gets the number of segments that haven't been received yet. * @@ -132,6 +170,36 @@ public: return (Data.Num() < 0); } + /** + * Get the list of pending Acknowledgments and clear it + * + * @return The array of pending acknowledgments + */ + TArray GetPendingAcknowledgments() + { + TArray Temp; + Swap(Temp, PendingAcknowledgments); + return Temp; + } + + /** + * Get if the message has been delivered. + * @return true if the message has been marked delivered. + */ + bool IsDelivered() const + { + return bIsDelivered; + } + + /** + * Mark the reasembled message as delivered. 
+ */ + void MarkDelivered() + { + bIsDelivered = true; + } + + /** * Reassembles a segment into the specified message. * @@ -158,13 +226,18 @@ public: PendingSegments[SegmentNumber] = false; --PendingSegmentsCount; - ReceivedBytes += SegmentData.Num(); } } + PendingAcknowledgments.Add(SegmentNumber); } private: + /** Holds the message protocol version. */ + uint8 ProtocolVersion; + + /** */ + EMessageFlags MessageFlags; /** Holds the message data. */ TArray Data; @@ -178,9 +251,15 @@ private: /** Holds the number of segments that haven't been received yet. */ uint16 PendingSegmentsCount; + /** Acknowledgment yet to be sent about segments we received */ + TArray PendingAcknowledgments; + /** Holds the number of bytes received so far. */ int32 ReceivedBytes; + /** Holds if the reassembled message has been delivered */ + bool bIsDelivered; + /** Holds the number of retransmit requests that were sent since the last segment was received. */ int32 RetransmitRequestsCount; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializeMessageTask.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializeMessageTask.cpp index 33f60b09a4cf..ce9f5796efea 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializeMessageTask.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializeMessageTask.cpp @@ -2,13 +2,96 @@ #include "Transport/UdpSerializeMessageTask.h" -#include "Backends/JsonStructSerializerBackend.h" #include "HAL/Event.h" #include "IMessageContext.h" +#include "Backends/JsonStructSerializerBackend.h" +#include "Backends/CborStructSerializerBackend.h" #include "StructSerializer.h" +#include "UdpMessagingPrivate.h" #include "Transport/UdpSerializedMessage.h" +namespace UdpSerializeMessageTaskDetails +{ + +/** Serialization Routine for message using Protocol version 10 */ +void SerializeMessageV10(FArchive& 
Archive, const TSharedRef& MessageContext) +{ + const FName& MessageType = MessageContext->GetMessageType(); + Archive << const_cast(MessageType); + + const FMessageAddress& Sender = MessageContext->GetSender(); + Archive << const_cast(Sender); + + const TArray& Recipients = MessageContext->GetRecipients(); + Archive << const_cast&>(Recipients); + + EMessageScope Scope = MessageContext->GetScope(); + Archive << Scope; + + const FDateTime& TimeSent = MessageContext->GetTimeSent(); + Archive << const_cast(TimeSent); + + const FDateTime& Expiration = MessageContext->GetExpiration(); + Archive << const_cast(Expiration); + + int32 NumAnnotations = MessageContext->GetAnnotations().Num(); + Archive << NumAnnotations; + + for (const auto& AnnotationPair : MessageContext->GetAnnotations()) + { + Archive << const_cast(AnnotationPair.Key); + Archive << const_cast(AnnotationPair.Value); + } + + // serialize message body + FJsonStructSerializerBackend Backend(Archive); + FStructSerializer::Serialize(MessageContext->GetMessage(), *MessageContext->GetMessageTypeInfo(), Backend); +} + +void SerializeMessageV11(FArchive& Archive, const TSharedRef& MessageContext) +{ + const FName& MessageType = MessageContext->GetMessageType(); + Archive << const_cast(MessageType); + + const FMessageAddress& Sender = MessageContext->GetSender(); + Archive << const_cast(Sender); + + const TArray& Recipients = MessageContext->GetRecipients(); + Archive << const_cast&>(Recipients); + + EMessageScope Scope = MessageContext->GetScope(); + Archive << Scope; + + EMessageFlags Flags = MessageContext->GetFlags(); + Archive << Flags; + + const FDateTime& TimeSent = MessageContext->GetTimeSent(); + Archive << const_cast(TimeSent); + + const FDateTime& Expiration = MessageContext->GetExpiration(); + Archive << const_cast(Expiration); + + int32 NumAnnotations = MessageContext->GetAnnotations().Num(); + Archive << NumAnnotations; + + for (const auto& AnnotationPair : MessageContext->GetAnnotations()) + { + 
Archive << const_cast(AnnotationPair.Key); + Archive << const_cast(AnnotationPair.Value); + } + + // Message Wire Format Id + uint8 MessageFormat = (uint8)EUdpMessageFormat::Cbor; + Archive << MessageFormat; + + // serialize message body with cbor + FCborStructSerializerBackend Backend(Archive); + FStructSerializer::Serialize(MessageContext->GetMessage(), *MessageContext->GetMessageTypeInfo(), Backend); +} + +} // namespace UdpSerializeMessageTaskDetails + /* FUdpSerializeMessageTask interface *****************************************************************************/ @@ -21,42 +104,41 @@ void FUdpSerializeMessageTask::DoTask(ENamedThreads::Type CurrentThread, const F // a consistent wire format, if their implementations change. This allows us to sanity // check the values during deserialization. @see FUdpDeserializeMessage::Deserialize() - // serialize context + // serialize context depending on supported protocol version FArchive& Archive = SerializedMessage.Get(); + bool Serialized = true; + switch (SerializedMessage->GetProtocolVersion()) { - const FName& MessageType = MessageContext->GetMessageType(); - Archive << const_cast(MessageType); + case 10: + UdpSerializeMessageTaskDetails::SerializeMessageV10(Archive, MessageContext); + break; - const FMessageAddress& Sender = MessageContext->GetSender(); - Archive << const_cast(Sender); + case 11: + UdpSerializeMessageTaskDetails::SerializeMessageV11(Archive, MessageContext); + break; - const TArray& Recipients = MessageContext->GetRecipients(); - Archive << const_cast&>(Recipients); - - EMessageScope Scope = MessageContext->GetScope(); - Archive << Scope; - - const FDateTime& TimeSent = MessageContext->GetTimeSent(); - Archive << const_cast(TimeSent); - - const FDateTime& Expiration = MessageContext->GetExpiration(); - Archive << const_cast(Expiration); - - int32 NumAnnotations = MessageContext->GetAnnotations().Num(); - Archive << NumAnnotations; - - for (const auto& AnnotationPair : 
MessageContext->GetAnnotations()) - { - Archive << const_cast(AnnotationPair.Key); - Archive << const_cast(AnnotationPair.Value); - } + default: + // Unsupported protocol version + Serialized = false; + break; } - // serialize message body - FJsonStructSerializerBackend Backend(Archive); - FStructSerializer::Serialize(MessageContext->GetMessage(), *MessageContext->GetMessageTypeInfo(), Backend); - - SerializedMessage->UpdateState(EUdpSerializedMessageState::Complete); + // if the message wasn't serialized, flag it invalid + if (!Serialized) + { + UE_LOG(LogUdpMessaging, Error, TEXT("Unsupported Protocol Version message tasked for serialization, discarding...")); + SerializedMessage->UpdateState(EUdpSerializedMessageState::Invalid); + } + // Once serialized if the size of the message is bigger than the maximum allow mark it as invalid and log an error + else if (SerializedMessage->TotalSize() > UDP_MESSAGING_SEGMENT_SIZE * 65536) + { + UE_LOG(LogUdpMessaging, Error, TEXT("Serialized Message total size '%i' is over the allowed maximum '%i', discarding..."), SerializedMessage->TotalSize(), UDP_MESSAGING_SEGMENT_SIZE * 65536); + SerializedMessage->UpdateState(EUdpSerializedMessageState::Invalid); + } + else + { + SerializedMessage->UpdateState(EUdpSerializedMessageState::Complete); + } } else { diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializedMessage.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializedMessage.h index fb2d2ce36887..ef99a91c708e 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializedMessage.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Transport/UdpSerializedMessage.h @@ -5,10 +5,11 @@ #include "CoreTypes.h" #include "Containers/Array.h" #include "Delegates/Delegate.h" +#include "Templates/SharedPointer.h" #include "Serialization/Archive.h" #include "Serialization/MemoryWriter.h" #include 
"Serialization/MemoryReader.h" - +#include "IMessageContext.h" /** * Enumerates possibly states of a serialized message. @@ -37,10 +38,18 @@ class FUdpSerializedMessage public: /** Default constructor. */ - FUdpSerializedMessage() + FUdpSerializedMessage(uint8 InProtocolVersion, EMessageFlags InFlags) : FMemoryWriter(DataArray, true) , State(EUdpSerializedMessageState::Incomplete) - { } + , Flags(InFlags) + , ProtocolVersion(InProtocolVersion) + { + // Flags aren't supported in protocol version previous to 11 + if (ProtocolVersion < 11) + { + Flags = EMessageFlags::None; + } + } public: @@ -89,6 +98,18 @@ public: State = InState; } + /** @return the message flags. */ + EMessageFlags GetFlags() const + { + return Flags; + } + + /** @return the message protocol version. */ + uint8 GetProtocolVersion() const + { + return ProtocolVersion; + } + private: /** Holds the serialized data. */ @@ -96,4 +117,10 @@ private: /** Holds the message data state. */ EUdpSerializedMessageState State; + + /** Holds message flags, captured from context. */ + EMessageFlags Flags; + + /** Holds the Protocol Version the message will be serialized in. 
*/ + uint8 ProtocolVersion; }; diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tunnel/UdpMessageTunnel.cpp b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tunnel/UdpMessageTunnel.cpp index 298d27bc5bef..fb5269f1d3ca 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tunnel/UdpMessageTunnel.cpp +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/Tunnel/UdpMessageTunnel.cpp @@ -225,7 +225,7 @@ void FUdpMessageTunnel::TcpToUdp() *Payload << Header; // check protocol version - if (Header.ProtocolVersion != UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) + if (Header.ProtocolVersion > UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) { return; } @@ -288,7 +288,7 @@ void FUdpMessageTunnel::UdpToTcp(FSocket* Socket) *Datagram << Header; // check protocol version - if (Header.ProtocolVersion != UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) + if (Header.ProtocolVersion > UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION) { return; } diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/UdpMessagingPrivate.h b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/UdpMessagingPrivate.h index 260278650d17..c44a06d11253 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/UdpMessagingPrivate.h +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/Private/UdpMessagingPrivate.h @@ -23,5 +23,17 @@ DECLARE_LOG_CATEGORY_EXTERN(LogUdpMessaging, Log, All); /** Defines the desired size of socket receive buffers (in bytes). */ #define UDP_MESSAGING_RECEIVE_BUFFER_SIZE 2 * 1024 * 1024 +/** Define the desired size for the message segments */ +#define UDP_MESSAGING_SEGMENT_SIZE 1024 + /** Defines the protocol version of the UDP message transport. 
*/ -#define UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION 10 +#define UDP_MESSAGING_TRANSPORT_PROTOCOL_VERSION 11 + +/** Supported message serialization format */ +enum class EUdpMessageFormat : uint8 +{ + None = 0, + Json, + TaggedProperty, + Cbor, +}; \ No newline at end of file diff --git a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/UdpMessaging.Build.cs b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/UdpMessaging.Build.cs index 5b9c053a2f30..1500bd743e5d 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/UdpMessaging.Build.cs +++ b/Engine/Plugins/Messaging/UdpMessaging/Source/UdpMessaging/UdpMessaging.Build.cs @@ -21,6 +21,7 @@ namespace UnrealBuildTool.Rules new string[] { "CoreUObject", "Json", + "Cbor", "Networking", "Serialization", "Sockets", diff --git a/Engine/Plugins/Messaging/UdpMessaging/UdpMessaging.uplugin b/Engine/Plugins/Messaging/UdpMessaging/UdpMessaging.uplugin index 59777f39d94f..41dc48a45938 100644 --- a/Engine/Plugins/Messaging/UdpMessaging/UdpMessaging.uplugin +++ b/Engine/Plugins/Messaging/UdpMessaging/UdpMessaging.uplugin @@ -14,7 +14,12 @@ "CanContainContent" : false, "IsBetaVersion" : false, "Installed" : false, - "SupportedPrograms" : [ "UnrealFrontend", "UnrealLightmass" ], + "SupportedPrograms" : + [ + "UnrealFrontend", + "UnrealCollaborationServer", + "UnrealLightmass" + ], "Modules" : [ { @@ -25,7 +30,12 @@ [ "HTML5" ], - "WhitelistPrograms" : [ "UnrealFrontend", "UnrealLightmass" ] + "WhitelistPrograms": + [ + "UnrealFrontend", + "UnrealCollaborationServer", + "UnrealLightmass" + ] } ] } \ No newline at end of file diff --git a/Engine/Plugins/MovieScene/LevelSequenceEditor/Source/LevelSequenceEditor/Private/Misc/LevelSequenceEditorActorSpawner.cpp b/Engine/Plugins/MovieScene/LevelSequenceEditor/Source/LevelSequenceEditor/Private/Misc/LevelSequenceEditorActorSpawner.cpp index 28fcf44ea799..b37648c276f2 100644 --- 
a/Engine/Plugins/MovieScene/LevelSequenceEditor/Source/LevelSequenceEditor/Private/Misc/LevelSequenceEditorActorSpawner.cpp +++ b/Engine/Plugins/MovieScene/LevelSequenceEditor/Source/LevelSequenceEditor/Private/Misc/LevelSequenceEditorActorSpawner.cpp @@ -35,14 +35,26 @@ TValueOrError FLevelSequenceEditorActorSpawner::CreateNewS FText ErrorText; - // First off, deal with creating a spawnable from a class // Deal with creating a spawnable from an instance of an actor if (AActor* Actor = Cast(&SourceObject)) { - AActor* SpawnedActor = Cast(StaticDuplicateObject(Actor, &OwnerMovieScene, TemplateName, RF_AllFlags & ~RF_Transactional)); + // If the source actor is not transactional, temporarily add the flag to ensure that the duplicated object is created with the transactional flag. + // This is necessary for the creation of the object to exist in the transaction buffer for multi-user workflows + const bool bWasTransactional = Actor->HasAnyFlags(RF_Transactional); + if (!bWasTransactional) + { + Actor->SetFlags(RF_Transactional); + } + + AActor* SpawnedActor = Cast(StaticDuplicateObject(Actor, &OwnerMovieScene, TemplateName, RF_AllFlags)); SpawnedActor->bIsEditorPreviewActor = false; NewSpawnable.ObjectTemplate = SpawnedActor; - NewSpawnable.Name = Actor->GetActorLabel(); + NewSpawnable.Name = Actor->GetActorLabel(); + + if (!bWasTransactional) + { + Actor->ClearFlags(RF_Transactional); + } } // If it's a blueprint, we need some special handling @@ -54,7 +66,7 @@ TValueOrError FLevelSequenceEditorActorSpawner::CreateNewS return MakeError(ErrorText); } - NewSpawnable.ObjectTemplate = NewObject(&OwnerMovieScene, SourceBlueprint->GeneratedClass, TemplateName); + NewSpawnable.ObjectTemplate = NewObject(&OwnerMovieScene, SourceBlueprint->GeneratedClass, TemplateName, RF_Transactional); } // At this point we have to assume it's an asset @@ -80,7 +92,7 @@ TValueOrError FLevelSequenceEditorActorSpawner::CreateNewS } } - AActor* Instance = 
FactoryToUse->CreateActor(&SourceObject, GWorld->PersistentLevel, FTransform(), RF_Transient, TemplateName ); + AActor* Instance = FactoryToUse->CreateActor(&SourceObject, GWorld->PersistentLevel, FTransform(), RF_Transient | RF_Transactional, TemplateName ); Instance->bIsEditorPreviewActor = false; NewSpawnable.ObjectTemplate = StaticDuplicateObject(Instance, &OwnerMovieScene, TemplateName, RF_AllFlags & ~RF_Transient); @@ -100,7 +112,7 @@ TValueOrError FLevelSequenceEditorActorSpawner::CreateNewS return MakeError(ErrorText); } - NewSpawnable.ObjectTemplate = NewObject(&OwnerMovieScene, InClass, TemplateName); + NewSpawnable.ObjectTemplate = NewObject(&OwnerMovieScene, InClass, TemplateName, RF_Transactional); } if (!NewSpawnable.ObjectTemplate || !NewSpawnable.ObjectTemplate->IsA()) diff --git a/Engine/Plugins/MovieScene/SequencerScripting/SequencerScripting.uplugin b/Engine/Plugins/MovieScene/SequencerScripting/SequencerScripting.uplugin index 5972a370434f..2430cf70b7b4 100644 --- a/Engine/Plugins/MovieScene/SequencerScripting/SequencerScripting.uplugin +++ b/Engine/Plugins/MovieScene/SequencerScripting/SequencerScripting.uplugin @@ -22,7 +22,7 @@ "Modules": [ { "Name": "SequencerScripting", - "Type": "Developer", + "Type": "Editor", "LoadingPhase": "Default" } ] diff --git a/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/EditableMesh.cpp b/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/EditableMesh.cpp index eaf27274a0e2..7bbb296e9d4b 100644 --- a/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/EditableMesh.cpp +++ b/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/EditableMesh.cpp @@ -8395,19 +8395,14 @@ void UEditableMesh::RebuildOctree() } } - -void UEditableMesh::SearchSpatialDatabaseForPolygonsPotentiallyIntersectingLineSegment( const FVector LineSegmentStart, const FVector LineSegmentEnd, TArray& OutPolygons ) const +void UEditableMesh::SearchSpatialDatabaseWithPredicate( TFunctionRef< bool( const FBox& Bounds ) > 
Predicate, TArray< FPolygonID >& OutPolygons ) const { OutPolygons.Reset(); - // @todo mesheditor perf: Ideally we "early out" of octree traversal, by walking through the octree along the ray, reporting back to QueryElement to find out when we should stop // @todo mesheditor scripting: Should spit a warning for Blueprint users if Octree is not allowed when calling this function if( IsSpatialDatabaseAllowed() && ensure( Octree.IsValid() ) ) { - const FVector LineSegmentVector = LineSegmentEnd - LineSegmentStart; - const FVector LineSegmentVectorReciprocal = LineSegmentVector.Reciprocal(); - // @todo mesheditor perf: Do we need to use a custom stack allocator for iterating? The default should probably be okay. for( FEditableMeshOctree::TConstIterator<> OctreeIt( *Octree ); OctreeIt.HasPendingNodes(); @@ -8419,33 +8414,17 @@ void UEditableMesh::SearchSpatialDatabaseForPolygonsPotentiallyIntersectingLineS // Leaf nodes have no children, so don't bother iterating if( !OctreeNode.IsLeaf() ) { - // Find children of this octree node that overlap our line segment FOREACH_OCTREE_CHILD_NODE( ChildRef ) { if( OctreeNode.HasChild( ChildRef ) ) { const FOctreeNodeContext ChildContext = OctreeNodeContext.GetChildContext( ChildRef ); - // @todo mesheditor: LineBoxIntersection() has a magic number in its implementation we might want to look at (search for BOX_SIDE_THRESHOLD) - const bool bIsOverlappingLineSegment = - FMath::LineBoxIntersection( - ChildContext.Bounds.GetBox(), - LineSegmentStart, - LineSegmentEnd, - LineSegmentVector, - LineSegmentVectorReciprocal ); - - if( bIsOverlappingLineSegment ) + if( Predicate( ChildContext.Bounds.GetBox() ) ) { - // DrawDebugBox( GWorld, ChildContext.Bounds.Center, ChildContext.Bounds.Extent * 0.8f, FQuat::Identity, FColor::Green, false, 0.0f ); // @todo mesheditor debug: (also, wrong coordinate system!) - // Push it on the iterator's pending node stack. 
OctreeIt.PushChild( ChildRef ); } - else - { - // DrawDebugBox( GWorld, ChildContext.Bounds.Center, ChildContext.Bounds.Extent, FQuat::Identity, FColor( 128, 128, 128 ), false, 0.0f ); // @todo mesheditor debug: (also, wrong coordinate system!) - } } } } @@ -8460,6 +8439,38 @@ void UEditableMesh::SearchSpatialDatabaseForPolygonsPotentiallyIntersectingLineS } } +void UEditableMesh::SearchSpatialDatabaseForPolygonsInVolume( const TArray& Planes, TArray& OutPolygons ) const +{ + auto SearchInVolume = [ &Planes ]( const FBox& Bounds ) + { + bool bIsInside = true; + + // Inside volume if node intersects or above all planes that form the volume + for( int32 Index = 0; bIsInside && Index < Planes.Num(); ++Index ) + { + bIsInside = bIsInside && FMath::PlaneAABBRelativePosition( Planes[ Index ], Bounds ) >= 0; + } + return bIsInside; + }; + + SearchSpatialDatabaseWithPredicate( SearchInVolume, OutPolygons ); +} + +void UEditableMesh::SearchSpatialDatabaseForPolygonsPotentiallyIntersectingLineSegment( const FVector LineSegmentStart, const FVector LineSegmentEnd, TArray& OutPolygons ) const +{ + const FVector LineSegmentVector = LineSegmentEnd - LineSegmentStart; + const FVector LineSegmentVectorReciprocal = LineSegmentVector.Reciprocal(); + + auto SearchByLineSegmentIntersection = [ & ]( const FBox& Bounds ) + { + // @todo mesheditor: LineBoxIntersection() has a magic number in its implementation we might want to look at (search for BOX_SIDE_THRESHOLD) + return FMath::LineBoxIntersection( Bounds, LineSegmentStart, LineSegmentEnd, LineSegmentVector, LineSegmentVectorReciprocal ); + + }; + + SearchSpatialDatabaseWithPredicate( SearchByLineSegmentIntersection, OutPolygons ); +} + void UEditableMesh::SetAllowSpatialDatabase( const bool bInAllowSpatialDatabase ) { diff --git a/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/Public/EditableMesh.h b/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/Public/EditableMesh.h index 5ebe09b1de29..cd962cddb2a3 100644 --- 
a/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/Public/EditableMesh.h +++ b/Engine/Plugins/Runtime/EditableMesh/Source/EditableMesh/Public/EditableMesh.h @@ -437,7 +437,7 @@ public: UFUNCTION( BlueprintPure, Category="Editable Mesh" ) void ComputePolygonsSharedEdges( const TArray& PolygonIDs, TArray& OutSharedEdgeIDs ) const; UFUNCTION( BlueprintPure, Category="Editable Mesh" ) void FindPolygonLoop( const FEdgeID EdgeID, TArray& OutEdgeLoopEdgeIDs, TArray& OutFlippedEdgeIDs, TArray& OutReversedEdgeIDPathToTake, TArray& OutPolygonIDsToSplit ) const; UFUNCTION( BlueprintPure, Category="Editable Mesh" ) void SearchSpatialDatabaseForPolygonsPotentiallyIntersectingLineSegment( const FVector LineSegmentStart, const FVector LineSegmentEnd, TArray& OutPolygons ) const; - + UFUNCTION( BlueprintPure, Category="Editable Mesh" ) void SearchSpatialDatabaseForPolygonsInVolume( const TArray& Planes, TArray& OutPolygons ) const; UFUNCTION( BlueprintCallable, Category="Editable Mesh" ) void SetSubdivisionCount( const int32 NewSubdivisionCount ); UFUNCTION( BlueprintCallable, Category="Editable Mesh" ) void MoveVertices( const TArray& VerticesToMove ); @@ -493,6 +493,8 @@ protected: the undo will not be stored */ void AddUndo( TUniquePtr NewUndo ); + void SearchSpatialDatabaseWithPredicate( TFunctionRef< bool( const FBox& Bounds ) > Predicate, TArray< FPolygonID >& OutPolygons ) const; + public: // @todo mesheditor: temporarily changed access to public so the adapter can call it when building the editable mesh from the source static mesh. Think about this some more. 
/** Refreshes the entire OpenSubdiv state for this mesh and generates subdivision geometry (if the mesh is configured to have subdivision levels) */ diff --git a/Engine/Plugins/Runtime/Oculus/OculusVR/Source/OculusEditor/Private/OculusToolWidget.cpp b/Engine/Plugins/Runtime/Oculus/OculusVR/Source/OculusEditor/Private/OculusToolWidget.cpp index 08627495b704..f49830c00f3f 100644 --- a/Engine/Plugins/Runtime/Oculus/OculusVR/Source/OculusEditor/Private/OculusToolWidget.cpp +++ b/Engine/Plugins/Runtime/Oculus/OculusVR/Source/OculusEditor/Private/OculusToolWidget.cpp @@ -248,7 +248,7 @@ void SOculusToolWidget::RebuildLayout() } } - box = NewCategory(scroller, LOCTEXT("MiscHeader", "")); + box = NewCategory(scroller, FText::GetEmpty()); box.Get().AddSlot() .Padding(10, 5) [ diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.cpp index e361c4ab3798..8011f3e19464 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.cpp +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.cpp @@ -11,6 +11,8 @@ #include "DisplayClusterGlobals.h" #include "IPDisplayCluster.h" +#include "Misc/App.h" + FDisplayClusterClusterNodeCtrlMaster::FDisplayClusterClusterNodeCtrlMaster(const FString& ctrlName, const FString& nodeName) : FDisplayClusterClusterNodeCtrlSlave(ctrlName, nodeName) @@ -21,6 +23,15 @@ FDisplayClusterClusterNodeCtrlMaster::~FDisplayClusterClusterNodeCtrlMaster() { } +////////////////////////////////////////////////////////////////////////////////////////////// +// IPDisplayClusterClusterSyncProtocol +////////////////////////////////////////////////////////////////////////////////////////////// +void 
FDisplayClusterClusterNodeCtrlMaster::GetTimecode(FTimecode& timecode, FFrameRate& frameRate) +{ + // These values are updated in UEngine::UpdateTimeAndHandleMaxTickRate (via UpdateTimecode). + timecode = FApp::GetTimecode(); + frameRate = FApp::GetTimecodeFrameRate(); +} ////////////////////////////////////////////////////////////////////////////////////////////// // IPDisplayClusterNodeController diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.h index 41539d0162b8..cfe5c5a58212 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlMaster.h @@ -20,6 +20,12 @@ public: FDisplayClusterClusterNodeCtrlMaster(const FString& ctrlName, const FString& nodeName); virtual ~FDisplayClusterClusterNodeCtrlMaster(); +public: + ////////////////////////////////////////////////////////////////////////////////////////////// + // IPDisplayClusterClusterSyncProtocol + ////////////////////////////////////////////////////////////////////////////////////////////// + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) override; + public: ////////////////////////////////////////////////////////////////////////////////////////////// // IPDisplayClusterNodeController diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.cpp index 1b748a4f8b28..cd044a619e27 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.cpp +++
b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.cpp @@ -54,6 +54,11 @@ void FDisplayClusterClusterNodeCtrlSlave::GetDeltaTime(float& deltaTime) ClusterSyncClient->GetDeltaTime(deltaTime); } +void FDisplayClusterClusterNodeCtrlSlave::GetTimecode(FTimecode& timecode, FFrameRate& frameRate) +{ + ClusterSyncClient->GetTimecode(timecode, frameRate); +} + void FDisplayClusterClusterNodeCtrlSlave::GetSyncData(FDisplayClusterMessage::DataType& data) { ClusterSyncClient->GetSyncData(data); diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.h index 9eac8983238a..063cd0250166 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterClusterNodeCtrlSlave.h @@ -36,6 +36,7 @@ public: virtual void WaitForFrameEnd() override final; virtual void WaitForTickEnd() override final; virtual void GetDeltaTime(float& deltaTime) override final; + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) override; virtual void GetSyncData(FDisplayClusterMessage::DataType& data) override; virtual void GetInputData(FDisplayClusterMessage::DataType& data) override; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.cpp index af052db02929..3abd035d7397 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.cpp +++ 
b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.cpp @@ -45,6 +45,11 @@ void FDisplayClusterNodeCtrlStandalone::GetDeltaTime(float& deltaTime) // Nothing special to do here in standalone mode } +void FDisplayClusterNodeCtrlStandalone::GetTimecode(FTimecode& timecode, FFrameRate& frameRate) +{ + // Nothing special to do here in standalone mode +} + void FDisplayClusterNodeCtrlStandalone::GetSyncData(FDisplayClusterMessage::DataType& data) { // Nothing special to do here in standalone mode diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.h index 89193b71898b..23109889852a 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/Controller/DisplayClusterNodeCtrlStandalone.h @@ -36,6 +36,7 @@ public: virtual void WaitForFrameEnd() override; virtual void WaitForTickEnd() override; virtual void GetDeltaTime(float& deltaTime) override; + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) override; virtual void GetSyncData(FDisplayClusterMessage::DataType& data) override; virtual void GetInputData(FDisplayClusterMessage::DataType& data) override; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/DisplayClusterClusterManager.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/DisplayClusterClusterManager.h index 8b3d8f704d45..3391fffed5b7 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/DisplayClusterClusterManager.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/DisplayClusterClusterManager.h @@ -4,6 +4,7 @@ #include 
"IPDisplayClusterClusterManager.h" #include "Network/DisplayClusterMessage.h" +#include "Misc/App.h" class ADisplayClusterGameMode; class ADisplayClusterSettings; @@ -58,6 +59,12 @@ public: virtual void SetDeltaTime(float deltaTime) override { DeltaTime = deltaTime; } + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) const override + { timecode = FApp::GetTimecode(); frameRate = FApp::GetTimecodeFrameRate(); } + + virtual void SetTimecode(const FTimecode& timecode, const FFrameRate& frameRate) override + { FApp::SetTimecodeAndFrameRate(timecode, frameRate); } + virtual void RegisterSyncObject(IDisplayClusterClusterSyncObject* pSyncObj) override; virtual void UnregisterSyncObject(IDisplayClusterClusterSyncObject* pSyncObj) override; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/IPDisplayClusterClusterManager.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/IPDisplayClusterClusterManager.h index 2aa75440ee4b..fd6af70eb791 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/IPDisplayClusterClusterManager.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Cluster/IPDisplayClusterClusterManager.h @@ -2,6 +2,8 @@ #pragma once +#include "Misc/Timecode.h" +#include "Misc/FrameRate.h" #include "Cluster/IDisplayClusterClusterManager.h" #include "IPDisplayClusterManager.h" @@ -25,6 +27,9 @@ struct IPDisplayClusterClusterManager : virtual float GetDeltaTime() const = 0; virtual void SetDeltaTime(float deltaTime) = 0; + + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) const = 0; + virtual void SetTimecode(const FTimecode& timecode, const FFrameRate& frameRate) = 0; virtual void RegisterSyncObject (IDisplayClusterClusterSyncObject* pSyncObj) = 0; virtual void UnregisterSyncObject(IDisplayClusterClusterSyncObject* pSyncObj) = 0; diff --git 
a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Game/Classes/Basics/DisplayClusterGameEngine.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Game/Classes/Basics/DisplayClusterGameEngine.cpp index ebed5d8c6322..f1a7c2bbf994 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Game/Classes/Basics/DisplayClusterGameEngine.cpp +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Game/Classes/Basics/DisplayClusterGameEngine.cpp @@ -168,6 +168,8 @@ void UDisplayClusterGameEngine::Tick(float DeltaSeconds, bool bIdleMode) if (OperationMode == EDisplayClusterOperationMode::Cluster || OperationMode == EDisplayClusterOperationMode::Standalone) { + FTimecode Timecode; + FFrameRate FrameRate; // Update input device state (master only) InputMgr->Update(); @@ -185,10 +187,13 @@ void UDisplayClusterGameEngine::Tick(float DeltaSeconds, bool bIdleMode) // Get DisplayCluster time delta NodeController->GetDeltaTime(DeltaSeconds); + NodeController->GetTimecode(Timecode, FrameRate); UE_LOG(LogDisplayClusterEngine, Verbose, TEXT("DisplayCluster delta time (seconds): %f"), DeltaSeconds); + UE_LOG(LogDisplayClusterEngine, Verbose, TEXT("DisplayCluster Timecode: %s | %s"), *Timecode.ToString(), *FrameRate.ToPrettyText().ToString()); // Update delta time in the application FApp::SetDeltaTime(DeltaSeconds); + FApp::SetTimecodeAndFrameRate(Timecode, FrameRate); // Update input state in the cluster ClusterMgr->SyncInput(); @@ -205,7 +210,11 @@ void UDisplayClusterGameEngine::Tick(float DeltaSeconds, bool bIdleMode) { const float lag = CfgDebug.LagMaxTime; UE_LOG(LogDisplayClusterEngine, Log, TEXT("Simulating lag: %f seconds"), lag); +#if 1 + FPlatformProcess::Sleep(FMath::RandRange(0.f, lag)); +#else FPlatformProcess::Sleep(lag); +#endif } #if 0 diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Misc/DisplayClusterTypesConverter.h 
b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Misc/DisplayClusterTypesConverter.h index 62b08c8b103a..33c3bc1fc5f6 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Misc/DisplayClusterTypesConverter.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Misc/DisplayClusterTypesConverter.h @@ -3,6 +3,8 @@ #pragma once #include "CoreMinimal.h" +#include "Misc/Timecode.h" +#include "Misc/FrameRate.h" #include "DisplayClusterOperationMode.h" #include "DisplayClusterStrings.h" @@ -28,6 +30,10 @@ public: template <> static FString ToString<> (const FVector2D& from) { return from.ToString(); } template <> static FString ToString<> (const FRotator& from) { return from.ToString(); } + // We can't just use FTimecode ToString as that loses information. + template <> static FString ToString<> (const FTimecode& from) { return FString::Printf(TEXT("%d;%d;%d;%d;%d"), from.bDropFrameFormat ? 1 : 0, from.Hours, from.Minutes, from.Seconds, from.Frames); } + template <> static FString ToString<> (const FFrameRate& from) { return FString::Printf(TEXT("%d;%d"), from.Numerator, from.Denominator); } + template <> static FString ToString<> (const EDisplayClusterOperationMode& from) { switch (from) @@ -59,4 +65,41 @@ public: template <> static FVector FromString<> (const FString& from) { FVector vec; vec.InitFromString(from); return vec; } template <> static FVector2D FromString<> (const FString& from) { FVector2D vec; vec.InitFromString(from); return vec; } template <> static FRotator FromString<> (const FString& from) { FRotator rot; rot.InitFromString(from); return rot; } + template <> static FTimecode FromString<> (const FString& from) + { + FTimecode timecode; + + TArray parts; + parts.Reserve(5); + const int32 found = from.ParseIntoArray(parts, TEXT(";")); + + // We are expecting 5 "parts" - DropFrame, Hours, Minutes, Seconds, Frames. 
+ if (found == 5) + { + timecode.bDropFrameFormat = FromString(parts[0]); + timecode.Hours = FromString(parts[1]); + timecode.Minutes = FromString(parts[2]); + timecode.Seconds = FromString(parts[3]); + timecode.Frames = FromString(parts[4]); + } + + return timecode; + } + template <> static FFrameRate FromString<> (const FString& from) + { + FFrameRate frameRate; + + TArray parts; + parts.Reserve(2); + const int32 found = from.ParseIntoArray(parts, TEXT(";")); + + // We are expecting 2 "parts" - Numerator, Denominator. + if (found == 2) + { + frameRate.Numerator = FromString(parts[0]); + frameRate.Denominator = FromString(parts[1]); + } + + return frameRate; + } }; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Protocol/IPDisplayClusterClusterSyncProtocol.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Protocol/IPDisplayClusterClusterSyncProtocol.h index 02cbbe91e1f7..64aa7283d8f8 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Protocol/IPDisplayClusterClusterSyncProtocol.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Protocol/IPDisplayClusterClusterSyncProtocol.h @@ -2,6 +2,8 @@ #pragma once +#include "Misc/FrameRate.h" +#include "Misc/Timecode.h" #include "Network/DisplayClusterMessage.h" @@ -26,6 +28,9 @@ public: // Provides with time delta for current frame virtual void GetDeltaTime(float& deltaTime) = 0; + // Get the Timecode value for the current frame. 
+ virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) = 0; + // Sync objects virtual void GetSyncData(FDisplayClusterMessage::DataType& data) = 0; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.cpp index 2e6ef3ef3984..d9f70ac1e0c3 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.cpp +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.cpp @@ -70,7 +70,28 @@ void FDisplayClusterClusterSyncClient::GetDeltaTime(float& deltaTime) // Extract sync data from response message if (response->GetArg(FDisplayClusterClusterSyncMsg::GetDeltaTime::argDeltaTime, deltaTime) == false) { - UE_LOG(LogDisplayClusterNetworkMsg, Error, TEXT("Coulnd't extract an argument: %s"), FDisplayClusterClusterSyncMsg::GetDeltaTime::argDeltaTime); + UE_LOG(LogDisplayClusterNetworkMsg, Error, TEXT("Couldn't extract an argument: %s"), FDisplayClusterClusterSyncMsg::GetDeltaTime::argDeltaTime); + } +} + +void FDisplayClusterClusterSyncClient::GetTimecode(FTimecode& timecode, FFrameRate& frameRate) +{ + static const TSharedPtr request(new FDisplayClusterMessage(FDisplayClusterClusterSyncMsg::GetTimecode::name, FDisplayClusterClusterSyncMsg::TypeRequest, FDisplayClusterClusterSyncMsg::ProtocolName)); + TSharedPtr response = SendRecvMsg(request); + + if (!response.IsValid()) + { + return; + } + + // Extract sync data from response message + if (response->GetArg(FDisplayClusterClusterSyncMsg::GetTimecode::argTimecode, timecode) == false) + { + UE_LOG(LogDisplayClusterNetworkMsg, Error, TEXT("Couldn't extract an argument: %s"), FDisplayClusterClusterSyncMsg::GetTimecode::argTimecode); + } + if 
(response->GetArg(FDisplayClusterClusterSyncMsg::GetTimecode::argFrameRate, frameRate) == false) + { + UE_LOG(LogDisplayClusterNetworkMsg, Error, TEXT("Couldn't extract an argument: %s"), FDisplayClusterClusterSyncMsg::GetTimecode::argFrameRate); } } diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.h index 87b837ca590c..d1ad8d47d365 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncClient.h @@ -27,6 +27,7 @@ public: virtual void WaitForFrameEnd() override; virtual void WaitForTickEnd() override; virtual void GetDeltaTime(float& deltaTime) override; + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) override; virtual void GetSyncData(FDisplayClusterMessage::DataType& data) override; virtual void GetInputData(FDisplayClusterMessage::DataType& data) override; }; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncMsg.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncMsg.h index 470913f07c5e..ee9f6063bf39 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncMsg.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncMsg.h @@ -40,6 +40,13 @@ namespace FDisplayClusterClusterSyncMsg constexpr static auto argDeltaTime = "DeltaTime"; }; + namespace GetTimecode + { + constexpr static auto name = "GetTimecode"; + constexpr static auto argTimecode = "Timecode"; + 
constexpr static auto argFrameRate = "FrameRate"; + } + namespace GetSyncData { constexpr static auto name = "GetSyncData"; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.cpp index 69a4d6b7778b..c8109972e36d 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.cpp +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.cpp @@ -117,6 +117,15 @@ FDisplayClusterMessage::Ptr FDisplayClusterClusterSyncService::ProcessMessage(FD response->SetArg(FDisplayClusterClusterSyncMsg::GetDeltaTime::argDeltaTime, deltaTime); return response; } + else if (msgName == FDisplayClusterClusterSyncMsg::GetTimecode::name) + { + FTimecode timecode; + FFrameRate frameRate; + GetTimecode(timecode, frameRate); + response->SetArg(FDisplayClusterClusterSyncMsg::GetTimecode::argTimecode, timecode); + response->SetArg(FDisplayClusterClusterSyncMsg::GetTimecode::argFrameRate, frameRate); + return response; + } else if (msgName == FDisplayClusterClusterSyncMsg::GetSyncData::name) { FDisplayClusterMessage::DataType data; @@ -180,6 +189,11 @@ void FDisplayClusterClusterSyncService::GetDeltaTime(float& deltaTime) deltaTime = GDisplayCluster->GetPrivateClusterMgr()->GetDeltaTime(); } +void FDisplayClusterClusterSyncService::GetTimecode(FTimecode& timecode, FFrameRate& frameRate) +{ + GDisplayCluster->GetPrivateClusterMgr()->GetTimecode(timecode, frameRate); +} + void FDisplayClusterClusterSyncService::GetSyncData(FDisplayClusterMessage::DataType& data) { GDisplayCluster->GetPrivateClusterMgr()->ExportSyncData(data); diff --git 
a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.h index cd1190a4e9a7..901ce43fadf4 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Network/Service/ClusterSync/DisplayClusterClusterSyncService.h @@ -42,6 +42,7 @@ private: virtual void WaitForFrameEnd() override; virtual void WaitForTickEnd() override; virtual void GetDeltaTime(float& deltaTime) override; + virtual void GetTimecode(FTimecode& timecode, FFrameRate& frameRate) override; virtual void GetSyncData(FDisplayClusterMessage::DataType& data) override; virtual void GetInputData(FDisplayClusterMessage::DataType& data) override; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.cpp new file mode 100644 index 000000000000..d5152cd44cee --- /dev/null +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.cpp @@ -0,0 +1,21 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "DisplayClusterNativePresentHandler.h" + + +FDisplayClusterNativePresentHandler::FDisplayClusterNativePresentHandler() +{ +} + +FDisplayClusterNativePresentHandler::~FDisplayClusterNativePresentHandler() +{ +} + + +bool FDisplayClusterNativePresentHandler::Present(int32& InOutSyncInterval) +{ + exec_BarrierWait(); + InOutSyncInterval = 1; + + return true; +} diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.h new file mode 100644 index 000000000000..27fe184935bd --- /dev/null +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/Devices/DisplayClusterNativePresentHandler.h @@ -0,0 +1,22 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Render/Devices/DisplayClusterDeviceBase.h" + + +/** + * Present stub to allow to synchronize a cluster with native rendering pipeline (no nDisplay stereo devices used) + */ +class FDisplayClusterNativePresentHandler : public FDisplayClusterDeviceBase +{ +public: + FDisplayClusterNativePresentHandler(); + virtual ~FDisplayClusterNativePresentHandler(); + +protected: + ////////////////////////////////////////////////////////////////////////////////////////////// + // FRHICustomPresent + ////////////////////////////////////////////////////////////////////////////////////////////// + virtual bool Present(int32& InOutSyncInterval) override; +}; diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.cpp b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.cpp index 9274b336fc29..41e0fbfcae06 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.cpp +++
b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.cpp @@ -3,11 +3,13 @@ #include "Render/DisplayClusterRenderManager.h" #include "Config/IPDisplayClusterConfigManager.h" +#include "Engine/GameViewportClient.h" #include "Engine/GameEngine.h" #include "Misc/DisplayClusterLog.h" #include "DisplayClusterStrings.h" #include "DisplayClusterOperationMode.h" +#include "Render/Devices/DisplayClusterNativePresentHandler.h" #include "Render/Devices/Debug/DisplayClusterDeviceDebug.h" #include "Render/Devices/Monoscopic/DisplayClusterDeviceMonoscopicOpenGL.h" #include "Render/Devices/Monoscopic/DisplayClusterDeviceMonoscopicD3D11.h" @@ -18,6 +20,8 @@ #include "Render/Devices/SideBySide/DisplayClusterDeviceSideBySide.h" #include "Render/Devices/TopBottom/DisplayClusterDeviceTopBottom.h" +#include "UnrealClient.h" + FDisplayClusterRenderManager::FDisplayClusterRenderManager() { @@ -153,7 +157,7 @@ FDisplayClusterDeviceBase* FDisplayClusterRenderManager::CreateStereoDevice() } } // Monoscopic - else //if (FParse::Param(FCommandLine::Get(), DisplayClusterConstants::args::dev::Mono)) + else if (FParse::Param(FCommandLine::Get(), DisplayClusterStrings::args::dev::Mono)) { if (RHIName.Compare(DisplayClusterStrings::rhi::OpenGL, ESearchCase::IgnoreCase) == 0) { @@ -171,6 +175,11 @@ FDisplayClusterDeviceBase* FDisplayClusterRenderManager::CreateStereoDevice() pDevice = new FDisplayClusterDeviceMonoscopicD3D12; } } + // Leave native render but inject custom present for cluster synchronization + else + { + UGameViewportClient::OnViewportCreated().AddRaw(this, &FDisplayClusterRenderManager::OnViewportCreatedHandler); + } if (pDevice == nullptr) { @@ -195,6 +204,29 @@ FDisplayClusterDeviceBase* FDisplayClusterRenderManager::CreateStereoDevice() return pDevice; } +void FDisplayClusterRenderManager::OnViewportCreatedHandler() +{ + if (GEngine && GEngine->GameViewport) + { + if (!GEngine->GameViewport->Viewport->GetViewportRHI().IsValid()) + { + 
GEngine->GameViewport->OnBeginDraw().AddRaw(this, &FDisplayClusterRenderManager::OnBeginDrawHandler); + } + } +} + +void FDisplayClusterRenderManager::OnBeginDrawHandler() +{ + //@todo: this is fast solution for prototype. We shouldn't use raw handlers to be able to unsubscribe from the event. + static bool initialized = false; + if (!initialized && GEngine->GameViewport->Viewport->GetViewportRHI().IsValid()) + { + NativePresentHandler = new FDisplayClusterNativePresentHandler; + GEngine->GameViewport->Viewport->GetViewportRHI().GetReference()->SetCustomPresent(NativePresentHandler); + initialized = true; + } +} + void FDisplayClusterRenderManager::PreTick(float DeltaSeconds) { DISPLAY_CLUSTER_FUNC_TRACE(LogDisplayClusterRender); diff --git a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.h b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.h index 3c28722c6b35..4fdbfb1a88e1 100644 --- a/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.h +++ b/Engine/Plugins/Runtime/nDisplay/Source/DisplayCluster/Private/Render/DisplayClusterRenderManager.h @@ -6,7 +6,7 @@ #include "IPDisplayClusterRenderManager.h" class FDisplayClusterDeviceBase; - +class FDisplayClusterNativePresentHandler; /** * Render manager. Responsible for anything related to a visual part. 
@@ -42,6 +42,8 @@ public: private: FDisplayClusterDeviceBase* CreateStereoDevice(); void ResizeWindow(int32 WinX, int32 WinY, int32 ResX, int32 ResY); + void OnViewportCreatedHandler(); + void OnBeginDrawHandler(); private: EDisplayClusterOperationMode CurrentOperationMode; @@ -50,6 +52,7 @@ private: // Interface pointer to eliminate type casting IDisplayClusterStereoDevice* Device = nullptr; + FDisplayClusterNativePresentHandler* NativePresentHandler; bool bWindowAdjusted = false; }; diff --git a/Engine/Programs/UnrealCollaborationServer/Config/DefaultEngine.ini b/Engine/Programs/UnrealCollaborationServer/Config/DefaultEngine.ini new file mode 100644 index 000000000000..94b6c23135aa --- /dev/null +++ b/Engine/Programs/UnrealCollaborationServer/Config/DefaultEngine.ini @@ -0,0 +1,6 @@ +[URL] +GameNameShort=UCS + +[Plugins] ++ProgramEnabledPlugins="UdpMessaging" ++ProgramEnabledPlugins="ConcertSyncServer" diff --git a/Engine/Source/Developer/IOS/IOSPlatformEditor/Private/IOSTargetSettingsCustomization.cpp b/Engine/Source/Developer/IOS/IOSPlatformEditor/Private/IOSTargetSettingsCustomization.cpp index 8465b7d87e2f..c2820f9f2c17 100644 --- a/Engine/Source/Developer/IOS/IOSPlatformEditor/Private/IOSTargetSettingsCustomization.cpp +++ b/Engine/Source/Developer/IOS/IOSPlatformEditor/Private/IOSTargetSettingsCustomization.cpp @@ -442,7 +442,7 @@ void FIOSTargetSettingsCustomization::BuildPListSection(IDetailLayoutBuilder& De ( SNew(SHeaderRow) + SHeaderRow::Column("Selected") - .DefaultLabel(LOCTEXT("ProvisionListSelectColumnHeader", "")) + .DefaultLabel(FText::GetEmpty()) .FixedWidth(30.0f) + SHeaderRow::Column("Name") .DefaultLabel(LOCTEXT("ProvisionListNameColumnHeader", "Provision")) @@ -603,7 +603,7 @@ void FIOSTargetSettingsCustomization::BuildPListSection(IDetailLayoutBuilder& De ( SNew(SHeaderRow) + SHeaderRow::Column("Selected") - .DefaultLabel(LOCTEXT("CertificateListSelectColumnHeader", "")) + .DefaultLabel(FText::GetEmpty()) .FixedWidth(30.0f) + 
SHeaderRow::Column("Name") .DefaultLabel(LOCTEXT("CertificateListNameColumnHeader", "Certificate")) diff --git a/Engine/Source/Developer/LogVisualizer/Private/SVisualLoggerFilters.cpp b/Engine/Source/Developer/LogVisualizer/Private/SVisualLoggerFilters.cpp index a80eb5602502..cce61e665b45 100644 --- a/Engine/Source/Developer/LogVisualizer/Private/SVisualLoggerFilters.cpp +++ b/Engine/Source/Developer/LogVisualizer/Private/SVisualLoggerFilters.cpp @@ -150,7 +150,7 @@ TSharedRef SVisualLoggerFilters::MakeGraphsFilterMenu() .HintText(LOCTEXT("GraphsFilterSearchHint", "Quick find")) .OnTextChanged(this, &SVisualLoggerFilters::OnSearchChanged); - MenuBuilder.AddWidget(FiltersSearchBox, LOCTEXT("FiltersSearchMenuWidget", "")); + MenuBuilder.AddWidget(FiltersSearchBox, FText::GetEmpty()); if (CachedDatasPerGraph.Num() > 0) { diff --git a/Engine/Source/Developer/MeshDescriptionOperations/Private/MeshDescriptionOperations.cpp b/Engine/Source/Developer/MeshDescriptionOperations/Private/MeshDescriptionOperations.cpp index d98fef3b6926..3d241870429f 100644 --- a/Engine/Source/Developer/MeshDescriptionOperations/Private/MeshDescriptionOperations.cpp +++ b/Engine/Source/Developer/MeshDescriptionOperations/Private/MeshDescriptionOperations.cpp @@ -9,6 +9,7 @@ #include "OverlappingCorners.h" #include "RenderUtils.h" #include "mikktspace.h" +#include "UVMapSettings.h" DEFINE_LOG_CATEGORY(LogMeshDescriptionOperations); @@ -1250,4 +1251,255 @@ bool FMeshDescriptionOperations::RemoveUVChannel(FMeshDescription& MeshDescripti return true; } +void FMeshDescriptionOperations::GeneratePlanarUV(const FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords) +{ + FVector U = FVector::ForwardVector; + FVector V = FVector::RightVector; + + switch (Settings.Axis) + { + case 0: + // Project along X-axis (left view), UV along Z Y axes + U = FVector::UpVector; + V = FVector::RightVector; + break; + case 1: + // Project along Y-axis (front view), UV along X -Z axes + 
U = FVector::ForwardVector; + V = -FVector::UpVector; + break; + case 2: + // Project along Z-axis (top view), UV along X Y axes + U = FVector::ForwardVector; + V = FVector::RightVector; + break; + } + + TMeshAttributesConstRef VertexPositions = MeshDescription.VertexAttributes().GetAttributesRef(MeshAttribute::Vertex::Position); + + OutTexCoords.AddZeroed(MeshDescription.VertexInstances().Num()); + + FVector Size = Settings.Size * Settings.Scale; + FVector Offset = Settings.Position - Size / 2.f; + FQuat Rotation(Settings.RotationAxis, FMath::DegreesToRadians(Settings.RotationAngle)); + + int32 TextureCoordIndex = 0; + for (const FVertexInstanceID& VertexInstanceID : MeshDescription.VertexInstances().GetElementIDs()) + { + const FVertexID VertexID = MeshDescription.GetVertexInstanceVertex(VertexInstanceID); + FVector Vertex = VertexPositions[VertexID]; + + // Apply the gizmo transforms + Vertex = Rotation.RotateVector(Vertex); + Vertex -= Offset; + Vertex /= Size; + + float UCoord = FVector::DotProduct(Vertex, U) * Settings.UVTile.X; + float VCoord = FVector::DotProduct(Vertex, V) * Settings.UVTile.Y; + OutTexCoords[TextureCoordIndex++] = FVector2D(UCoord, VCoord); + } +} + +void FMeshDescriptionOperations::GenerateCylindricalUV(FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords) +{ + FVector Size = Settings.Size * Settings.Scale; + FVector Offset = Settings.Position; + + FVector U; + FVector V; + + switch (Settings.Axis) + { + case 0: + // Cylinder along Y-axis, counterclockwise from -Z axis as seen from back view + V = FVector::RightVector; + Offset.Y -= Size.Y / 2.f; + break; + case 1: + // Cylinder along X-axis, counterclockwise from -Y axis as seen from left view + V = FVector::ForwardVector; + Offset.X -= Size.X / 2.f; + break; + case 2: + // Cylinder along Z-axis, counterclockwise from -Y axis as seen from top view + V = -FVector::UpVector; + Offset.Z -= Size.Z / 2.f; + break; + } + + TMeshAttributesConstRef 
VertexPositions = MeshDescription.VertexAttributes().GetAttributesRef(MeshAttribute::Vertex::Position); + + OutTexCoords.AddZeroed(MeshDescription.VertexInstances().Num()); + + const float AngleOffset = PI; // offset to get the same result as in 3dsmax + int32 TextureCoordIndex = 0; + FQuat Rotation(Settings.RotationAxis, FMath::DegreesToRadians(Settings.RotationAngle)); + + for (const FVertexInstanceID& VertexInstanceID : MeshDescription.VertexInstances().GetElementIDs()) + { + const FVertexID VertexID = MeshDescription.GetVertexInstanceVertex(VertexInstanceID); + FVector Vertex = VertexPositions[VertexID]; + + // Apply the gizmo transforms + Vertex = Rotation.RotateVector(Vertex); + Vertex -= Offset; + Vertex /= Size; + + float Angle = 0.f; + switch (Settings.Axis) + { + case 0: + Angle = FMath::Atan2(Vertex.X, Vertex.Z); + break; + case 1: + Angle = FMath::Atan2(Vertex.Z, Vertex.Y); + break; + case 2: + Angle = FMath::Atan2(Vertex.X, Vertex.Y); + break; + } + + Angle += AngleOffset; + Angle *= Settings.UVTile.X; + + float UCoord = Angle / (2 * PI); + float VCoord = FVector::DotProduct(Vertex, V) * Settings.UVTile.Y; + + OutTexCoords[TextureCoordIndex++] = FVector2D(UCoord, VCoord); + } + + // Fix the UV coordinates for triangles at the seam where the angle wraps around + for (const FPolygonID& PolygonID : MeshDescription.Polygons().GetElementIDs()) + { + const TArray& VertexInstances = MeshDescription.GetPolygonPerimeterVertexInstances(PolygonID); + int32 NumInstances = VertexInstances.Num(); + if (NumInstances >= 2) + { + for (int32 StartIndex = 1; StartIndex < NumInstances; ++StartIndex) + { + int32 EndIndex = StartIndex + 1; + if (EndIndex >= NumInstances) + { + EndIndex = EndIndex % NumInstances; + } + + const FVector2D& StartUV = OutTexCoords[VertexInstances[StartIndex].GetValue()]; + FVector2D& EndUV = OutTexCoords[VertexInstances[EndIndex].GetValue()]; + + // TODO: Improve fix for UVTile other than 1 + float Threshold = 0.5f / Settings.UVTile.X; + if 
(FMath::Abs(EndUV.X - StartUV.X) > Threshold) + { + // Fix the U coordinate to get the texture go counterclockwise + if (EndUV.X > Threshold) + { + EndUV.X -= 1.f; + } + else + { + EndUV.X += 1.f; + } + } + } + } + } +} + +void FMeshDescriptionOperations::GenerateBoxUV(const FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords) +{ + FVector Size = Settings.Size * Settings.Scale; + FVector HalfSize = Size / 2.0f; + FVector Offset = Settings.Position - HalfSize; + + FVector HintU; + FVector HintV; + + switch (Settings.Axis) + { + case 0: + HintU = FVector::UpVector; + HintV = FVector::RightVector; + break; + case 1: + HintU = FVector::ForwardVector; + HintV = -FVector::UpVector; + break; + case 2: + HintU = FVector::ForwardVector; + HintV = FVector::RightVector; + break; + } + + TMeshAttributesConstRef VertexPositions = MeshDescription.VertexAttributes().GetAttributesRef(MeshAttribute::Vertex::Position); + + OutTexCoords.AddZeroed(MeshDescription.VertexInstances().Num()); + + TArray BoxPlanes; + const FVector& Center = Settings.Position; + + BoxPlanes.Add(FPlane(Center + FVector(0, 0, HalfSize.Z), FVector::UpVector)); // Top plane + BoxPlanes.Add(FPlane(Center - FVector(0, 0, HalfSize.Z), -FVector::UpVector)); // Bottom plane + BoxPlanes.Add(FPlane(Center + FVector(HalfSize.X, 0, 0), FVector::ForwardVector)); // Right plane + BoxPlanes.Add(FPlane(Center - FVector(HalfSize.X, 0, 0), -FVector::ForwardVector)); // Left plane + BoxPlanes.Add(FPlane(Center + FVector(0, HalfSize.Y, 0), FVector::RightVector)); // Front plane + BoxPlanes.Add(FPlane(Center - FVector(0, HalfSize.Y, 0), -FVector::RightVector)); // Back plane + + FQuat Rotation(Settings.RotationAxis, FMath::DegreesToRadians(Settings.RotationAngle)); + + // For each polygon, find the box plane that best matches the polygon normal + for (const FPolygonID& PolygonID : MeshDescription.Polygons().GetElementIDs()) + { + const TArray& VertexInstances = 
MeshDescription.GetPolygonPerimeterVertexInstances(PolygonID); + check(VertexInstances.Num() == 3); + + FVector Vertex0 = VertexPositions[MeshDescription.GetVertexInstanceVertex(VertexInstances[0])]; + FVector Vertex1 = VertexPositions[MeshDescription.GetVertexInstanceVertex(VertexInstances[1])]; + FVector Vertex2 = VertexPositions[MeshDescription.GetVertexInstanceVertex(VertexInstances[2])]; + + FPlane PolygonPlane(Vertex0, Vertex2, Vertex1); + + // Find the box plane that is most aligned with the polygon plane + // TODO: Also take the distance between the planes into consideration + float MaxProj = 0.f; + int32 BestPlaneIndex = 0; + for (int32 Index = 0; Index < BoxPlanes.Num(); ++Index) + { + float Proj = FVector::DotProduct(BoxPlanes[Index], PolygonPlane); + if (Proj > MaxProj) + { + MaxProj = Proj; + BestPlaneIndex = Index; + } + } + + const FPlane& BestPlane = BoxPlanes[BestPlaneIndex]; + + FVector U = HintU; + FVector V = BestPlane ^ HintU; + + if (V.IsZero()) + { + // Plane normal and U were aligned, so try with V instead + U = HintV; + V = BestPlane ^ HintV; + } + + for (const FVertexInstanceID& VertexInstanceID : VertexInstances) + { + const FVertexID VertexID = MeshDescription.GetVertexInstanceVertex(VertexInstanceID); + FVector Vertex = VertexPositions[VertexID]; + + // Apply the gizmo transforms + Vertex = Rotation.RotateVector(Vertex); + Vertex -= Offset; + Vertex /= Size; + + float UCoord = FVector::DotProduct(Vertex, U) * Settings.UVTile.X; + float VCoord = FVector::DotProduct(Vertex, V) * Settings.UVTile.Y; + + OutTexCoords[VertexInstanceID.GetValue()] = FVector2D(UCoord, VCoord); + } + } +} + #undef LOCTEXT_NAMESPACE diff --git a/Engine/Source/Developer/MeshDescriptionOperations/Public/MeshDescriptionOperations.h b/Engine/Source/Developer/MeshDescriptionOperations/Public/MeshDescriptionOperations.h index d9147817829f..90dd1c4b9e14 100644 --- a/Engine/Source/Developer/MeshDescriptionOperations/Public/MeshDescriptionOperations.h +++ 
b/Engine/Source/Developer/MeshDescriptionOperations/Public/MeshDescriptionOperations.h @@ -6,6 +6,7 @@ struct FMeshDescription; struct FRawMesh; +struct FUVMapSettings; struct FOverlappingCorners; enum class ELightmapUVVersion : int32; @@ -66,6 +67,14 @@ public: /** Remove the UV channel at the given index from the MeshDescription. */ static bool RemoveUVChannel(FMeshDescription& MeshDescription, int32 UVChannelIndex); + /** Generate planar UV mapping for the MeshDescription */ + static void GeneratePlanarUV(const FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords); + + /** Generate cylindrical UV mapping for the MeshDescription */ + static void GenerateCylindricalUV(FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords); + + /** Generate box UV mapping for the MeshDescription */ + static void GenerateBoxUV(const FMeshDescription& MeshDescription, const FUVMapSettings& Settings, TArray& OutTexCoords); static void ConvertHardEdgesToSmoothGroup(const FMeshDescription& SourceMeshDescription, FRawMesh& DestinationRawMesh); diff --git a/Engine/Source/Developer/MeshDescriptionOperations/Public/UVMapSettings.h b/Engine/Source/Developer/MeshDescriptionOperations/Public/UVMapSettings.h new file mode 100644 index 000000000000..23b282652e41 --- /dev/null +++ b/Engine/Source/Developer/MeshDescriptionOperations/Public/UVMapSettings.h @@ -0,0 +1,54 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" +#include "UObject/ObjectMacros.h" + +#include "UVMapSettings.generated.h" + +USTRUCT(BlueprintType) +struct MESHDESCRIPTIONOPERATIONS_API FUVMapSettings +{ + GENERATED_BODY() + + /** Length, width, height of the UV mapping gizmo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = UVMapSettings) + FVector Size; + + /** Tiling of the UV mapping */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = UVMapSettings) + FVector2D UVTile; + + /** Axis of the UV mapping gizmo that is aligned with the local up */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = UVMapSettings) + int Axis; + + /** Position of the UV mapping gizmo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = GizmoTransform) + FVector Position; + + /** Rotation axis of the UV mapping gizmo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = GizmoTransform) + FVector RotationAxis; + + /** Rotation angle (in degrees) of the UV mapping gizmo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = GizmoTransform) + float RotationAngle; + + /** Scale of the UV mapping gizmo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = GizmoTransform) + FVector Scale; + + /** Default settings */ + FUVMapSettings() + : Size(1.0f) + , UVTile(1.0f, 1.0f) + , Axis(0) + , Position(0.0f) + , RotationAxis(FVector::UpVector) + , RotationAngle(0.f) + , Scale(1.0f) + { + } +}; \ No newline at end of file diff --git a/Engine/Source/Developer/MeshMergeUtilities/Private/MeshMergeUtilities.cpp b/Engine/Source/Developer/MeshMergeUtilities/Private/MeshMergeUtilities.cpp index 28cb396c6e73..66a712d4951b 100644 --- a/Engine/Source/Developer/MeshMergeUtilities/Private/MeshMergeUtilities.cpp +++ b/Engine/Source/Developer/MeshMergeUtilities/Private/MeshMergeUtilities.cpp @@ -1059,23 +1059,6 @@ void FMeshMergeUtilities::CreateProxyMesh(const TArray& I void FMeshMergeUtilities::CreateProxyMesh(const TArray& InActors, const struct FMeshProxySettings& 
InMeshProxySettings, UMaterialInterface* InBaseMaterial, UPackage* InOuter, const FString& InProxyBasePackageName, const FGuid InGuid, const FCreateProxyDelegate& InProxyCreatedDelegate, const bool bAllowAsync /*= false*/, const float ScreenSize /*= 1.0f*/) const { - // The MeshReductionInterface manages the choice mesh reduction plugins, Unreal native vs third party (e.g. Simplygon) - - IMeshReductionModule& ReductionModule = FModuleManager::Get().LoadModuleChecked("MeshReductionInterface"); - // Error/warning checking for input - if (ReductionModule.GetMeshMergingInterface() == nullptr) - { - UE_LOG(LogMeshMerging, Log, TEXT("No automatic mesh merging module available")); - return; - } - - // Check that the delegate has a func-ptr bound to it - if (!InProxyCreatedDelegate.IsBound()) - { - UE_LOG(LogMeshMerging, Log, TEXT("Invalid (unbound) delegate for returning generated proxy mesh")); - return; - } - // No actors given as input if (InActors.Num() == 0) { @@ -1083,57 +1066,24 @@ void FMeshMergeUtilities::CreateProxyMesh(const TArray& InActors, const return; } - // Base asset name for a new assets - // In case outer is null ProxyBasePackageName has to be long package name - if (InOuter == nullptr && FPackageName::IsShortPackageName(InProxyBasePackageName)) - { - UE_LOG(LogMeshMerging, Warning, TEXT("Invalid long package name: '%s'."), *InProxyBasePackageName); - return; - } - - FScopedSlowTask SlowTask(100.f, (LOCTEXT("CreateProxyMesh_CreateMesh", "Creating Mesh Proxy"))); - SlowTask.MakeDialog(); - - // Retrieve static mesh components valid for merging from the given set of actors + // Collect components to merge TArray ComponentsToMerge; + for (AActor* Actor : InActors) { - // Collect components to merge - for (AActor* Actor : InActors) - { - TInlineComponentArray Components; - Actor->GetComponents(Components); - - // Remove anything non-regular or non-spline static mesh components - Components.RemoveAll([](UStaticMeshComponent* Val) - { - if (Val->GetClass() 
!= UStaticMeshComponent::StaticClass() && Val->GetClass() != UInstancedStaticMeshComponent::StaticClass() && !Val->IsA(USplineMeshComponent::StaticClass())) - { - return true; - } - - if (Val->GetStaticMesh() == nullptr) - { - return true; - } - - return false; - }); - - ComponentsToMerge.Append(Components); - } + TInlineComponentArray Components; + Actor->GetComponents(Components); + ComponentsToMerge.Append(Components); } - CreateProxyMesh(ComponentsToMerge, InMeshProxySettings, InBaseMaterial, InOuter, InProxyBasePackageName, InGuid, InProxyCreatedDelegate, bAllowAsync, ScreenSize); - } void FMeshMergeUtilities::CreateProxyMesh(const TArray& InComponentsToMerge, const struct FMeshProxySettings& InMeshProxySettings, UMaterialInterface* InBaseMaterial, UPackage* InOuter, const FString& InProxyBasePackageName, const FGuid InGuid, const FCreateProxyDelegate& InProxyCreatedDelegate, const bool bAllowAsync, const float ScreenSize) const { // The MeshReductionInterface manages the choice mesh reduction plugins, Unreal native vs third party (e.g. 
Simplygon) - IMeshReductionModule& ReductionModule = FModuleManager::Get().LoadModuleChecked("MeshReductionInterface"); + // Error/warning checking for input if (ReductionModule.GetMeshMergingInterface() == nullptr) { @@ -1149,6 +1099,8 @@ void FMeshMergeUtilities::CreateProxyMesh(const TArray& I } TArray ComponentsToMerge = InComponentsToMerge; + + // Remove anything non-regular or non-spline static mesh components ComponentsToMerge.RemoveAll([](UStaticMeshComponent* Val) { if (Val->GetClass() != UStaticMeshComponent::StaticClass() && Val->GetClass() != UInstancedStaticMeshComponent::StaticClass() && !Val->IsA(USplineMeshComponent::StaticClass())) @@ -1182,16 +1134,7 @@ void FMeshMergeUtilities::CreateProxyMesh(const TArray& I FScopedSlowTask SlowTask(100.f, (LOCTEXT("CreateProxyMesh_CreateMesh", "Creating Mesh Proxy"))); SlowTask.MakeDialog(); - - // Check if there are actually any static mesh components to merge - if (ComponentsToMerge.Num() == 0) - { - UE_LOG(LogMeshMerging, Log, TEXT("No valid static mesh components found in given set of Actors")); - return; - } - TArray SourceMeshes; - //TArray UniqueSections; TMap> GlobalMaterialMap; static const int32 ProxyMeshTargetLODLevel = 0; @@ -1201,7 +1144,8 @@ void FMeshMergeUtilities::CreateProxyMesh(const TArray& I EstimatedBounds = EstimatedBounds + StaticMeshComponent->Bounds; } - static const float FOVRad = 90.0f * (float)PI / 360.0f; + static const float FOVRad = FMath::DegreesToRadians(45.0f); + static const FMatrix ProjectionMatrix = FPerspectiveMatrix(FOVRad, 1920, 1080, 0.01f); FHierarchicalLODUtilitiesModule& HLODModule = FModuleManager::LoadModuleChecked("HierarchicalLODUtilities"); IHierarchicalLODUtilities* Utilities = HLODModule.GetUtilities(); diff --git a/Engine/Source/Developer/MeshReductionInterface/Public/IMeshReductionInterfaces.h b/Engine/Source/Developer/MeshReductionInterface/Public/IMeshReductionInterfaces.h index 091284d0be77..6fae7f92522e 100644 --- 
a/Engine/Source/Developer/MeshReductionInterface/Public/IMeshReductionInterfaces.h +++ b/Engine/Source/Developer/MeshReductionInterface/Public/IMeshReductionInterfaces.h @@ -76,7 +76,7 @@ DECLARE_DELEGATE_TwoParams(FCreateProxyDelegate, const FGuid, TArray&) /** Data used for passing back the data resulting from a completed mesh merging operation*/ struct FMergeCompleteData { - /** Outer object for the to store/save UObjects */ + /** Outer object to store/save UObjects */ class UPackage* InOuter; /** Base package name for the proxy mesh UObjects */ FString ProxyBasePackageName; diff --git a/Engine/Source/Editor/AnimationEditor/Private/AnimationEditor.cpp b/Engine/Source/Editor/AnimationEditor/Private/AnimationEditor.cpp index b5a732eb1bc2..603f69f1fe44 100644 --- a/Engine/Source/Editor/AnimationEditor/Private/AnimationEditor.cpp +++ b/Engine/Source/Editor/AnimationEditor/Private/AnimationEditor.cpp @@ -847,7 +847,7 @@ void FAnimationEditor::FillInsertPoseMenu(FMenuBuilder& MenuBuilder) const MenuBuilder.AddWidget( ContentBrowserModule.Get().CreateAssetPicker(AssetPickerConfig), - LOCTEXT("Select_Label", "") + FText::GetEmpty() ); } @@ -919,7 +919,7 @@ void FAnimationEditor::FillCopyToSoundWaveMenu(FMenuBuilder& MenuBuilder) const [ ContentBrowserModule.Get().CreateAssetPicker(AssetPickerConfig) ], - LOCTEXT("Select_Label", "") + FText::GetEmpty() ); } diff --git a/Engine/Source/Editor/ContentBrowser/Private/CollectionContextMenu.cpp b/Engine/Source/Editor/ContentBrowser/Private/CollectionContextMenu.cpp index d580031e3176..c5942c0bc342 100644 --- a/Engine/Source/Editor/ContentBrowser/Private/CollectionContextMenu.cpp +++ b/Engine/Source/Editor/ContentBrowser/Private/CollectionContextMenu.cpp @@ -384,7 +384,7 @@ void FCollectionContextMenu::MakeSetColorSubMenu(FMenuBuilder& MenuBuilder) .Size( FVector2D(77,16) ) ] ], - LOCTEXT("CustomColor", ""), + FText::GetEmpty(), /*bNoIndent=*/true ); } diff --git a/Engine/Source/Editor/ContentBrowser/Private/PathContextMenu.cpp 
b/Engine/Source/Editor/ContentBrowser/Private/PathContextMenu.cpp index c12625072df0..eca0710f9694 100644 --- a/Engine/Source/Editor/ContentBrowser/Private/PathContextMenu.cpp +++ b/Engine/Source/Editor/ContentBrowser/Private/PathContextMenu.cpp @@ -439,7 +439,7 @@ void FPathContextMenu::MakeSetColorSubMenu(FMenuBuilder& MenuBuilder) .Size( FVector2D(77,16) ) ] ], - LOCTEXT("CustomColor", ""), + FText::GetEmpty(), /*bNoIndent=*/true ); } diff --git a/Engine/Source/Editor/DetailCustomizations/Private/CollisionProfileDetails.cpp b/Engine/Source/Editor/DetailCustomizations/Private/CollisionProfileDetails.cpp index 25c0add74059..4fda003474d7 100644 --- a/Engine/Source/Editor/DetailCustomizations/Private/CollisionProfileDetails.cpp +++ b/Engine/Source/Editor/DetailCustomizations/Private/CollisionProfileDetails.cpp @@ -1607,7 +1607,7 @@ void FCollisionProfileDetails::CustomizeDetails( IDetailLayoutBuilder& DetailBui .AutoWidth() [ SNew(STextBlock) - .Text(LOCTEXT("ProfileListHeader_Category", "")) + .Text(FText::GetEmpty()) .Font(IDetailLayoutBuilder::GetDetailFont()) ] ] diff --git a/Engine/Source/Editor/DetailCustomizations/Private/ComponentTransformDetails.cpp b/Engine/Source/Editor/DetailCustomizations/Private/ComponentTransformDetails.cpp index 57736308b0e2..db7734a32090 100644 --- a/Engine/Source/Editor/DetailCustomizations/Private/ComponentTransformDetails.cpp +++ b/Engine/Source/Editor/DetailCustomizations/Private/ComponentTransformDetails.cpp @@ -1223,16 +1223,27 @@ void FComponentTransformDetails::OnSetTransform(ETransformField::Type TransformF // This can invalidate OldSceneComponent // We don't call PostEditChange for non commit changes because most classes implement the version that doesn't check the interaction type OldSceneComponent->PostEditChangeChainProperty(PropertyChangedChainEvent); + } + else + { + SnapshotTransactionBuffer(OldSceneComponent); + } - SceneComponent = FindObject(EditedActor, *SceneComponentPath); + SceneComponent = 
FindObject(EditedActor, *SceneComponentPath); - if (EditedActor && EditedActor->GetRootComponent() == SceneComponent) + if (EditedActor && EditedActor->GetRootComponent() == SceneComponent) + { + if (bCommitted) { EditedActor->PostEditChangeChainProperty(PropertyChangedChainEvent); SceneComponent = FindObject(EditedActor, *SceneComponentPath); } + else + { + SnapshotTransactionBuffer(EditedActor); + } } - + if (!Object->IsTemplate()) { if (TransformField == ETransformField::Rotation || TransformField == ETransformField::Location) diff --git a/Engine/Source/Editor/DetailCustomizations/Private/TextureLODSettingsDetails.cpp b/Engine/Source/Editor/DetailCustomizations/Private/TextureLODSettingsDetails.cpp index d9f444fc39c0..c132076f3587 100644 --- a/Engine/Source/Editor/DetailCustomizations/Private/TextureLODSettingsDetails.cpp +++ b/Engine/Source/Editor/DetailCustomizations/Private/TextureLODSettingsDetails.cpp @@ -329,7 +329,7 @@ void FTextureLODGroupLayout::OnMinMagFilterChanged(TSharedPtr NewSelectio FText FTextureLODGroupLayout::GetMinMagFilterComboBoxToolTip() const { - return LOCTEXT("MinMagFilterComboToolTip", ""); + return FText::GetEmpty(); } FText FTextureLODGroupLayout::GetMinMagFilterComboBoxContent() const @@ -355,7 +355,7 @@ void FTextureLODGroupLayout::OnMipFilterChanged(TSharedPtr NewSelection, FText FTextureLODGroupLayout::GetMipFilterComboBoxToolTip() const { - return LOCTEXT("MipFilterComboToolTip", ""); + return FText::GetEmpty(); } FText FTextureLODGroupLayout::GetMipFilterComboBoxContent() const @@ -384,7 +384,7 @@ void FTextureLODGroupLayout::OnMipGenSettingsChanged(TSharedPtr FSlateEditorStyle::StyleInstance = NULL; TWeakObjectPtr< UEditorStyleSettings > FSlateEditorStyle::Settings = NULL; +bool IncludeEditorSpecificStyles() +{ +#if IS_PROGRAM + return true; +#else + return GIsEditor; +#endif +} /* FSlateEditorStyle interface *****************************************************************************/ @@ -160,15 +168,6 @@ void 
FSlateEditorStyle::FStyle::SyncSettings() void FSlateEditorStyle::FStyle::Initialize() { - //@Todo slate: splitting game and style atlases is a better solution to avoiding editor textures impacting game atlas pages. Tho this would still be a loading win. - // We do WITH_EDITOR and well as !GIsEditor because in UFE !GIsEditor is true, however we need the styles. -#if WITH_EDITOR - if (!GIsEditor) - { - return; - } -#endif - SyncSettings(); SetContentRoot( FPaths::EngineContentDir() / TEXT("Editor/Slate") ); @@ -177,10 +176,17 @@ void FSlateEditorStyle::FStyle::Initialize() SetupGeneralStyles(); SetupGeneralIcons(); SetupWindowStyles(); + SetupPropertyEditorStyles(); + + // Avoid polluting the game texture atlas with non-core editor style items when not the editor (or a standalone application, like UFE) + if (!IncludeEditorSpecificStyles()) + { + return; + } + SetupProjectBadgeStyle(); SetupDockingStyles(); SetupTutorialStyles(); - SetupPropertyEditorStyles(); SetupProfilerStyle(); SetupGraphEditorStyles(); SetupLevelEditorStyle(); @@ -1116,6 +1122,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() #if WITH_EDITOR || IS_PROGRAM // Animation tools + if (IncludeEditorSpecificStyles()) { Set( "AnimEditor.RefreshButton", new IMAGE_BRUSH( "Old/AnimEditor/RefreshButton", Icon16x16 ) ); Set( "AnimEditor.VisibleEye", new IMAGE_BRUSH( "Old/AnimEditor/RefreshButton", Icon16x16 ) ); @@ -1317,7 +1324,6 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // Package Dialog - { Set( "PackageDialog.ListHeader", new BOX_BRUSH( "Old/SavePackages/ListHeader", 4.0f/32.0f ) ); Set( "SavePackages.SCC_DlgCheckedOutOther", new IMAGE_BRUSH( "Old/SavePackages/SCC_DlgCheckedOutOther", FVector2D( 18, 16 ) ) ); @@ -1348,7 +1354,6 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // Layer Browser - { Set( "LayerBrowser.LayerContentsQuickbarBackground", new BOX_BRUSH( "Common/DarkGroupBorder", 4.f/16.f ) ); Set( "LayerBrowser.ExploreLayerContents", new IMAGE_BRUSH( 
"Icons/ExploreLayerContents", Icon16x16 ) ); @@ -1423,7 +1428,6 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() Set( "WorldBrowser.LabelFontBold", DEFAULT_FONT( "Bold", 10 ) ); } - // Scene Outliner { Set( "SceneOutliner.FilterSearch", new IMAGE_BRUSH( "Old/FilterSearch", Icon16x16 ) ); @@ -1504,7 +1508,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() #if WITH_EDITOR || IS_PROGRAM // Breadcrumb Trail - { + { Set( "BreadcrumbTrail.Delimiter", new IMAGE_BRUSH( "Common/Delimiter", Icon16x16 ) ); Set( "BreadcrumbButton", FButtonStyle() @@ -1560,7 +1564,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // Open any asset dialog - { + { Set( "SystemWideCommands.SummonOpenAssetDialog", new IMAGE_BRUSH( "Icons/icon_asset_open_16px", Icon16x16 ) ); Set( "GlobalAssetPicker.Background", new BOX_BRUSH( "Old/Menu_Background", FMargin(8.0f/64.0f) ) ); @@ -1572,7 +1576,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() .SetShadowOffset( FVector2D( 1,1 ) ) .SetShadowColorAndOpacity( FLinearColor::Black ) ); - } + } // Main frame @@ -1656,6 +1660,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() #if WITH_EDITOR || IS_PROGRAM // About screen + if (IncludeEditorSpecificStyles()) { Set( "AboutScreen.Background", new IMAGE_BRUSH( "About/Background", FVector2D(600,332), FLinearColor::White, ESlateBrushTileType::Both) ); Set( "AboutScreen.Facebook", new IMAGE_BRUSH( "About/FacebookIcon", FVector2D(35,35) ) ); @@ -1669,6 +1674,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() #if WITH_EDITOR // Credits screen + if (IncludeEditorSpecificStyles()) { Set("Credits.Button", FButtonStyle(NoBorder) .SetNormal(FSlateNoResource()) @@ -1765,6 +1771,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // Sequencer + if (IncludeEditorSpecificStyles()) { Set( "Sequencer.IconKeyAuto", new IMAGE_BRUSH( "Sequencer/IconKeyAuto", Icon12x12 ) ); Set( "Sequencer.IconKeyBreak", new IMAGE_BRUSH( "Sequencer/IconKeyBreak", Icon12x12 ) ); @@ -2027,6 +2034,7 
@@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() // Sequence recorder standalone UI + if (IncludeEditorSpecificStyles()) { Set( "SequenceRecorder.TabIcon", new IMAGE_BRUSH( "SequenceRecorder/icon_tab_SequenceRecorder_16x", Icon16x16 ) ); Set( "SequenceRecorder.Common.RecordAll.Small", new IMAGE_BRUSH( "SequenceRecorder/icon_RecordAll_40x", Icon20x20 ) ); @@ -2046,6 +2054,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // Foliage Edit Mode + if (IncludeEditorSpecificStyles()) { FLinearColor DimBackground = FLinearColor(FColor(64, 64, 64)); FLinearColor DimBackgroundHover = FLinearColor(FColor(50, 50, 50)); @@ -2152,6 +2161,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // GameProjectDialog + if (IncludeEditorSpecificStyles()) { Set( "GameProjectDialog.NewProjectTitle", FTextBlockStyle(NormalText) .SetFont( DEFAULT_FONT( "BoldCondensed", 28 ) ) @@ -2244,6 +2254,7 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() } // NewClassDialog + if (IncludeEditorSpecificStyles()) { Set( "NewClassDialog.PageTitle", FTextBlockStyle(NormalText) .SetFont( DEFAULT_FONT( "BoldCondensed", 28 ) ) @@ -2285,8 +2296,6 @@ void FSlateEditorStyle::FStyle::SetupGeneralStyles() ); } - - // Package Migration { Set( "PackageMigration.DialogTitle", FTextBlockStyle( NormalText ) diff --git a/Engine/Source/Editor/FoliageEdit/Private/SFoliageEdit.cpp b/Engine/Source/Editor/FoliageEdit/Private/SFoliageEdit.cpp index e8bd96294110..0c0207be37c9 100644 --- a/Engine/Source/Editor/FoliageEdit/Private/SFoliageEdit.cpp +++ b/Engine/Source/Editor/FoliageEdit/Private/SFoliageEdit.cpp @@ -44,7 +44,7 @@ void SFoliageEdit::Construct(const FArguments& InArgs) FSlateFontInfo StandardFont = FEditorStyle::GetFontStyle(TEXT("PropertyWindow.NormalFont")); - const FText BlankText = LOCTEXT("Blank", ""); + const FText BlankText = FText::GetEmpty(); ChildSlot [ diff --git a/Engine/Source/Editor/FoliageEdit/Private/SFoliagePalette.cpp 
b/Engine/Source/Editor/FoliageEdit/Private/SFoliagePalette.cpp index 14e79030cd89..e423f53275a5 100644 --- a/Engine/Source/Editor/FoliageEdit/Private/SFoliagePalette.cpp +++ b/Engine/Source/Editor/FoliageEdit/Private/SFoliagePalette.cpp @@ -195,7 +195,7 @@ void SFoliagePalette::Construct(const FArguments& InArgs) FOnGetDetailCustomizationInstance::CreateStatic(&FFoliageTypePaintingCustomization::MakeInstance, FoliageEditMode) ); - const FText BlankText = LOCTEXT("Blank", ""); + const FText BlankText = FText::GetEmpty(); ChildSlot [ @@ -570,7 +570,7 @@ void SFoliagePalette::AddFoliageType(const FAssetData& AssetData) TSharedRef SFoliagePalette::CreatePaletteViews() { - const FText BlankText = LOCTEXT("Blank", ""); + const FText BlankText = FText::GetEmpty(); // Tile View Widget SAssignNew(TileViewWidget, SFoliageTypeTileView) diff --git a/Engine/Source/Editor/GraphEditor/Private/KismetNodes/SGraphNodeK2CreateDelegate.cpp b/Engine/Source/Editor/GraphEditor/Private/KismetNodes/SGraphNodeK2CreateDelegate.cpp index 9dbd85f72060..6ca0c1ed99f8 100644 --- a/Engine/Source/Editor/GraphEditor/Private/KismetNodes/SGraphNodeK2CreateDelegate.cpp +++ b/Engine/Source/Editor/GraphEditor/Private/KismetNodes/SGraphNodeK2CreateDelegate.cpp @@ -125,7 +125,7 @@ FText SGraphNodeK2CreateDelegate::GetCurrentFunctionDescription() const if (!FunctionSignature || !ScopeClass) { - return NSLOCTEXT("GraphNodeK2Create", "NoneLabel", ""); + return FText::GetEmpty(); } if (const auto Func = FindField(ScopeClass, Node->GetFunctionName())) diff --git a/Engine/Source/Editor/Kismet/Private/CallStackViewer.cpp b/Engine/Source/Editor/Kismet/Private/CallStackViewer.cpp index 1f071fe27cbc..9c2b34338528 100644 --- a/Engine/Source/Editor/Kismet/Private/CallStackViewer.cpp +++ b/Engine/Source/Editor/Kismet/Private/CallStackViewer.cpp @@ -375,7 +375,7 @@ void SCallStackViewer::Construct(const FArguments& InArgs, TArray FLandscapeEditorDetailCustomization_NewLandscape::MakeInstance() { @@ -541,9 +540,10 @@ 
TSharedRef FLandscapeEditorDetailCustomization_NewLandscape::GetSection { FMenuBuilder MenuBuilder(true, nullptr); - for (int32 i = 0; i < ARRAY_COUNT(SectionSizes); i++) + for (int32 i = 0; i < ARRAY_COUNT(FNewLandscapeUtils::SectionSizes); i++) { - MenuBuilder.AddMenuEntry(FText::Format(LOCTEXT("NxNQuads", "{0}\u00D7{0} Quads"), FText::AsNumber(SectionSizes[i])), FText::GetEmpty(), FSlateIcon(), FExecuteAction::CreateStatic(&OnChangeSectionSize, PropertyHandle, SectionSizes[i])); + MenuBuilder.AddMenuEntry(FText::Format(LOCTEXT("NxNQuads", "{0}\u00D7{0} Quads"), FText::AsNumber(FNewLandscapeUtils::SectionSizes[i])), FText::GetEmpty(), + FSlateIcon(), FExecuteAction::CreateStatic(&OnChangeSectionSize, PropertyHandle, FNewLandscapeUtils::SectionSizes[i])); } return MenuBuilder.MakeWidget(); @@ -572,12 +572,13 @@ TSharedRef FLandscapeEditorDetailCustomization_NewLandscape::GetSection { FMenuBuilder MenuBuilder(true, nullptr); - for (int32 i = 0; i < ARRAY_COUNT(NumSections); i++) + for (int32 i = 0; i < ARRAY_COUNT(FNewLandscapeUtils::NumSections); i++) { FFormatNamedArguments Args; - Args.Add(TEXT("Width"), NumSections[i]); - Args.Add(TEXT("Height"), NumSections[i]); - MenuBuilder.AddMenuEntry(FText::Format(NumSections[i] == 1 ? LOCTEXT("1x1Section", "{Width}\u00D7{Height} Section") : LOCTEXT("NxNSections", "{Width}\u00D7{Height} Sections"), Args), FText::GetEmpty(), FSlateIcon(), FExecuteAction::CreateStatic(&OnChangeSectionsPerComponent, PropertyHandle, NumSections[i])); + Args.Add(TEXT("Width"), FNewLandscapeUtils::NumSections[i]); + Args.Add(TEXT("Height"), FNewLandscapeUtils::NumSections[i]); + MenuBuilder.AddMenuEntry(FText::Format(FNewLandscapeUtils::NumSections[i] == 1 ? 
LOCTEXT("1x1Section", "{Width}\u00D7{Height} Section") : LOCTEXT("NxNSections", "{Width}\u00D7{Height} Sections"), Args), + FText::GetEmpty(), FSlateIcon(), FExecuteAction::CreateStatic(&OnChangeSectionsPerComponent, PropertyHandle, FNewLandscapeUtils::NumSections[i])); } return MenuBuilder.MakeWidget(); @@ -783,132 +784,32 @@ FReply FLandscapeEditorDetailCustomization_NewLandscape::OnCreateButtonClicked() LandscapeEdMode->GetWorld() != nullptr && LandscapeEdMode->GetWorld()->GetCurrentLevel()->bIsVisible) { - // Initialize heightmap data - TArray Data; const int32 ComponentCountX = LandscapeEdMode->UISettings->NewLandscape_ComponentCount.X; const int32 ComponentCountY = LandscapeEdMode->UISettings->NewLandscape_ComponentCount.Y; const int32 QuadsPerComponent = LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent * LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection; const int32 SizeX = ComponentCountX * QuadsPerComponent + 1; const int32 SizeY = ComponentCountY * QuadsPerComponent + 1; - Data.AddUninitialized(SizeX * SizeY); - uint16* WordData = Data.GetData(); - // Initialize blank heightmap data - for (int32 i = 0; i < SizeX * SizeY; i++) + TOptional< TArray< FLandscapeImportLayerInfo > > ImportLayers = FNewLandscapeUtils::CreateImportLayersInfo( LandscapeEdMode->UISettings, LandscapeEdMode->NewLandscapePreviewMode ); + + if ( !ImportLayers ) { - WordData[i] = 32768; + return FReply::Handled(); } - TArray ImportLayers; - - if (LandscapeEdMode->NewLandscapePreviewMode == ENewLandscapePreviewMode::NewLandscape) - { - const auto& ImportLandscapeLayersList = LandscapeEdMode->UISettings->ImportLandscape_Layers; - ImportLayers.Reserve(ImportLandscapeLayersList.Num()); - - // Fill in LayerInfos array and allocate data - for (const FLandscapeImportLayer& UIImportLayer : ImportLandscapeLayersList) - { - FLandscapeImportLayerInfo ImportLayer = FLandscapeImportLayerInfo(UIImportLayer.LayerName); - ImportLayer.LayerInfo = UIImportLayer.LayerInfo; - 
ImportLayer.SourceFilePath = ""; - ImportLayer.LayerData = TArray(); - ImportLayers.Add(MoveTemp(ImportLayer)); - } - - // Fill the first weight-blended layer to 100% - if (FLandscapeImportLayerInfo* FirstBlendedLayer = ImportLayers.FindByPredicate([](const FLandscapeImportLayerInfo& ImportLayer) { return ImportLayer.LayerInfo && !ImportLayer.LayerInfo->bNoWeightBlend; })) - { - FirstBlendedLayer->LayerData.AddUninitialized(SizeX * SizeY); - - uint8* ByteData = FirstBlendedLayer->LayerData.GetData(); - for (int32 i = 0; i < SizeX * SizeY; i++) - { - ByteData[i] = 255; - } - } - } - else if (LandscapeEdMode->NewLandscapePreviewMode == ENewLandscapePreviewMode::ImportLandscape) - { - const uint32 ImportSizeX = LandscapeEdMode->UISettings->ImportLandscape_Width; - const uint32 ImportSizeY = LandscapeEdMode->UISettings->ImportLandscape_Height; - - if (LandscapeEdMode->UISettings->ImportLandscape_HeightmapImportResult == ELandscapeImportResult::Error) - { - // Cancel import - return FReply::Handled(); - } - - TArray& ImportLandscapeLayersList = LandscapeEdMode->UISettings->ImportLandscape_Layers; - ImportLayers.Reserve(ImportLandscapeLayersList.Num()); - - // Fill in LayerInfos array and allocate data - for (FLandscapeImportLayer& UIImportLayer : ImportLandscapeLayersList) - { - ImportLayers.Add((const FLandscapeImportLayer&)UIImportLayer); //slicing is fine here - FLandscapeImportLayerInfo& ImportLayer = ImportLayers.Last(); - - if (ImportLayer.LayerInfo != nullptr && ImportLayer.SourceFilePath != "") - { - ILandscapeEditorModule& LandscapeEditorModule = FModuleManager::GetModuleChecked("LandscapeEditor"); - const ILandscapeWeightmapFileFormat* WeightmapFormat = LandscapeEditorModule.GetWeightmapFormatByExtension(*FPaths::GetExtension(ImportLayer.SourceFilePath, true)); - - if (WeightmapFormat) - { - FLandscapeWeightmapImportData WeightmapImportData = WeightmapFormat->Import(*ImportLayer.SourceFilePath, ImportLayer.LayerName, {ImportSizeX, ImportSizeY}); - 
UIImportLayer.ImportResult = WeightmapImportData.ResultCode; - UIImportLayer.ErrorMessage = WeightmapImportData.ErrorMessage; - ImportLayer.LayerData = MoveTemp(WeightmapImportData.Data); - } - else - { - UIImportLayer.ImportResult = ELandscapeImportResult::Error; - UIImportLayer.ErrorMessage = LOCTEXT("Import_UnknownFileType", "File type not recognised"); - } - - if (UIImportLayer.ImportResult == ELandscapeImportResult::Error) - { - ImportLayer.LayerData.Empty(); - FMessageDialog::Open(EAppMsgType::Ok, UIImportLayer.ErrorMessage); - - // Cancel import - return FReply::Handled(); - } - } - } - - const TArray& ImportData = LandscapeEdMode->UISettings->GetImportLandscapeData(); - if (ImportData.Num() != 0) - { - const int32 OffsetX = (int32)(SizeX - ImportSizeX) / 2; - const int32 OffsetY = (int32)(SizeY - ImportSizeY) / 2; - - // Heightmap - Data = LandscapeEditorUtils::ExpandData(ImportData, - 0, 0, ImportSizeX - 1, ImportSizeY - 1, - -OffsetX, -OffsetY, SizeX - OffsetX - 1, SizeY - OffsetY - 1); - - // Layers - for (int32 LayerIdx = 0; LayerIdx < ImportLayers.Num(); LayerIdx++) - { - TArray& ImportLayerData = ImportLayers[LayerIdx].LayerData; - if (ImportLayerData.Num()) - { - ImportLayerData = LandscapeEditorUtils::ExpandData(ImportLayerData, - 0, 0, ImportSizeX - 1, ImportSizeY - 1, - -OffsetX, -OffsetY, SizeX - OffsetX - 1, SizeY - OffsetY - 1); - } - } - } - } + TArray Data = FNewLandscapeUtils::ComputeHeightData( LandscapeEdMode->UISettings, ImportLayers.GetValue(), LandscapeEdMode->NewLandscapePreviewMode ); FScopedTransaction Transaction(LOCTEXT("Undo", "Creating New Landscape")); - const FVector Offset = FTransform(LandscapeEdMode->UISettings->NewLandscape_Rotation, FVector::ZeroVector, LandscapeEdMode->UISettings->NewLandscape_Scale).TransformVector(FVector(-ComponentCountX * QuadsPerComponent / 2, -ComponentCountY * QuadsPerComponent / 2, 0)); + const FVector Offset = FTransform(LandscapeEdMode->UISettings->NewLandscape_Rotation, FVector::ZeroVector, + 
LandscapeEdMode->UISettings->NewLandscape_Scale).TransformVector(FVector(-ComponentCountX * QuadsPerComponent / 2, -ComponentCountY * QuadsPerComponent / 2, 0)); + ALandscape* Landscape = LandscapeEdMode->GetWorld()->SpawnActor(LandscapeEdMode->UISettings->NewLandscape_Location + Offset, LandscapeEdMode->UISettings->NewLandscape_Rotation); Landscape->LandscapeMaterial = LandscapeEdMode->UISettings->NewLandscape_Material.Get(); Landscape->SetActorRelativeScale3D(LandscapeEdMode->UISettings->NewLandscape_Scale); - Landscape->Import(FGuid::NewGuid(), 0, 0, SizeX-1, SizeY-1, LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent, LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection, Data.GetData(), nullptr, ImportLayers, LandscapeEdMode->UISettings->ImportLandscape_AlphamapType); + + Landscape->Import(FGuid::NewGuid(), 0, 0, SizeX-1, SizeY-1, LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent, LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection, Data.GetData(), + nullptr, ImportLayers.GetValue(), LandscapeEdMode->UISettings->ImportLandscape_AlphamapType); // automatically calculate a lighting LOD that won't crash lightmass (hopefully) // < 2048x2048 -> LOD0 @@ -1080,45 +981,7 @@ void FLandscapeEditorDetailCustomization_NewLandscape::OnImportHeightmapFilename FEdModeLandscape* LandscapeEdMode = GetEditorMode(); if (LandscapeEdMode != nullptr) { - ImportResolutions.Reset(1); - LandscapeEdMode->UISettings->ImportLandscape_Width = 0; - LandscapeEdMode->UISettings->ImportLandscape_Height = 0; - LandscapeEdMode->UISettings->ClearImportLandscapeData(); - LandscapeEdMode->UISettings->ImportLandscape_HeightmapImportResult = ELandscapeImportResult::Success; - LandscapeEdMode->UISettings->ImportLandscape_HeightmapErrorMessage = FText(); - - if (!LandscapeEdMode->UISettings->ImportLandscape_HeightmapFilename.IsEmpty()) - { - ILandscapeEditorModule& LandscapeEditorModule = FModuleManager::GetModuleChecked("LandscapeEditor"); - const 
ILandscapeHeightmapFileFormat* HeightmapFormat = LandscapeEditorModule.GetHeightmapFormatByExtension(*FPaths::GetExtension(LandscapeEdMode->UISettings->ImportLandscape_HeightmapFilename, true)); - - if (HeightmapFormat) - { - FLandscapeHeightmapInfo HeightmapImportInfo = HeightmapFormat->Validate(*LandscapeEdMode->UISettings->ImportLandscape_HeightmapFilename); - LandscapeEdMode->UISettings->ImportLandscape_HeightmapImportResult = HeightmapImportInfo.ResultCode; - LandscapeEdMode->UISettings->ImportLandscape_HeightmapErrorMessage = HeightmapImportInfo.ErrorMessage; - ImportResolutions = MoveTemp(HeightmapImportInfo.PossibleResolutions); - if (HeightmapImportInfo.DataScale.IsSet()) - { - LandscapeEdMode->UISettings->NewLandscape_Scale = HeightmapImportInfo.DataScale.GetValue(); - LandscapeEdMode->UISettings->NewLandscape_Scale.Z *= LANDSCAPE_INV_ZSCALE; - } - } - else - { - LandscapeEdMode->UISettings->ImportLandscape_HeightmapImportResult = ELandscapeImportResult::Error; - LandscapeEdMode->UISettings->ImportLandscape_HeightmapErrorMessage = LOCTEXT("Import_UnknownFileType", "File type not recognised"); - } - } - - if (ImportResolutions.Num() > 0) - { - int32 i = ImportResolutions.Num() / 2; - LandscapeEdMode->UISettings->ImportLandscape_Width = ImportResolutions[i].Width; - LandscapeEdMode->UISettings->ImportLandscape_Height = ImportResolutions[i].Height; - LandscapeEdMode->UISettings->ImportLandscapeData(); - ChooseBestComponentSizeForImport(LandscapeEdMode); - } + FNewLandscapeUtils::ImportLandscapeData(LandscapeEdMode->UISettings, ImportResolutions); } } @@ -1207,83 +1070,7 @@ FText FLandscapeEditorDetailCustomization_NewLandscape::GetImportLandscapeResolu void FLandscapeEditorDetailCustomization_NewLandscape::ChooseBestComponentSizeForImport(FEdModeLandscape* LandscapeEdMode) { - int32 Width = LandscapeEdMode->UISettings->ImportLandscape_Width; - int32 Height = LandscapeEdMode->UISettings->ImportLandscape_Height; - - bool bFoundMatch = false; - if (Width > 0 && 
Height > 0) - { - // Try to find a section size and number of sections that exactly matches the dimensions of the heightfield - for (int32 SectionSizesIdx = ARRAY_COUNT(SectionSizes) - 1; SectionSizesIdx >= 0; SectionSizesIdx--) - { - for (int32 NumSectionsIdx = ARRAY_COUNT(NumSections) - 1; NumSectionsIdx >= 0; NumSectionsIdx--) - { - int32 ss = SectionSizes[SectionSizesIdx]; - int32 ns = NumSections[NumSectionsIdx]; - - if (((Width - 1) % (ss * ns)) == 0 && ((Width - 1) / (ss * ns)) <= 32 && - ((Height - 1) % (ss * ns)) == 0 && ((Height - 1) / (ss * ns)) <= 32) - { - bFoundMatch = true; - LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection = ss; - LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent = ns; - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.X = (Width - 1) / (ss * ns); - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.Y = (Height - 1) / (ss * ns); - LandscapeEdMode->UISettings->NewLandscape_ClampSize(); - break; - } - } - if (bFoundMatch) - { - break; - } - } - - if (!bFoundMatch) - { - // if there was no exact match, try increasing the section size until we encompass the whole heightmap - const int32 CurrentSectionSize = LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection; - const int32 CurrentNumSections = LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent; - for (int32 SectionSizesIdx = 0; SectionSizesIdx < ARRAY_COUNT(SectionSizes); SectionSizesIdx++) - { - if (SectionSizes[SectionSizesIdx] < CurrentSectionSize) - { - continue; - } - - const int32 ComponentsX = FMath::DivideAndRoundUp((Width - 1), SectionSizes[SectionSizesIdx] * CurrentNumSections); - const int32 ComponentsY = FMath::DivideAndRoundUp((Height - 1), SectionSizes[SectionSizesIdx] * CurrentNumSections); - if (ComponentsX <= 32 && ComponentsY <= 32) - { - bFoundMatch = true; - LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection = SectionSizes[SectionSizesIdx]; - //LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent 
= ; - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.X = ComponentsX; - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.Y = ComponentsY; - LandscapeEdMode->UISettings->NewLandscape_ClampSize(); - break; - } - } - } - - if (!bFoundMatch) - { - // if the heightmap is very large, fall back to using the largest values we support - const int32 MaxSectionSize = SectionSizes[ARRAY_COUNT(SectionSizes) - 1]; - const int32 MaxNumSubSections = NumSections[ARRAY_COUNT(NumSections) - 1]; - const int32 ComponentsX = FMath::DivideAndRoundUp((Width - 1), MaxSectionSize * MaxNumSubSections); - const int32 ComponentsY = FMath::DivideAndRoundUp((Height - 1), MaxSectionSize * MaxNumSubSections); - - bFoundMatch = true; - LandscapeEdMode->UISettings->NewLandscape_QuadsPerSection = MaxSectionSize; - LandscapeEdMode->UISettings->NewLandscape_SectionsPerComponent = MaxNumSubSections; - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.X = ComponentsX; - LandscapeEdMode->UISettings->NewLandscape_ComponentCount.Y = ComponentsY; - LandscapeEdMode->UISettings->NewLandscape_ClampSize(); - } - - check(bFoundMatch); - } + FNewLandscapeUtils::ChooseBestComponentSizeForImport(LandscapeEdMode->UISettings); } EVisibility FLandscapeEditorDetailCustomization_NewLandscape::GetMaterialTipVisibility() const diff --git a/Engine/Source/Editor/LandscapeEditor/Private/LandscapeEditorDetailCustomization_NewLandscape.h b/Engine/Source/Editor/LandscapeEditor/Private/LandscapeEditorDetailCustomization_NewLandscape.h index 48ab38b579fb..565ce15ed6e0 100644 --- a/Engine/Source/Editor/LandscapeEditor/Private/LandscapeEditorDetailCustomization_NewLandscape.h +++ b/Engine/Source/Editor/LandscapeEditor/Private/LandscapeEditorDetailCustomization_NewLandscape.h @@ -81,9 +81,6 @@ public: EVisibility GetMaterialTipVisibility() const; protected: - static const int32 SectionSizes[]; - static const int32 NumSections[]; - TArray ImportResolutions; }; diff --git 
a/Engine/Source/Editor/LandscapeEditor/Private/NewLandscapeUtils.cpp b/Engine/Source/Editor/LandscapeEditor/Private/NewLandscapeUtils.cpp new file mode 100644 index 000000000000..dfd9f04fad3b --- /dev/null +++ b/Engine/Source/Editor/LandscapeEditor/Private/NewLandscapeUtils.cpp @@ -0,0 +1,293 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "NewLandscapeUtils.h" + +#include "Landscape.h" +#include "LandscapeDataAccess.h" +#include "LandscapeEditorObject.h" +#include "LandscapeEditorModule.h" +#include "LandscapeEditorUtils.h" +#include "LandscapeEdMode.h" + +#include "Misc/MessageDialog.h" +#include "Modules/ModuleManager.h" +#include "ScopedTransaction.h" +#include "Templates/UnrealTemplate.h" + +#define LOCTEXT_NAMESPACE "LandscapeEditor.NewLandscape" + +const int32 FNewLandscapeUtils::SectionSizes[6] = { 7, 15, 31, 63, 127, 255 }; +const int32 FNewLandscapeUtils::NumSections[2] = { 1, 2 }; + +void FNewLandscapeUtils::ChooseBestComponentSizeForImport(ULandscapeEditorObject* UISettings) +{ + int32 Width = UISettings->ImportLandscape_Width; + int32 Height = UISettings->ImportLandscape_Height; + + bool bFoundMatch = false; + if(Width > 0 && Height > 0) + { + // Try to find a section size and number of sections that exactly matches the dimensions of the heightfield + for(int32 SectionSizesIdx = ARRAY_COUNT(SectionSizes) - 1; SectionSizesIdx >= 0; SectionSizesIdx--) + { + for(int32 NumSectionsIdx = ARRAY_COUNT(NumSections) - 1; NumSectionsIdx >= 0; NumSectionsIdx--) + { + int32 ss = SectionSizes[SectionSizesIdx]; + int32 ns = NumSections[NumSectionsIdx]; + + if(((Width - 1) % (ss * ns)) == 0 && ((Width - 1) / (ss * ns)) <= 32 && + ((Height - 1) % (ss * ns)) == 0 && ((Height - 1) / (ss * ns)) <= 32) + { + bFoundMatch = true; + UISettings->NewLandscape_QuadsPerSection = ss; + UISettings->NewLandscape_SectionsPerComponent = ns; + UISettings->NewLandscape_ComponentCount.X = (Width - 1) / (ss * ns); + UISettings->NewLandscape_ComponentCount.Y = 
(Height - 1) / (ss * ns); + UISettings->NewLandscape_ClampSize(); + break; + } + } + if(bFoundMatch) + { + break; + } + } + + if(!bFoundMatch) + { + // if there was no exact match, try increasing the section size until we encompass the whole heightmap + const int32 CurrentSectionSize = UISettings->NewLandscape_QuadsPerSection; + const int32 CurrentNumSections = UISettings->NewLandscape_SectionsPerComponent; + for(int32 SectionSizesIdx = 0; SectionSizesIdx < ARRAY_COUNT(SectionSizes); SectionSizesIdx++) + { + if(SectionSizes[SectionSizesIdx] < CurrentSectionSize) + { + continue; + } + + const int32 ComponentsX = FMath::DivideAndRoundUp((Width - 1), SectionSizes[SectionSizesIdx] * CurrentNumSections); + const int32 ComponentsY = FMath::DivideAndRoundUp((Height - 1), SectionSizes[SectionSizesIdx] * CurrentNumSections); + if(ComponentsX <= 32 && ComponentsY <= 32) + { + bFoundMatch = true; + UISettings->NewLandscape_QuadsPerSection = SectionSizes[SectionSizesIdx]; + //UISettings->NewLandscape_SectionsPerComponent = ; + UISettings->NewLandscape_ComponentCount.X = ComponentsX; + UISettings->NewLandscape_ComponentCount.Y = ComponentsY; + UISettings->NewLandscape_ClampSize(); + break; + } + } + } + + if(!bFoundMatch) + { + // if the heightmap is very large, fall back to using the largest values we support + const int32 MaxSectionSize = SectionSizes[ARRAY_COUNT(SectionSizes) - 1]; + const int32 MaxNumSubSections = NumSections[ARRAY_COUNT(NumSections) - 1]; + const int32 ComponentsX = FMath::DivideAndRoundUp((Width - 1), MaxSectionSize * MaxNumSubSections); + const int32 ComponentsY = FMath::DivideAndRoundUp((Height - 1), MaxSectionSize * MaxNumSubSections); + + bFoundMatch = true; + UISettings->NewLandscape_QuadsPerSection = MaxSectionSize; + UISettings->NewLandscape_SectionsPerComponent = MaxNumSubSections; + UISettings->NewLandscape_ComponentCount.X = ComponentsX; + UISettings->NewLandscape_ComponentCount.Y = ComponentsY; + UISettings->NewLandscape_ClampSize(); + } + + 
check(bFoundMatch); + } +} + +void FNewLandscapeUtils::ImportLandscapeData( ULandscapeEditorObject* UISettings, TArray< FLandscapeFileResolution >& ImportResolutions ) +{ + if ( !UISettings ) + { + return; + } + + ImportResolutions.Reset(1); + UISettings->ImportLandscape_Width = 0; + UISettings->ImportLandscape_Height = 0; + UISettings->ClearImportLandscapeData(); + UISettings->ImportLandscape_HeightmapImportResult = ELandscapeImportResult::Success; + UISettings->ImportLandscape_HeightmapErrorMessage = FText(); + + if(!UISettings->ImportLandscape_HeightmapFilename.IsEmpty()) + { + ILandscapeEditorModule& LandscapeEditorModule = FModuleManager::GetModuleChecked("LandscapeEditor"); + const ILandscapeHeightmapFileFormat* HeightmapFormat = LandscapeEditorModule.GetHeightmapFormatByExtension(*FPaths::GetExtension(UISettings->ImportLandscape_HeightmapFilename, true)); + + if(HeightmapFormat) + { + FLandscapeHeightmapInfo HeightmapImportInfo = HeightmapFormat->Validate(*UISettings->ImportLandscape_HeightmapFilename); + UISettings->ImportLandscape_HeightmapImportResult = HeightmapImportInfo.ResultCode; + UISettings->ImportLandscape_HeightmapErrorMessage = HeightmapImportInfo.ErrorMessage; + ImportResolutions = MoveTemp(HeightmapImportInfo.PossibleResolutions); + if(HeightmapImportInfo.DataScale.IsSet()) + { + UISettings->NewLandscape_Scale = HeightmapImportInfo.DataScale.GetValue(); + UISettings->NewLandscape_Scale.Z *= LANDSCAPE_INV_ZSCALE; + } + } + else + { + UISettings->ImportLandscape_HeightmapImportResult = ELandscapeImportResult::Error; + UISettings->ImportLandscape_HeightmapErrorMessage = LOCTEXT("Import_UnknownFileType", "File type not recognised"); + } + } + + if(ImportResolutions.Num() > 0) + { + int32 i = ImportResolutions.Num() / 2; + UISettings->ImportLandscape_Width = ImportResolutions[i].Width; + UISettings->ImportLandscape_Height = ImportResolutions[i].Height; + UISettings->ImportLandscapeData(); + ChooseBestComponentSizeForImport(UISettings); + } +} + 
+TOptional< TArray< FLandscapeImportLayerInfo > > FNewLandscapeUtils::CreateImportLayersInfo( ULandscapeEditorObject* UISettings, int32 NewLandscapePreviewMode ) +{ + const int32 ComponentCountX = UISettings->NewLandscape_ComponentCount.X; + const int32 ComponentCountY = UISettings->NewLandscape_ComponentCount.Y; + const int32 QuadsPerComponent = UISettings->NewLandscape_SectionsPerComponent * UISettings->NewLandscape_QuadsPerSection; + const int32 SizeX = ComponentCountX * QuadsPerComponent + 1; + const int32 SizeY = ComponentCountY * QuadsPerComponent + 1; + + TArray ImportLayers; + + if(NewLandscapePreviewMode == ENewLandscapePreviewMode::NewLandscape) + { + const auto& ImportLandscapeLayersList = UISettings->ImportLandscape_Layers; + ImportLayers.Reserve(ImportLandscapeLayersList.Num()); + + // Fill in LayerInfos array and allocate data + for(const FLandscapeImportLayer& UIImportLayer : ImportLandscapeLayersList) + { + FLandscapeImportLayerInfo ImportLayer = FLandscapeImportLayerInfo(UIImportLayer.LayerName); + ImportLayer.LayerInfo = UIImportLayer.LayerInfo; + ImportLayer.SourceFilePath = ""; + ImportLayer.LayerData = TArray(); + ImportLayers.Add(MoveTemp(ImportLayer)); + } + + // Fill the first weight-blended layer to 100% + if(FLandscapeImportLayerInfo* FirstBlendedLayer = ImportLayers.FindByPredicate([](const FLandscapeImportLayerInfo& ImportLayer) { return ImportLayer.LayerInfo && !ImportLayer.LayerInfo->bNoWeightBlend; })) + { + FirstBlendedLayer->LayerData.AddUninitialized(SizeX * SizeY); + + uint8* ByteData = FirstBlendedLayer->LayerData.GetData(); + for(int32 i = 0; i < SizeX * SizeY; i++) + { + ByteData[i] = 255; + } + } + } + else if(NewLandscapePreviewMode == ENewLandscapePreviewMode::ImportLandscape) + { + const uint32 ImportSizeX = UISettings->ImportLandscape_Width; + const uint32 ImportSizeY = UISettings->ImportLandscape_Height; + + if(UISettings->ImportLandscape_HeightmapImportResult == ELandscapeImportResult::Error) + { + // Cancel import + 
return TOptional< TArray< FLandscapeImportLayerInfo > >(); + } + + TArray& ImportLandscapeLayersList = UISettings->ImportLandscape_Layers; + ImportLayers.Reserve(ImportLandscapeLayersList.Num()); + + // Fill in LayerInfos array and allocate data + for(FLandscapeImportLayer& UIImportLayer : ImportLandscapeLayersList) + { + ImportLayers.Add((const FLandscapeImportLayer&)UIImportLayer); //slicing is fine here + FLandscapeImportLayerInfo& ImportLayer = ImportLayers.Last(); + + if(ImportLayer.LayerInfo != nullptr && ImportLayer.SourceFilePath != "") + { + ILandscapeEditorModule& LandscapeEditorModule = FModuleManager::GetModuleChecked("LandscapeEditor"); + const ILandscapeWeightmapFileFormat* WeightmapFormat = LandscapeEditorModule.GetWeightmapFormatByExtension(*FPaths::GetExtension(ImportLayer.SourceFilePath, true)); + + if(WeightmapFormat) + { + FLandscapeWeightmapImportData WeightmapImportData = WeightmapFormat->Import(*ImportLayer.SourceFilePath, ImportLayer.LayerName, { ImportSizeX, ImportSizeY }); + UIImportLayer.ImportResult = WeightmapImportData.ResultCode; + UIImportLayer.ErrorMessage = WeightmapImportData.ErrorMessage; + ImportLayer.LayerData = MoveTemp(WeightmapImportData.Data); + } + else + { + UIImportLayer.ImportResult = ELandscapeImportResult::Error; + UIImportLayer.ErrorMessage = LOCTEXT("Import_UnknownFileType", "File type not recognised"); + } + + if(UIImportLayer.ImportResult == ELandscapeImportResult::Error) + { + ImportLayer.LayerData.Empty(); + FMessageDialog::Open(EAppMsgType::Ok, UIImportLayer.ErrorMessage); + + // Cancel import + return TOptional< TArray< FLandscapeImportLayerInfo > >(); + } + } + } + } + + return ImportLayers; +} + +TArray< uint16 > FNewLandscapeUtils::ComputeHeightData( ULandscapeEditorObject* UISettings, TArray< FLandscapeImportLayerInfo >& ImportLayers, int32 NewLandscapePreviewMode ) +{ + const int32 ComponentCountX = UISettings->NewLandscape_ComponentCount.X; + const int32 ComponentCountY = 
UISettings->NewLandscape_ComponentCount.Y; + const int32 QuadsPerComponent = UISettings->NewLandscape_SectionsPerComponent * UISettings->NewLandscape_QuadsPerSection; + const int32 SizeX = ComponentCountX * QuadsPerComponent + 1; + const int32 SizeY = ComponentCountY * QuadsPerComponent + 1; + + const uint32 ImportSizeX = UISettings->ImportLandscape_Width; + const uint32 ImportSizeY = UISettings->ImportLandscape_Height; + + // Initialize heightmap data + TArray Data; + Data.AddUninitialized(SizeX * SizeY); + uint16* WordData = Data.GetData(); + + // Initialize blank heightmap data + for(int32 i = 0; i < SizeX * SizeY; i++) + { + WordData[i] = 32768; + } + + if(NewLandscapePreviewMode == ENewLandscapePreviewMode::ImportLandscape) + { + const TArray& ImportData = UISettings->GetImportLandscapeData(); + if(ImportData.Num() != 0) + { + const int32 OffsetX = (int32)(SizeX - ImportSizeX) / 2; + const int32 OffsetY = (int32)(SizeY - ImportSizeY) / 2; + + // Heightmap + Data = LandscapeEditorUtils::ExpandData(ImportData, + 0, 0, ImportSizeX - 1, ImportSizeY - 1, + -OffsetX, -OffsetY, SizeX - OffsetX - 1, SizeY - OffsetY - 1); + + // Layers + for(int32 LayerIdx = 0; LayerIdx < ImportLayers.Num(); LayerIdx++) + { + TArray& ImportLayerData = ImportLayers[LayerIdx].LayerData; + if(ImportLayerData.Num()) + { + ImportLayerData = LandscapeEditorUtils::ExpandData(ImportLayerData, + 0, 0, ImportSizeX - 1, ImportSizeY - 1, + -OffsetX, -OffsetY, SizeX - OffsetX - 1, SizeY - OffsetY - 1); + } + } + } + } + + return Data; +} + +#undef LOCTEXT_NAMESPACE diff --git a/Engine/Source/Editor/LandscapeEditor/Public/LandscapeEditorObject.h b/Engine/Source/Editor/LandscapeEditor/Public/LandscapeEditorObject.h index 0fbd1eea4650..b859e96572f2 100644 --- a/Engine/Source/Editor/LandscapeEditor/Public/LandscapeEditorObject.h +++ b/Engine/Source/Editor/LandscapeEditor/Public/LandscapeEditorObject.h @@ -232,7 +232,7 @@ struct FLandscapePatternBrushWorldSpaceSettings 
FLandscapePatternBrushWorldSpaceSettings() = default; }; -UCLASS() +UCLASS(MinimalAPI) class ULandscapeEditorObject : public UObject { GENERATED_UCLASS_BODY() diff --git a/Engine/Source/Editor/LandscapeEditor/Public/NewLandscapeUtils.h b/Engine/Source/Editor/LandscapeEditor/Public/NewLandscapeUtils.h new file mode 100644 index 000000000000..b140bd4b029f --- /dev/null +++ b/Engine/Source/Editor/LandscapeEditor/Public/NewLandscapeUtils.h @@ -0,0 +1,22 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" + +class ALandscape; +struct FLandscapeFileResolution; +struct FLandscapeImportLayerInfo; +class ULandscapeEditorObject; + +class LANDSCAPEEDITOR_API FNewLandscapeUtils +{ +public: + static void ChooseBestComponentSizeForImport( ULandscapeEditorObject* UISettings ); + static void ImportLandscapeData( ULandscapeEditorObject* UISettings, TArray< FLandscapeFileResolution >& ImportResolutions ); + static TOptional< TArray< FLandscapeImportLayerInfo > > CreateImportLayersInfo( ULandscapeEditorObject* UISettings, int32 NewLandscapePreviewMode ); + static TArray< uint16 > ComputeHeightData( ULandscapeEditorObject* UISettings, TArray< FLandscapeImportLayerInfo >& ImportLayers, int32 NewLandscapePreviewMode ); + + static const int32 SectionSizes[6]; + static const int32 NumSections[2]; +}; diff --git a/Engine/Source/Editor/Layers/Private/ActorLayerViewModel.cpp b/Engine/Source/Editor/Layers/Private/ActorLayerViewModel.cpp index 3eb23ef24b86..9e226184d4d4 100644 --- a/Engine/Source/Editor/Layers/Private/ActorLayerViewModel.cpp +++ b/Engine/Source/Editor/Layers/Private/ActorLayerViewModel.cpp @@ -53,7 +53,7 @@ FText FActorLayerViewModel::GetName() const { if( !Layer.IsValid() ) { - return LOCTEXT("Invalid layer Name", ""); + return FText::GetEmpty(); } return FText::FromName(Layer->LayerName); diff --git a/Engine/Source/Editor/Matinee/Private/Matinee.cpp b/Engine/Source/Editor/Matinee/Private/Matinee.cpp index 
cf0ea7b52aa9..997d4d00b3bf 100644 --- a/Engine/Source/Editor/Matinee/Private/Matinee.cpp +++ b/Engine/Source/Editor/Matinee/Private/Matinee.cpp @@ -1513,7 +1513,7 @@ void FMatinee::StopPlaying() UpdateFixedTimeStepPlayback(); } -void FMatinee::OnPostUndoRedo(FUndoSessionContext SessionContext, bool Succeeded) +void FMatinee::OnPostUndoRedo(const FTransactionContext& TransactionContext, bool Succeeded) { InvalidateTrackWindowViewports(); } diff --git a/Engine/Source/Editor/Matinee/Private/Matinee.h b/Engine/Source/Editor/Matinee/Private/Matinee.h index 80ac1b60a813..c571521c9fde 100644 --- a/Engine/Source/Editor/Matinee/Private/Matinee.h +++ b/Engine/Source/Editor/Matinee/Private/Matinee.h @@ -123,7 +123,7 @@ public: void StopPlaying() override; /** Handle undo redo events */ - void OnPostUndoRedo(FUndoSessionContext SessionContext, bool Succeeded); + void OnPostUndoRedo(const FTransactionContext& TransactionContext, bool Succeeded); // Menu handlers void OnMenuAddKey(); diff --git a/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/MeshProxyTool.cpp b/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/MeshProxyTool.cpp index 31bdb5036a55..3faf455072cd 100644 --- a/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/MeshProxyTool.cpp +++ b/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/MeshProxyTool.cpp @@ -162,7 +162,7 @@ bool FMeshProxyTool::CanMerge() const FText FThirdPartyMeshProxyTool::GetTooltipText() const { - return LOCTEXT("ThirdPartyMeshProxyToolTooltip", "Harvest geometry selected meshes and merge and and simplify them as a single mesh."); + return LOCTEXT("ThirdPartyMeshProxyToolTooltip", "Harvest geometry from selected meshes, merge and simplify them as a single mesh."); } diff --git a/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/SMeshProxyDialog.cpp b/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/SMeshProxyDialog.cpp index 9931ba68a37e..91d98fb19612 100644 --- 
a/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/SMeshProxyDialog.cpp +++ b/Engine/Source/Editor/MergeActors/Private/MeshProxyTool/SMeshProxyDialog.cpp @@ -49,47 +49,47 @@ void SMeshProxyDialog::Construct(const FArguments& InArgs, FMeshProxyTool* InTo // Create widget layout this->ChildSlot + [ + SNew(SVerticalBox) + + SVerticalBox::Slot() + .AutoHeight() + .Padding(0, 10, 0, 0) [ - SNew(SVerticalBox) - + SVerticalBox::Slot() - .AutoHeight() - .Padding(0, 10, 0, 0) + SNew(SBorder) + .BorderImage(FEditorStyle::GetBrush("ToolPanel.GroupBorder")) [ - SNew(SBorder) - .BorderImage(FEditorStyle::GetBrush("ToolPanel.GroupBorder")) + SNew(SVerticalBox) + // Static mesh component selection + + SVerticalBox::Slot() + .AutoHeight() + .Padding(FEditorStyle::GetMargin("StandardDialog.ContentPadding")) [ - SNew(SVerticalBox) - // Static mesh component selection - + SVerticalBox::Slot() - .AutoHeight() - .Padding(FEditorStyle::GetMargin("StandardDialog.ContentPadding")) + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .AutoWidth() + .VAlign(VAlign_Center) [ - SNew(SHorizontalBox) - + SHorizontalBox::Slot() - .AutoWidth() - .VAlign(VAlign_Center) - [ - SNew(STextBlock) - .Text(LOCTEXT("MergeStaticMeshComponentsLabel", "Mesh Components to be incorporated in the merge:")) - ] - ] - + SVerticalBox::Slot() - .AutoHeight() - .Padding(FEditorStyle::GetMargin("StandardDialog.ContentPadding")) - [ - SAssignNew(ComponentSelectionControl.ComponentsListView, SListView>) - .ListItemsSource(&ComponentSelectionControl.SelectedComponents) - .OnGenerateRow(this, &SMeshProxyDialog::MakeComponentListItemWidget) - .ToolTipText(LOCTEXT("SelectedComponentsListBoxToolTip", "The selected mesh components will be incorporated into the merged mesh")) + SNew(STextBlock) + .Text(LOCTEXT("CreateProxyMeshComponentsLabel", "Mesh components used to compute the proxy mesh:")) ] ] + + SVerticalBox::Slot() + .AutoHeight() + .Padding(FEditorStyle::GetMargin("StandardDialog.ContentPadding")) + [ + 
SAssignNew(ComponentSelectionControl.ComponentsListView, SListView>) + .ListItemsSource(&ComponentSelectionControl.SelectedComponents) + .OnGenerateRow(this, &SMeshProxyDialog::MakeComponentListItemWidget) + .ToolTipText(LOCTEXT("CreateProxyMeshSelectedComponentsListBoxToolTip", "The selected mesh components will be used to compute the proxy mesh")) + ] ] + ] - + SVerticalBox::Slot() - .Padding(0, 10, 0, 0) - [ - SNew(SBorder) - .BorderImage(FEditorStyle::GetBrush("ToolPanel.GroupBorder")) + + SVerticalBox::Slot() + .Padding(0, 10, 0, 0) + [ + SNew(SBorder) + .BorderImage(FEditorStyle::GetBrush("ToolPanel.GroupBorder")) [ SNew(SVerticalBox) // Static mesh component selection @@ -106,8 +106,6 @@ void SMeshProxyDialog::Construct(const FArguments& InArgs, FMeshProxyTool* InTo ] ] - - + SVerticalBox::Slot() .AutoHeight() .Padding(10) @@ -120,8 +118,8 @@ void SMeshProxyDialog::Construct(const FArguments& InArgs, FMeshProxyTool* InTo SNew(STextBlock) .Text(LOCTEXT("DeleteUndo", "Insufficient mesh components found for merging.")) ] - ] - ]; + ] + ]; // Selection change diff --git a/Engine/Source/Editor/Persona/Private/SSlotNameReferenceWindow.cpp b/Engine/Source/Editor/Persona/Private/SSlotNameReferenceWindow.cpp index fe3db7fe2467..e2560effb629 100644 --- a/Engine/Source/Editor/Persona/Private/SSlotNameReferenceWindow.cpp +++ b/Engine/Source/Editor/Persona/Private/SSlotNameReferenceWindow.cpp @@ -316,7 +316,7 @@ TSharedRef SSlotNameReferenceWindow::GetContent() + SHeaderRow::Column("MontageName") .DefaultLabel(FText(LOCTEXT("MontageColumnDefaultLabel", "Montage Name"))) + SHeaderRow::Column("Asset") - .DefaultLabel(FText(LOCTEXT("MontageAssetColumnDefaultLabel", ""))) + .DefaultLabel(FText::GetEmpty()) ) ] ] @@ -352,7 +352,7 @@ TSharedRef SSlotNameReferenceWindow::GetContent() + SHeaderRow::Column("NodeName") .DefaultLabel(FText(LOCTEXT("NodeColumnDefaultLabel", "Node Name"))) + SHeaderRow::Column("Asset") - .DefaultLabel(FText(LOCTEXT("BPAssetColumnDefaultLabel", ""))) 
+ .DefaultLabel(FText::GetEmpty()) ) ] ] diff --git a/Engine/Source/Editor/PropertyEditor/Private/PropertyEditorHelpers.cpp b/Engine/Source/Editor/PropertyEditor/Private/PropertyEditorHelpers.cpp index 87ee24224fe5..6d850ac336e0 100644 --- a/Engine/Source/Editor/PropertyEditor/Private/PropertyEditorHelpers.cpp +++ b/Engine/Source/Editor/PropertyEditor/Private/PropertyEditorHelpers.cpp @@ -358,6 +358,8 @@ bool SEditConditionWidget::HasEditCondition() const void SEditConditionWidget::OnEditConditionCheckChanged( ECheckBoxState CheckState ) { + FScopedTransaction EditConditionChangedTransaction(FText::Format(LOCTEXT("UpdatedEditConditionFmt", "{0} Edit Condition Changed"), PropertyEditor->GetDisplayName())); + if( PropertyEditor.IsValid() && PropertyEditor->HasEditCondition() && PropertyEditor->SupportsEditConditionToggle() ) { PropertyEditor->SetEditConditionState( CheckState == ECheckBoxState::Checked ); diff --git a/Engine/Source/Editor/SequenceRecorder/Private/SequenceRecorder.cpp b/Engine/Source/Editor/SequenceRecorder/Private/SequenceRecorder.cpp index 89f85fa8b348..14fc6455996b 100644 --- a/Engine/Source/Editor/SequenceRecorder/Private/SequenceRecorder.cpp +++ b/Engine/Source/Editor/SequenceRecorder/Private/SequenceRecorder.cpp @@ -42,6 +42,7 @@ #include "ScopedTransaction.h" #include "Features/IModularFeatures.h" #include "ILiveLinkClient.h" +#include "ScopedTransaction.h" #define LOCTEXT_NAMESPACE "SequenceRecorder" @@ -926,6 +927,8 @@ bool FSequenceRecorder::StopRecording(bool bAllowLooping) return false; } + FScopedTransaction ScopeTransaction(LOCTEXT("ProcessedRecording", "Processed Recording")); + // 1 step for the audio processing static const uint8 NumAdditionalSteps = 1; diff --git a/Engine/Source/Editor/Sequencer/Private/SSequencer.cpp b/Engine/Source/Editor/Sequencer/Private/SSequencer.cpp index c7610b330cb2..812e2a552ad1 100644 --- a/Engine/Source/Editor/Sequencer/Private/SSequencer.cpp +++ b/Engine/Source/Editor/Sequencer/Private/SSequencer.cpp @@ 
-1762,7 +1762,8 @@ FReply SSequencer::OnDrop( const FGeometry& MyGeometry, const FDragDropEvent& Dr FReply SSequencer::OnKeyDown( const FGeometry& MyGeometry, const FKeyEvent& InKeyEvent ) { // A toolkit tab is active, so direct all command processing to it - if( SequencerPtr.Pin()->GetCommandBindings()->ProcessCommandBindings( InKeyEvent ) ) + TSharedPtr SequencerPin = SequencerPtr.Pin(); + if ( SequencerPin.IsValid() && SequencerPin->GetCommandBindings()->ProcessCommandBindings( InKeyEvent ) ) { return FReply::Handled(); } diff --git a/Engine/Source/Editor/Sequencer/Private/Sequencer.cpp b/Engine/Source/Editor/Sequencer/Private/Sequencer.cpp index aa188c53917a..e2864565c794 100644 --- a/Engine/Source/Editor/Sequencer/Private/Sequencer.cpp +++ b/Engine/Source/Editor/Sequencer/Private/Sequencer.cpp @@ -482,6 +482,8 @@ void FSequencer::Close() TrackEditors.Empty(); GUnrealEd->UpdatePivotLocationForSelection(); + + OnCloseEventDelegate.Broadcast(AsShared()); } @@ -749,8 +751,13 @@ void FSequencer::SuppressAutoEvaluation(UMovieSceneSequence* Sequence, const FGu FGuid FSequencer::CreateBinding(UObject& InObject, const FString& InName) { + const FScopedTransaction Transaction(LOCTEXT("CreateBinding", "Create New Binding")); + UMovieSceneSequence* OwnerSequence = GetFocusedMovieSceneSequence(); UMovieScene* OwnerMovieScene = OwnerSequence->GetMovieScene(); + + OwnerSequence->Modify(); + OwnerMovieScene->Modify(); const FGuid PossessableGuid = OwnerMovieScene->AddPossessable(InName, InObject.GetClass()); @@ -4244,6 +4251,19 @@ bool FSequencer::OnRequestNodeDeleted( TSharedRef N return bAnythingRemoved; } +bool FSequencer::MatchesContext(const FTransactionContext& InContext, const TArray>& TransactionObjects) const +{ + // Check if we care about the undo/redo + for (const TPair& TransactionObjectPair : TransactionObjects) + { + if (TransactionObjectPair.Value.HasPendingKillChange() || TransactionObjectPair.Key->GetClass()->IsChildOf(UMovieSceneSignedObject::StaticClass())) 
+ { + return true; + } + } + return false; +} + void FSequencer::PostUndo(bool bSuccess) { NotifyMovieSceneDataChanged( EMovieSceneDataChangeType::Unknown ); @@ -5870,8 +5890,19 @@ void FSequencer::PasteCopiedTracks() TArray BindingsPasted; for (UMovieSceneCopyableBinding* CopyableBinding : ImportedBindings) - { - + { + // Clear transient flags on the imported tracks + for (UMovieSceneTrack* CopiedTrack : CopyableBinding->Tracks) + { + CopiedTrack->ClearFlags(RF_Transient); + TArray Subobjects; + GetObjectsWithOuter(CopiedTrack, Subobjects); + for (UObject* Subobject : Subobjects) + { + Subobject->ClearFlags(RF_Transient); + } + } + if (CopyableBinding->Possessable.GetGuid().IsValid()) { FGuid NewGuid = FGuid::NewGuid(); diff --git a/Engine/Source/Editor/Sequencer/Private/Sequencer.h b/Engine/Source/Editor/Sequencer/Private/Sequencer.h index 1b2150babb0a..06d92dab15cd 100644 --- a/Engine/Source/Editor/Sequencer/Private/Sequencer.h +++ b/Engine/Source/Editor/Sequencer/Private/Sequencer.h @@ -619,6 +619,7 @@ public: //~ ISequencer Interface virtual void Close() override; + virtual FOnCloseEvent& OnCloseEvent() override { return OnCloseEventDelegate; } virtual TSharedRef GetSequencerWidget() const override; virtual FMovieSceneSequenceIDRef GetRootTemplateID() const override { return ActiveTemplateIDs[0]; } virtual FMovieSceneSequenceIDRef GetFocusedTemplateID() const override { return ActiveTemplateIDs.Top(); } @@ -885,6 +886,7 @@ protected: void BindCommands(); //~ Begin FEditorUndoClient Interface + virtual bool MatchesContext(const FTransactionContext& InContext, const TArray>& TransactionObjects) const override; virtual void PostUndo(bool bSuccess) override; virtual void PostRedo(bool bSuccess) override { PostUndo(bSuccess); } // End of FEditorUndoClient @@ -1081,6 +1083,9 @@ private: /** Represents the tree of nodes to display in the animation outliner. */ TSharedRef NodeTree; + /** A delegate which is called when the sequencer closes. 
*/ + FOnCloseEvent OnCloseEventDelegate; + /** A delegate which is called any time the global time changes. */ FOnGlobalTimeChanged OnGlobalTimeChangedDelegate; diff --git a/Engine/Source/Editor/Sequencer/Public/ISequencer.h b/Engine/Source/Editor/Sequencer/Public/ISequencer.h index 0eb1b34ffd28..5a1dd7702cce 100644 --- a/Engine/Source/Editor/Sequencer/Public/ISequencer.h +++ b/Engine/Source/Editor/Sequencer/Public/ISequencer.h @@ -163,6 +163,8 @@ public: DECLARE_MULTICAST_DELEGATE_OneParam(FOnSelectionChangedSections, TArray /*Sections*/); + DECLARE_MULTICAST_DELEGATE_OneParam(FOnCloseEvent, TSharedRef); + DECLARE_MULTICAST_DELEGATE_TwoParams(FOnActorAddedToSequencer, AActor*, const FGuid); public: @@ -170,6 +172,9 @@ public: /** Close the sequencer. */ virtual void Close() = 0; + /** @return a multicast delegate which is executed when sequencer closes. */ + virtual FOnCloseEvent& OnCloseEvent() = 0; + /** @return Widget used to display the sequencer */ virtual TSharedRef GetSequencerWidget() const = 0; diff --git a/Engine/Source/Editor/StaticMeshEditor/Private/StaticMeshEditorViewportClient.cpp b/Engine/Source/Editor/StaticMeshEditor/Private/StaticMeshEditorViewportClient.cpp index 1fdd973d6ca8..921cf2da5dc2 100644 --- a/Engine/Source/Editor/StaticMeshEditor/Private/StaticMeshEditorViewportClient.cpp +++ b/Engine/Source/Editor/StaticMeshEditor/Private/StaticMeshEditorViewportClient.cpp @@ -846,6 +846,8 @@ void FStaticMeshEditorViewportClient::DrawCanvas( FViewport& InViewport, FSceneV const int32 YPos = 160; DrawUVsForMesh(Viewport, &Canvas, YPos); } + + FEditorViewportClient::DrawCanvas(InViewport, View, Canvas); } void FStaticMeshEditorViewportClient::DrawUVsForMesh(FViewport* InViewport, FCanvas* InCanvas, int32 InTextYPos ) diff --git a/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProvider.cpp b/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProvider.cpp index 471af78dce88..187bc6b19ec8 100644 --- 
a/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProvider.cpp +++ b/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProvider.cpp @@ -41,8 +41,7 @@ void STimecodeProvider::Construct(const FArguments& InArgs) TSharedRef FrameRateWidget = InArgs._DisplayFrameRate ? SNew(STextBlock) .Text(MakeAttributeLambda([this] { - UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider(); - if (TimecodeProviderPtr) + if (const UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider()) { return TimecodeProviderPtr->GetFrameRate().ToPrettyText(); } @@ -73,8 +72,7 @@ void STimecodeProvider::Construct(const FArguments& InArgs) SNew(STextBlock) .Text(MakeAttributeLambda([this] { - UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider(); - if (TimecodeProviderPtr) + if (const UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider()) { return FText::FromName(TimecodeProviderPtr->GetFName()); } @@ -98,8 +96,7 @@ void STimecodeProvider::Construct(const FArguments& InArgs) SNew(STimecode) .Timecode(MakeAttributeLambda([this] { - UTimecodeProvider* OverrideTimecodeProviderPtr = OverrideTimecodeProvider.Get().Get(); - if (OverrideTimecodeProviderPtr) + if (const UTimecodeProvider* OverrideTimecodeProviderPtr = OverrideTimecodeProvider.Get().Get()) { return OverrideTimecodeProviderPtr->GetTimecode(); } @@ -113,9 +110,9 @@ void STimecodeProvider::Construct(const FArguments& InArgs) ]; } -UTimecodeProvider* STimecodeProvider::GetTimecodeProvider() const +const UTimecodeProvider* STimecodeProvider::GetTimecodeProvider() const { - UTimecodeProvider* TimecodeProviderPtr = OverrideTimecodeProvider.Get().Get(); + const UTimecodeProvider* TimecodeProviderPtr = OverrideTimecodeProvider.Get().Get(); if (!TimecodeProviderPtr) { TimecodeProviderPtr = GEngine->GetTimecodeProvider(); @@ -127,8 +124,7 @@ UTimecodeProvider* STimecodeProvider::GetTimecodeProvider() const FSlateColor STimecodeProvider::HandleIconColorAndOpacity() const { FSlateColor Result = 
FSlateColor::UseForeground(); - UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider(); - if (TimecodeProviderPtr) + if (const UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider()) { ETimecodeProviderSynchronizationState State = TimecodeProviderPtr->GetSynchronizationState(); switch (State) @@ -152,8 +148,7 @@ const FSlateBrush* STimecodeProvider::HandleIconImage() const { const FSlateBrush* Result = nullptr; - UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider(); - if (TimecodeProviderPtr) + if (const UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider()) { ETimecodeProviderSynchronizationState State = TimecodeProviderPtr->GetSynchronizationState(); switch (State) @@ -183,8 +178,7 @@ EVisibility STimecodeProvider::HandleImageVisibility() const EVisibility STimecodeProvider::HandleThrobberVisibility() const { - UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider(); - if (TimecodeProviderPtr) + if (const UTimecodeProvider* TimecodeProviderPtr = GetTimecodeProvider()) { ETimecodeProviderSynchronizationState State = TimecodeProviderPtr->GetSynchronizationState(); if (State == ETimecodeProviderSynchronizationState::Synchronizing) diff --git a/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProviderTab.cpp b/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProviderTab.cpp index 973e1b022f09..3d448662683d 100644 --- a/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProviderTab.cpp +++ b/Engine/Source/Editor/TimeManagementEditor/Private/STimecodeProviderTab.cpp @@ -30,17 +30,6 @@ namespace TimecodeProviderTab SNew(STimecodeProviderTab) ]; } - - FReply ApplyTimecodeProvider(UTimecodeProvider* InTimecodeProvider) - { - if (GEngine) - { - GEngine->SetTimecodeProvider(nullptr); - GEngine->SetTimecodeProvider(InTimecodeProvider); - } - - return FReply::Handled(); - } } void STimecodeProviderTab::RegisterNomadTabSpawner() @@ -66,7 +55,6 @@ void STimecodeProviderTab::Construct(const FArguments& InArgs) 
.ContentPadding(0) .ButtonStyle(&FCoreStyle::Get(), "ToolBar.Button") .ForegroundColor(FCoreStyle::Get().GetSlateColor("DefaultForeground")) - //.Visibility_Lambda([]() { return (GEngine && GEngine->GetTimecodeProvider() != nullptr) ? EVisibility::Visible : EVisibility::Hidden; } ) .ButtonContent() [ SNullWidget::NullWidget @@ -83,11 +71,11 @@ void STimecodeProviderTab::Construct(const FArguments& InArgs) .IsEnabled(FSlateApplication::Get().GetNormalExecutionAttribute()) [ SNew(SBorder) - //.Padding(ToolBarPadding) .BorderImage(FEditorStyle::GetBrush("ToolPanel.GroupBorder")) [ SNew(SVerticalBox) + SVerticalBox::Slot() + .AutoHeight() [ SNew(SHorizontalBox) + SHorizontalBox::Slot() @@ -122,7 +110,7 @@ TSharedRef STimecodeProviderTab::OnGetMenuContent() LOCTEXT("ReapplyMenuLabel", "Reinitialize"), LOCTEXT("ReapplyMenuToolTip", "Reinitialize the current Timecode Provider."), FSlateIcon(), - FUIAction(FExecuteAction::CreateLambda([](){ TimecodeProviderTab::ApplyTimecodeProvider(GEngine->GetTimecodeProvider()); })) + FUIAction(FExecuteAction::CreateUObject(GEngine, &UEngine::ReinitializeTimecodeProvider)) ); MenuBuilder.EndSection(); diff --git a/Engine/Source/Editor/TimeManagementEditor/Private/TimeManagementEditorModule.cpp b/Engine/Source/Editor/TimeManagementEditor/Private/TimeManagementEditorModule.cpp index a8d43b2860bd..fced6ebc4cf8 100644 --- a/Engine/Source/Editor/TimeManagementEditor/Private/TimeManagementEditorModule.cpp +++ b/Engine/Source/Editor/TimeManagementEditor/Private/TimeManagementEditorModule.cpp @@ -7,9 +7,9 @@ #include "STimecodeProviderTab.h" /** - * Timecode Provider Editor module + * Time Management Editor module */ -class FTimecodeProviderEditorModule : public IModuleInterface +class FTimeManagementEditorModule : public IModuleInterface { public: //~ IModuleInterface interface @@ -24,4 +24,4 @@ public: } }; -IMPLEMENT_MODULE(FTimecodeProviderEditorModule, TimecodeProviderEditorModule); \ No newline at end of file 
+IMPLEMENT_MODULE(FTimeManagementEditorModule, TimeManagementEditorModule); \ No newline at end of file diff --git a/Engine/Source/Editor/TimeManagementEditor/Public/STimecodeProvider.h b/Engine/Source/Editor/TimeManagementEditor/Public/STimecodeProvider.h index 70dd507e10b1..0c81d8eff867 100644 --- a/Engine/Source/Editor/TimeManagementEditor/Public/STimecodeProvider.h +++ b/Engine/Source/Editor/TimeManagementEditor/Public/STimecodeProvider.h @@ -69,7 +69,8 @@ public: void Construct(const FArguments& InArgs); private: - UTimecodeProvider* GetTimecodeProvider() const; + + const UTimecodeProvider* GetTimecodeProvider() const; FSlateColor HandleIconColorAndOpacity() const; const FSlateBrush* HandleIconImage() const; EVisibility HandleImageVisibility() const; diff --git a/Engine/Source/Editor/UnrealEd/Classes/Editor/EditorEngine.h b/Engine/Source/Editor/UnrealEd/Classes/Editor/EditorEngine.h index 6552e1c0be64..77db4644d41a 100644 --- a/Engine/Source/Editor/UnrealEd/Classes/Editor/EditorEngine.h +++ b/Engine/Source/Editor/UnrealEd/Classes/Editor/EditorEngine.h @@ -528,6 +528,10 @@ public: UPROPERTY() uint32 bSquelchTransactionNotification:1; + /** True if we should force a selection change notification during an undo/redo */ + UPROPERTY() + uint32 bNotifyUndoRedoSelectionChange:1; + /** The PlayerStart class used when spawning the player at the current camera location. */ UPROPERTY() TSubclassOf PlayFromHerePlayerStartClass; @@ -967,11 +971,11 @@ public: void ExecFile( UWorld* InWorld, const TCHAR* InFilename, FOutputDevice& Ar ); //~ Begin Transaction Interfaces. 
- int32 BeginTransaction(const TCHAR* SessionContext, const FText& Description, UObject* PrimaryObject); + virtual int32 BeginTransaction(const TCHAR* TransactionContext, const FText& Description, UObject* PrimaryObject) override; int32 BeginTransaction(const FText& Description); - int32 EndTransaction(); + virtual int32 EndTransaction() override; + virtual void CancelTransaction(int32 Index) override; void ResetTransaction(const FText& Reason); - void CancelTransaction(int32 Index); bool UndoTransaction(bool bCanRedo = true); bool RedoTransaction(); bool IsTransactionActive() const; @@ -1138,7 +1142,7 @@ public: */ virtual void TakeHighResScreenShots(){} - virtual void NoteSelectionChange() {} + virtual void NoteSelectionChange(bool bNotify = true) {} /** * Adds an actor to the world at the specified location. @@ -2717,11 +2721,8 @@ private: ULevel* CreateTransLevelMoveBuffer( UWorld* InWorld ); - /** Broadcasts that an undo has just occurred. */ - void BroadcastPostUndo(const FString& UndoContext, UObject* PrimaryObject, bool bUndoSuccess); - - /** Broadcasts that an redo has just occurred. */ - void BroadcastPostRedo(const FString& RedoContext, UObject* PrimaryObject, bool bRedoSuccess); + /** Broadcasts that an undo or redo has just occurred. */ + void BroadcastPostUndoRedo(const FTransactionContext& UndoContext, bool bWasUndo); /** Helper function to show undo/redo notifications */ void ShowUndoRedoNotification(const FText& NotificationText, bool bSuccess); @@ -2754,16 +2755,41 @@ private: void HandleSettingChanged( FName Name ); /** Callback for handling undo and redo transactions before they happen. */ - void HandleTransactorBeforeRedoUndo( FUndoSessionContext SessionContext ); + void HandleTransactorBeforeRedoUndo(const FTransactionContext& TransactionContext); + + /** Common code for finished undo and redo transactions. 
*/ + void HandleTransactorRedoUndo(const FTransactionContext& TransactionContext, bool Succeeded, bool WasUndo); /** Callback for finished redo transactions. */ - void HandleTransactorRedo( FUndoSessionContext SessionContext, bool Succeeded ); + void HandleTransactorRedo(const FTransactionContext& TransactionContext, bool Succeeded); /** Callback for finished undo transactions. */ - void HandleTransactorUndo( FUndoSessionContext SessionContext, bool Succeeded ); + void HandleTransactorUndo(const FTransactionContext& TransactionContext, bool Succeeded); + +public: + /** Callback for object changes during undo/redo. */ + void HandleObjectTransacted(UObject* InObject, const class FTransactionObjectEvent& InTransactionObjectEvent); private: + /** Internal struct to hold undo/redo transaction object context */ + struct FTransactionDeltaContext + { + FGuid OuterOperationId; + int32 OperationDepth; + TArray> TransactionObjects; + + void Reset() + { + OuterOperationId.Invalidate(); + TransactionObjects.Empty(); + OperationDepth = 0; + } + + FTransactionDeltaContext() = default; + }; + FTransactionDeltaContext CurrentUndoRedoContext; + /** Delegate broadcast just before a blueprint is compiled */ FBlueprintPreCompileEvent BlueprintPreCompileEvent; diff --git a/Engine/Source/Editor/UnrealEd/Classes/Editor/TransBuffer.h b/Engine/Source/Editor/UnrealEd/Classes/Editor/TransBuffer.h index c062522c8194..fcb140679350 100644 --- a/Engine/Source/Editor/UnrealEd/Classes/Editor/TransBuffer.h +++ b/Engine/Source/Editor/UnrealEd/Classes/Editor/TransBuffer.h @@ -128,7 +128,8 @@ protected: UndoBuffer.Emplace(MakeShareable(new TTransaction(SessionContext, Description, 1))); GUndo = &UndoBuffer.Last().Get(); - UndoBufferChangedDelegate.Broadcast(); + GUndo->BeginOperation(); + TransactionStateChangedDelegate.Broadcast(GUndo->GetContext(), ETransactionStateEventType::TransactionStarted); } const int32 PriorRecordsCount = (Result > 0 ? 
ActiveRecordCounts[Result - 1] : 0); ActiveRecordCounts.Add(UndoBuffer.Last()->GetRecordCount() - PriorRecordsCount); @@ -149,10 +150,10 @@ public: virtual bool CanRedo( FText* Text=NULL ) override; virtual int32 GetQueueLength( ) const override { return UndoBuffer.Num(); } virtual const FTransaction* GetTransaction( int32 QueueIndex ) const override; - virtual FUndoSessionContext GetUndoContext( bool bCheckWhetherUndoPossible = true ) override; + virtual FTransactionContext GetUndoContext( bool bCheckWhetherUndoPossible = true ) override; virtual SIZE_T GetUndoSize() const override; virtual int32 GetUndoCount( ) const override { return UndoCount; } - virtual FUndoSessionContext GetRedoContext() override; + virtual FTransactionContext GetRedoContext() override; virtual void SetUndoBarrier() override; virtual void RemoveUndoBarrier() override; virtual void ClearUndoBarriers() override; @@ -164,7 +165,7 @@ public: virtual void SetPrimaryUndoObject( UObject* Object ) override; virtual bool IsObjectInTransationBuffer( const UObject* Object ) const override; virtual bool IsObjectTransacting(const UObject* Object) const override; - virtual bool ContainsPieObject() const override; + virtual bool ContainsPieObjects() const override; virtual bool IsActive() override { return ActiveCount > 0; @@ -174,6 +175,17 @@ public: public: + /** + * Gets an event delegate that is executed when a transaction state changes. + * + * @return The event delegate. + */ + DECLARE_EVENT_TwoParams(UTransBuffer, FOnTransactorTransactionStateChanged, const FTransactionContext& /*TransactionContext*/, ETransactionStateEventType /*TransactionState*/) + FOnTransactorTransactionStateChanged& OnTransactionStateChanged( ) + { + return TransactionStateChangedDelegate; + } + /** * Gets an event delegate that is executed when a redo operation is being attempted. 
* @@ -181,7 +193,7 @@ public: * * @see OnUndo */ - DECLARE_EVENT_OneParam(UTransBuffer, FOnTransactorBeforeRedoUndo, FUndoSessionContext /*RedoContext*/) + DECLARE_EVENT_OneParam(UTransBuffer, FOnTransactorBeforeRedoUndo, const FTransactionContext& /*TransactionContext*/) FOnTransactorBeforeRedoUndo& OnBeforeRedoUndo( ) { return BeforeRedoUndoDelegate; @@ -194,7 +206,7 @@ public: * * @see OnUndo */ - DECLARE_EVENT_TwoParams(UTransBuffer, FOnTransactorRedo, FUndoSessionContext /*RedoContext*/, bool /*Succeeded*/) + DECLARE_EVENT_TwoParams(UTransBuffer, FOnTransactorRedo, const FTransactionContext& /*TransactionContext*/, bool /*Succeeded*/) FOnTransactorRedo& OnRedo( ) { return RedoDelegate; @@ -207,7 +219,7 @@ public: * * @see OnRedo */ - DECLARE_EVENT_TwoParams(UTransBuffer, FOnTransactorUndo, FUndoSessionContext /*RedoContext*/, bool /*Succeeded*/) + DECLARE_EVENT_TwoParams(UTransBuffer, FOnTransactorUndo, const FTransactionContext& /*TransactionContext*/, bool /*Succeeded*/) FOnTransactorUndo& OnUndo( ) { return UndoDelegate; @@ -233,6 +245,9 @@ private: private: + // Holds an event delegate that is executed when a transaction state changes. + FOnTransactorTransactionStateChanged TransactionStateChangedDelegate; + // Holds an event delegate that is executed before a redo or undo operation is attempted. 
FOnTransactorBeforeRedoUndo BeforeRedoUndoDelegate; diff --git a/Engine/Source/Editor/UnrealEd/Classes/Editor/Transactor.h b/Engine/Source/Editor/UnrealEd/Classes/Editor/Transactor.h index 415a2e7aeeeb..79986b9bf9fb 100644 --- a/Engine/Source/Editor/UnrealEd/Classes/Editor/Transactor.h +++ b/Engine/Source/Editor/UnrealEd/Classes/Editor/Transactor.h @@ -15,30 +15,6 @@ #include "Transactor.generated.h" -/*----------------------------------------------------------------------------- - FUndoSessionContext ------------------------------------------------------------------------------*/ - -/** - * Convenience struct for passing around undo/redo context - */ -struct FUndoSessionContext -{ - FUndoSessionContext() - : Title(), Context(TEXT("")), PrimaryObject(nullptr) - {} - FUndoSessionContext (const TCHAR* InContext, const FText& InSessionTitle, UObject* InPrimaryObject) - : Title(InSessionTitle), Context(InContext), PrimaryObject(InPrimaryObject) - {} - - /** Descriptive title of the undo/redo session */ - FText Title; - /** The context that generated the undo/redo session */ - FString Context; - /** The primary UObject for the context (if any). */ - UObject* PrimaryObject; -}; - /*----------------------------------------------------------------------------- FTransaction. 
-----------------------------------------------------------------------------*/ @@ -128,24 +104,7 @@ protected: { const int32 NumProperties = InPropertyChain.GetNumProperties(); check(NumProperties > 0); - - if (NumProperties == 1) - { - return InPropertyChain.GetPropertyFromRoot(0)->GetFName(); - } - else - { - FString FullPropertyName; - for (int32 PropertyIndex = 0; PropertyIndex < NumProperties; ++PropertyIndex) - { - if (PropertyIndex > 0) - { - FullPropertyName += TEXT('.'); - } - FullPropertyName += InPropertyChain.GetPropertyFromRoot(PropertyIndex)->GetName(); - } - return *FullPropertyName; - } + return InPropertyChain.GetPropertyFromRoot(0)->GetFName(); } void AppendSerializedData(const int32 InOffset, const int32 InSize) @@ -181,7 +140,7 @@ protected: ObjectPathName = *InObject->GetPathName(); ObjectOuterPathName = InObject->GetOuter() ? FName(*InObject->GetOuter()->GetPathName()) : FName(); bIsPendingKill = InObject->IsPendingKill(); - ObjectAnnotation = InObject->GetTransactionAnnotation(); + ObjectAnnotation = InObject->FindOrCreateTransactionAnnotation(); } void Reset() @@ -195,6 +154,7 @@ protected: ReferencedNames.Reset(); SerializedProperties.Reset(); SerializedObjectIndices.Reset(); + SerializedNameIndices.Reset(); ObjectAnnotation.Reset(); } @@ -209,6 +169,7 @@ protected: Exchange(ReferencedNames, Other.ReferencedNames); Exchange(SerializedProperties, Other.SerializedProperties); Exchange(SerializedObjectIndices, Other.SerializedObjectIndices); + Exchange(SerializedNameIndices, Other.SerializedNameIndices); Exchange(ObjectAnnotation, Other.ObjectAnnotation); } @@ -230,6 +191,8 @@ protected: TMap SerializedProperties; /** Information about the object pointer offsets that were serialized within this object (this maps the property name (or None if there was no property) to the ReferencedObjects indices of the property) */ TMultiMap SerializedObjectIndices; + /** Information about the name offsets that were serialized within this object (this maps 
the property name to the ReferencedNames index of the property) */ + TMultiMap SerializedNameIndices; /** Annotation data for the object stored externally */ TSharedPtr ObjectAnnotation; }; @@ -290,7 +253,7 @@ protected: void Restore( FTransaction* Owner ); void Save( FTransaction* Owner ); void Load( FTransaction* Owner ); - void Finalize( FTransaction* Owner ); + void Finalize( FTransaction* Owner, TSharedPtr& OutFinalizedObjectAnnotation ); void Snapshot( FTransaction* Owner ); static void Diff( FTransaction* Owner, const FSerializedObject& OldSerializedObect, const FSerializedObject& NewSerializedObject, FTransactionObjectDeltaChange& OutDeltaChange ); @@ -461,8 +424,16 @@ protected: FArchive& operator<<( class FName& N ) override { int32 NameIndex = SerializedObject.ReferencedNames.AddUnique(N); + + // Track this name index in the serialized data + { + const FArchiveSerializedPropertyChain* PropertyChain = GetSerializedPropertyChain(); + const FName SerializedTaggedPropertyKey = CachedSerializedTaggedPropertyKey.SyncCache(PropertyChain); + SerializedObject.SerializedNameIndices.Add(SerializedTaggedPropertyKey, NameIndex); + } + return (FArchive&)*this << NameIndex; - } + } FArchive& operator<<( class UObject*& Res ) override { int32 ObjectIndex = INDEX_NONE; @@ -497,6 +468,16 @@ protected: /** List of object records in this transaction */ TArray Records; + /** Unique identifier for this transaction, used to track it during its lifetime */ + FGuid Id; + + /** + * Unique identifier for the active operation on this transaction (if any). + * This is set by a call to BeginOperation and cleared by a call to EndOperation. + * BeginOperation should be called when a transaction or undo/redo starts, and EndOperation should be called when a transaction is finalized or canceled or undo/redo ends. + */ + FGuid OperationId; + /** Description of the transaction. Can be used by UI */ FText Title; @@ -537,7 +518,8 @@ protected: public: // Constructor. 
FTransaction( const TCHAR* InContext=nullptr, const FText& InTitle=FText(), bool bInFlip=false ) - : Title( InTitle ) + : Id( FGuid::NewGuid() ) + , Title( InTitle ) , Context( InContext ) , PrimaryObject(nullptr) , bFlip(bInFlip) @@ -562,6 +544,12 @@ public: virtual void SetPrimaryObject(UObject* InObject) override; virtual void SnapshotObject( UObject* InObject ) override; + /** BeginOperation should be called when a transaction or undo/redo starts */ + virtual void BeginOperation() override; + + /** EndOperation should be called when a transaction is finalized or canceled or undo/redo ends */ + virtual void EndOperation() override; + /** * Enacts the transaction. */ @@ -572,6 +560,14 @@ public: */ virtual void Finalize() override; + /** + * Gets the full context for the transaction. + */ + virtual FTransactionContext GetContext() const override + { + return FTransactionContext(Id, OperationId, Title, *Context, PrimaryObject); + } + /** Returns a unique string to serve as a type ID for the FTranscationBase-derived type. */ virtual const TCHAR* GetTransactionType() const { @@ -581,22 +577,28 @@ public: // FTransaction interface. SIZE_T DataSize() const; + /** Returns the unique identifier for this transaction, used to track it during its lifetime */ + FGuid GetId() const + { + return Id; + } + + /** Returns the unique identifier for the active operation on this transaction (if any) */ + FGuid GetOperationId() const + { + return OperationId; + } + /** Returns the descriptive text for the transaction */ FText GetTitle() const { return Title; } - /** Gets the full context for the transaction */ - FUndoSessionContext GetContext() const - { - return FUndoSessionContext(*Context, Title, PrimaryObject); - } - /** Serializes a reference to a transaction in a given archive. 
*/ friend FArchive& operator<<( FArchive& Ar, FTransaction& T ) { - return Ar << T.Records << T.Title << T.ObjectMap << T.Context << T.PrimaryObject; + return Ar << T.Records << T.Id << T.Title << T.ObjectMap << T.Context << T.PrimaryObject; } /** Serializes a reference to a transaction in a given archive. */ @@ -619,7 +621,7 @@ public: const UObject* GetPrimaryObject() const { return PrimaryObject; } /** @return True if this record contains a reference to a pie object */ - bool ContainsPieObject() const; + virtual bool ContainsPieObjects() const override; /** Checks if a specific object is in the transaction currently underway */ bool IsObjectTransacting(const UObject* Object) const; @@ -715,7 +717,7 @@ class UNREALED_API UTransactor : public UObject * * @return A read-only pointer to the transaction, or NULL if it does not exist. */ - virtual const FTransaction* GetTransaction( int32 QueueIndex ) const PURE_VIRTUAL(UTransactor::GetQueueEntry,return nullptr;); + virtual const FTransaction* GetTransaction( int32 QueueIndex ) const PURE_VIRTUAL(UTransactor::GetTransaction,return nullptr;); /** * Returns the description of the undo action that will be performed next. @@ -725,7 +727,7 @@ class UNREALED_API UTransactor : public UObject * * @return text describing the next undo transaction */ - virtual FUndoSessionContext GetUndoContext ( bool bCheckWhetherUndoPossible = true ) PURE_VIRTUAL(UTransactor::GetUndoDesc,return FUndoSessionContext();); + virtual FTransactionContext GetUndoContext( bool bCheckWhetherUndoPossible = true ) PURE_VIRTUAL(UTransactor::GetUndoContext,return FTransactionContext();); /** * Determines the amount of data currently stored by the transaction buffer. 
@@ -747,7 +749,7 @@ class UNREALED_API UTransactor : public UObject * * @return text describing the next redo transaction */ - virtual FUndoSessionContext GetRedoContext () PURE_VIRTUAL(UTransactor::GetRedoDesc,return FUndoSessionContext();); + virtual FTransactionContext GetRedoContext() PURE_VIRTUAL(UTransactor::GetRedoContext,return FTransactionContext();); /** * Sets an undo barrier at the current point in the transaction buffer. @@ -812,5 +814,5 @@ class UNREALED_API UTransactor : public UObject virtual bool IsObjectTransacting(const UObject* Object) const PURE_VIRTUAL(UTransactor::IsObjectTransacting, return false;); /** @return True if this record contains a reference to a pie object */ - virtual bool ContainsPieObject() const { return false; } + virtual bool ContainsPieObjects() const { return false; } }; diff --git a/Engine/Source/Editor/UnrealEd/Classes/Editor/UnrealEdEngine.h b/Engine/Source/Editor/UnrealEd/Classes/Editor/UnrealEdEngine.h index 47506f6e3b1a..77e4e111f606 100644 --- a/Engine/Source/Editor/UnrealEd/Classes/Editor/UnrealEdEngine.h +++ b/Engine/Source/Editor/UnrealEd/Classes/Editor/UnrealEdEngine.h @@ -212,7 +212,7 @@ public: virtual void SelectBSPSurf(UModel* InModel, int32 iSurf, bool bSelected, bool bNoteSelectionChange) override; virtual void SelectNone(bool bNoteSelectionChange, bool bDeselectBSPSurfs, bool WarnAboutManyActors=true) override; virtual void DeselectAllSurfaces() override; - virtual void NoteSelectionChange() override; + virtual void NoteSelectionChange(bool bNotify = true) override; virtual void NoteActorMovement() override; virtual void FinishAllSnaps() override; virtual void Cleanse( bool ClearSelection, bool Redraw, const FText& Reason ) override; diff --git a/Engine/Source/Editor/UnrealEd/Classes/UserDefinedStructure/UserDefinedStructEditorData.h b/Engine/Source/Editor/UnrealEd/Classes/UserDefinedStructure/UserDefinedStructEditorData.h index 249f029a9bf0..7065428240ba 100644 --- 
a/Engine/Source/Editor/UnrealEd/Classes/UserDefinedStructure/UserDefinedStructEditorData.h +++ b/Engine/Source/Editor/UnrealEd/Classes/UserDefinedStructure/UserDefinedStructEditorData.h @@ -126,7 +126,7 @@ public: public: // UObject interface. - virtual TSharedPtr GetTransactionAnnotation() const override; + virtual TSharedPtr FactoryTransactionAnnotation(const ETransactionAnnotationCreationMode InCreationMode) const override; virtual void PostEditUndo() override; virtual void PostEditUndo(TSharedPtr TransactionAnnotation) override; virtual void PostLoadSubobjects(struct FObjectInstancingGraph* OuterInstanceGraph) override; diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorEngine.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorEngine.cpp index 8cb106cf9b9d..e5cdfabe3af6 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorEngine.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorEngine.cpp @@ -342,6 +342,8 @@ UEditorEngine::UEditorEngine(const FObjectInitializer& ObjectInitializer) NumOnlinePIEInstances = 0; DefaultWorldFeatureLevel = GMaxRHIFeatureLevel; + bNotifyUndoRedoSelectionChange = true; + EditorWorldExtensionsManager = nullptr; ActorGroupingUtilsClassName = UActorGroupingUtils::StaticClass(); @@ -2750,7 +2752,10 @@ void UEditorEngine::ApplyDeltaToActor(AActor* InActor, // Update the actor before leaving. 
InActor->MarkPackageDirty(); - InActor->InvalidateLightingCacheDetailed(bTranslationOnly); + if (!GIsDemoMode) + { + InActor->InvalidateLightingCacheDetailed(bTranslationOnly); + } InActor->PostEditMove( false ); } diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorSelectUtils.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorSelectUtils.cpp index c7f2ed228b02..3efeb247f8da 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorSelectUtils.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorSelectUtils.cpp @@ -383,7 +383,7 @@ void UUnrealEdEngine::UpdatePivotLocationForSelection( bool bOnChange ) -void UUnrealEdEngine::NoteSelectionChange() +void UUnrealEdEngine::NoteSelectionChange(bool bNotify) { // The selection changed, so make sure the pivot (widget) is located in the right place UpdatePivotLocationForSelection( true ); @@ -399,8 +399,11 @@ void UUnrealEdEngine::NoteSelectionChange() } const bool bComponentSelectionChanged = GetSelectedComponentCount() > 0; - USelection* Selection = bComponentSelectionChanged ? GetSelectedComponents() : GetSelectedActors(); - USelection::SelectionChangedEvent.Broadcast(Selection); + if (bNotify) + { + USelection* Selection = bComponentSelectionChanged ? 
GetSelectedComponents() : GetSelectedActors(); + USelection::SelectionChangedEvent.Broadcast(Selection); + } if (!bComponentSelectionChanged) { diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorServer.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorServer.cpp index 8bbd365b4513..d3f4bbcf4659 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorServer.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorServer.cpp @@ -1088,7 +1088,7 @@ int32 UEditorEngine::BeginTransaction(const TCHAR* TransactionContext, const FTe { int32 Index = INDEX_NONE; - if (Trans && !bIsSimulatingInEditor) + if (Trans) { // generate transaction context Index = Trans->Begin(TransactionContext, Description); @@ -1105,7 +1105,8 @@ int32 UEditorEngine::BeginTransaction(const FText& Description) int32 UEditorEngine::EndTransaction() { int32 Index = INDEX_NONE; - if (Trans && !bIsSimulatingInEditor) + + if (Trans) { Index = Trans->End(); } @@ -1113,14 +1114,6 @@ int32 UEditorEngine::EndTransaction() return Index; } -void UEditorEngine::ResetTransaction(const FText& Reason) -{ - if (Trans) - { - Trans->Reset( Reason ); - } -} - void UEditorEngine::CancelTransaction(int32 Index) { if (Trans) @@ -1129,6 +1122,14 @@ void UEditorEngine::CancelTransaction(int32 Index) } } +void UEditorEngine::ResetTransaction(const FText& Reason) +{ + if (Trans) + { + Trans->Reset(Reason); + } +} + void UEditorEngine::ShowUndoRedoNotification(const FText& NotificationText, bool bSuccess) { // Add a new notification item only if the previous one has expired or is otherwise done fading out (CS_None). This way multiple undo/redo notifications do not pollute the notification window. 
@@ -1152,7 +1153,7 @@ void UEditorEngine::ShowUndoRedoNotification(const FText& NotificationText, bool } } -void UEditorEngine::HandleTransactorBeforeRedoUndo( FUndoSessionContext SessionContext ) +void UEditorEngine::HandleTransactorBeforeRedoUndo(const FTransactionContext& TransactionContext) { //Get the list of all selected actors before the undo/redo is performed OldSelectedActors.Empty(); @@ -1169,34 +1170,60 @@ void UEditorEngine::HandleTransactorBeforeRedoUndo( FUndoSessionContext SessionC auto Component = CastChecked(*It); OldSelectedComponents.Add(Component); } -} -void UEditorEngine::HandleTransactorRedo( FUndoSessionContext SessionContext, bool Succeeded ) -{ - NoteSelectionChange(); - PostUndo(Succeeded); - - BroadcastPostRedo(SessionContext.Context, SessionContext.PrimaryObject, Succeeded); - InvalidateAllViewportsAndHitProxies(); - if (!bSquelchTransactionNotification) + // Before an undo, store the current operation and hook on object transaction, if we do not have an outer operation already + if (CurrentUndoRedoContext.OperationDepth++ == 0) { - ShowUndoRedoNotification(FText::Format(NSLOCTEXT("UnrealEd", "RedoMessageFormat", "Redo: {0}"), SessionContext.Title), Succeeded); + check(!CurrentUndoRedoContext.OuterOperationId.IsValid()); + CurrentUndoRedoContext.OuterOperationId = TransactionContext.OperationId; + FCoreUObjectDelegates::OnObjectTransacted.AddUObject(this, &UEditorEngine::HandleObjectTransacted); } } -void UEditorEngine::HandleTransactorUndo( FUndoSessionContext SessionContext, bool Succeeded ) +void UEditorEngine::HandleTransactorRedoUndo(const FTransactionContext& TransactionContext, bool Succeeded, bool WasUndo) { - NoteSelectionChange(); + NoteSelectionChange(bNotifyUndoRedoSelectionChange); PostUndo(Succeeded); - BroadcastPostUndo(SessionContext.Context, SessionContext.PrimaryObject, Succeeded); - InvalidateAllViewportsAndHitProxies(); + // Broadcast only if you have an actual transaction context + if (Succeeded) + { + 
check(CurrentUndoRedoContext.OuterOperationId.IsValid() && CurrentUndoRedoContext.OperationDepth > 0); + BroadcastPostUndoRedo(TransactionContext, WasUndo); + + if (--CurrentUndoRedoContext.OperationDepth == 0) + { + // Undo/Redo is done clear out operation + check(CurrentUndoRedoContext.OuterOperationId == TransactionContext.OperationId); + CurrentUndoRedoContext.Reset(); + FCoreUObjectDelegates::OnObjectTransacted.RemoveAll(this); + } + } + if (!bSquelchTransactionNotification) { - ShowUndoRedoNotification(FText::Format(NSLOCTEXT("UnrealEd", "UndoMessageFormat", "Undo: {0}"), SessionContext.Title), Succeeded); + const FText UndoRedoMessage = WasUndo ? NSLOCTEXT("UnrealEd", "UndoMessageFormat", "Undo: {0}") : NSLOCTEXT("UnrealEd", "RedoMessageFormat", "Redo: {0}"); + ShowUndoRedoNotification(FText::Format(UndoRedoMessage, TransactionContext.Title), Succeeded); } } +void UEditorEngine::HandleTransactorRedo(const FTransactionContext& TransactionContext, bool Succeeded) +{ + HandleTransactorRedoUndo(TransactionContext, Succeeded, /*WasUndo*/false); +} + +void UEditorEngine::HandleTransactorUndo(const FTransactionContext& TransactionContext, bool Succeeded) +{ + HandleTransactorRedoUndo(TransactionContext, Succeeded, /*WasUndo*/true); +} + +void UEditorEngine::HandleObjectTransacted(UObject* InObject, const FTransactionObjectEvent& InTransactionObjectEvent) +{ + check(CurrentUndoRedoContext.OuterOperationId.IsValid() && CurrentUndoRedoContext.OperationDepth > 0); + check(InTransactionObjectEvent.GetEventType() == ETransactionObjectEventType::UndoRedo); + CurrentUndoRedoContext.TransactionObjects.Add(TPair{ InObject, InTransactionObjectEvent }); +} + bool UEditorEngine::AreEditorAnalyticsEnabled() const { return GetDefault()->bSendUsageData; @@ -1235,7 +1262,7 @@ UTransactor* UEditorEngine::CreateTrans() return TransBuffer; } -void UEditorEngine::PostUndo(bool bSuccess) +void UEditorEngine::PostUndo(bool) { // Cache any Actor that needs to be re-instanced because it 
still points to a REINST_ class TMap< UClass*, UClass* > OldToNewClassMapToReinstance; @@ -5077,7 +5104,7 @@ bool UEditorEngine::Exec_Transaction(const TCHAR* Str, FOutputDevice& Ar) return true; } -void UEditorEngine::BroadcastPostUndo(const FString& Context, UObject* PrimaryObject, bool bUndoSuccess ) +void UEditorEngine::BroadcastPostUndoRedo(const FTransactionContext& UndoContext, bool bWasUndo) { // This sanitization code can be removed once blueprint ::Conform(ImplementedEvents/ImplementedInterfaces) // functions have been fixed. For the time being it improves editor stability, though: @@ -5086,25 +5113,16 @@ void UEditorEngine::BroadcastPostUndo(const FString& Context, UObject* PrimaryOb for (auto UndoIt = UndoClients.CreateIterator(); UndoIt; ++UndoIt) { FEditorUndoClient* Client = *UndoIt; - if (Client && Client->MatchesContext(Context, PrimaryObject)) + if (Client && Client->MatchesContext(UndoContext, CurrentUndoRedoContext.TransactionObjects)) { - Client->PostUndo( bUndoSuccess ); - } - } -} - -void UEditorEngine::BroadcastPostRedo(const FString& Context, UObject* PrimaryObject, bool bRedoSuccess ) -{ - // This sanitization code can be removed once blueprint ::Conform(ImplementedEvents/ImplementedInterfaces) - // functions have been fixed. 
For the time being it improves editor stability, though: - UEdGraphPin::SanitizePinsPostUndoRedo(); - - for (auto UndoIt = UndoClients.CreateIterator(); UndoIt; ++UndoIt) - { - FEditorUndoClient* Client = *UndoIt; - if (Client && Client->MatchesContext(Context, PrimaryObject)) - { - Client->PostRedo( bRedoSuccess ); + if (bWasUndo) + { + Client->PostUndo( true ); + } + else + { + Client->PostRedo( true ); + } } } diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorTransaction.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorTransaction.cpp index 7be1da8cdd97..2466c08d6f7d 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorTransaction.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorTransaction.cpp @@ -252,8 +252,10 @@ void FTransaction::FObjectRecord::Load(FTransaction* Owner) } } -void FTransaction::FObjectRecord::Finalize( FTransaction* Owner ) +void FTransaction::FObjectRecord::Finalize( FTransaction* Owner, TSharedPtr& OutFinalizedObjectAnnotation ) { + OutFinalizedObjectAnnotation.Reset(); + if (Array) { // Can only diff objects @@ -271,6 +273,7 @@ void FTransaction::FObjectRecord::Finalize( FTransaction* Owner ) FSerializedObject CurrentSerializedObject; { CurrentSerializedObject.SetObject(CurrentObject); + OutFinalizedObjectAnnotation = CurrentSerializedObject.ObjectAnnotation; FWriter Writer(CurrentSerializedObject, bWantsBinarySerialization); SerializeObject(Writer); } @@ -326,10 +329,12 @@ void FTransaction::FObjectRecord::Snapshot( FTransaction* Owner ) bSnapshot = true; SerializedObjectSnapshot.Swap(CurrentSerializedObject); + TSharedPtr ChangedObjectTransactionAnnotation = SerializedObjectSnapshot.ObjectAnnotation; + // Notify any listeners of this change - if (SnapshotDeltaChange.HasChanged()) + if (SnapshotDeltaChange.HasChanged() || ChangedObjectTransactionAnnotation.IsValid()) { - CurrentObject->PostTransacted(FTransactionObjectEvent(ETransactionObjectEventType::Snapshot, SnapshotDeltaChange, 
InitialSerializedObject.ObjectAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); + CurrentObject->PostTransacted(FTransactionObjectEvent(Owner->GetId(), Owner->GetOperationId(), ETransactionObjectEventType::Snapshot, SnapshotDeltaChange, ChangedObjectTransactionAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); } } } @@ -338,10 +343,10 @@ void FTransaction::FObjectRecord::Diff( FTransaction* Owner, const FSerializedOb { auto AreObjectPointersIdentical = [&OldSerializedObject, &NewSerializedObject](const FName InPropertyName) { - TArray OldSerializedObjectIndices; + TArray> OldSerializedObjectIndices; OldSerializedObject.SerializedObjectIndices.MultiFind(InPropertyName, OldSerializedObjectIndices, true); - TArray NewSerializedObjectIndices; + TArray> NewSerializedObjectIndices; NewSerializedObject.SerializedObjectIndices.MultiFind(InPropertyName, NewSerializedObjectIndices, true); bool bAreObjectPointersIdentical = OldSerializedObjectIndices.Num() == NewSerializedObjectIndices.Num(); @@ -357,24 +362,42 @@ void FTransaction::FObjectRecord::Diff( FTransaction* Owner, const FSerializedOb return bAreObjectPointersIdentical; }; + auto AreNamesIdentical = [&OldSerializedObject, &NewSerializedObject](const FName InPropertyName) + { + TArray> OldSerializedNameIndices; + OldSerializedObject.SerializedNameIndices.MultiFind(InPropertyName, OldSerializedNameIndices, true); + + TArray> NewSerializedNameIndices; + NewSerializedObject.SerializedNameIndices.MultiFind(InPropertyName, NewSerializedNameIndices, true); + + bool bAreNamesIdentical = OldSerializedNameIndices.Num() == NewSerializedNameIndices.Num(); + if (bAreNamesIdentical) + { + for (int32 ObjIndex = 0; ObjIndex < OldSerializedNameIndices.Num() && bAreNamesIdentical; ++ObjIndex) + { + const FName& OldName = 
OldSerializedObject.ReferencedNames.IsValidIndex(OldSerializedNameIndices[ObjIndex]) ? OldSerializedObject.ReferencedNames[OldSerializedNameIndices[ObjIndex]] : FName(); + const FName& NewName = NewSerializedObject.ReferencedNames.IsValidIndex(NewSerializedNameIndices[ObjIndex]) ? NewSerializedObject.ReferencedNames[NewSerializedNameIndices[ObjIndex]] : FName(); + bAreNamesIdentical = OldName == NewName; + } + } + return bAreNamesIdentical; + }; + OutDeltaChange.bHasNameChange |= OldSerializedObject.ObjectName != NewSerializedObject.ObjectName; OutDeltaChange.bHasOuterChange |= OldSerializedObject.ObjectOuterPathName != NewSerializedObject.ObjectOuterPathName; OutDeltaChange.bHasPendingKillChange |= OldSerializedObject.bIsPendingKill != NewSerializedObject.bIsPendingKill; - // If the two have a different number of properties or object references then something structural was changed so we skip the property diff - const bool bHasStructuralChange = OldSerializedObject.SerializedProperties.Num() != NewSerializedObject.SerializedProperties.Num() || OldSerializedObject.SerializedObjectIndices.Num() != NewSerializedObject.SerializedObjectIndices.Num(); - if (bHasStructuralChange) - { - OutDeltaChange.bHasNonPropertyChanges = true; - return; - } - if (!AreObjectPointersIdentical(NAME_None)) { OutDeltaChange.bHasNonPropertyChanges = true; } - if (OldSerializedObject.SerializedProperties.Num() > 0) + if (!AreNamesIdentical(NAME_None)) + { + OutDeltaChange.bHasNonPropertyChanges = true; + } + + if (OldSerializedObject.SerializedProperties.Num() > 0 || NewSerializedObject.SerializedProperties.Num() > 0) { int32 StartOfOldPropertyBlock = INT_MAX; int32 StartOfNewPropertyBlock = INT_MAX; @@ -386,8 +409,8 @@ void FTransaction::FObjectRecord::Diff( FTransaction* Owner, const FSerializedOb const FSerializedProperty* OldSerializedProperty = OldSerializedObject.SerializedProperties.Find(NewNamePropertyPair.Key); if (!OldSerializedProperty) { - // Missing property, assume something 
structural changed - OutDeltaChange.bHasNonPropertyChanges = true; + // Missing property, assume that the property changed + OutDeltaChange.ChangedProperties.AddUnique(NewNamePropertyPair.Key); continue; } @@ -407,6 +430,10 @@ void FTransaction::FObjectRecord::Diff( FTransaction* Owner, const FSerializedOb { bIsPropertyIdentical = AreObjectPointersIdentical(NewNamePropertyPair.Key); } + if (bIsPropertyIdentical) + { + bIsPropertyIdentical = AreNamesIdentical(NewNamePropertyPair.Key); + } if (!bIsPropertyIdentical) { @@ -414,6 +441,17 @@ void FTransaction::FObjectRecord::Diff( FTransaction* Owner, const FSerializedOb } } + for (const TPair& OldNamePropertyPair : OldSerializedObject.SerializedProperties) + { + const FSerializedProperty* NewSerializedProperty = NewSerializedObject.SerializedProperties.Find(OldNamePropertyPair.Key); + if (!NewSerializedProperty) + { + // Missing property, assume that the property changed + OutDeltaChange.ChangedProperties.AddUnique(OldNamePropertyPair.Key); + continue; + } + } + // Compare the data before the property block to see if something else in the object has changed if (!OutDeltaChange.bHasNonPropertyChanges) { @@ -471,7 +509,7 @@ int32 FTransaction::GetRecordCount() const return Records.Num(); } -bool FTransaction::ContainsPieObject() const +bool FTransaction::ContainsPieObjects() const { for( const FObjectRecord& Record : Records ) { @@ -718,9 +756,18 @@ void FTransaction::SnapshotObject( UObject* InObject ) } } -/** - * Enacts the transaction. 
- */ +void FTransaction::BeginOperation() +{ + check(!OperationId.IsValid()); + OperationId = FGuid::NewGuid(); +} + +void FTransaction::EndOperation() +{ + check(OperationId.IsValid()); + OperationId.Invalidate(); +} + void FTransaction::Apply() { checkSlow(Inc==1||Inc==-1); @@ -739,7 +786,8 @@ void FTransaction::Apply() // In this case we still need to generate a diff for the transaction so that we notify correctly if (!Record.bFinalized) { - Record.Finalize(this); + TSharedPtr FinalizedObjectAnnotation; + Record.Finalize(this, FinalizedObjectAnnotation); } UObject* Object = Record.Object.Get(); @@ -774,11 +822,20 @@ void FTransaction::Apply() } } - // An Actor's components must always get its PostEditUndo before the owning Actor so do a quick sort + // An Actor's components must always get its PostEditUndo before the owning Actor + // so do a quick sort on Outer depth, component will deeper than their owner ChangedObjects.KeySort([](UObject& A, UObject& B) { - UActorComponent* BAsComponent = Cast(&B); - return (BAsComponent ? 
(BAsComponent->GetOwner() != &A) : true); + auto GetObjectDepth = [](UObject* InObj) + { + int32 Depth = 0; + for (UObject* Outer = InObj; Outer; Outer = Outer->GetOuter()) + { + ++Depth; + } + return Depth; + }; + return GetObjectDepth(&A) > GetObjectDepth(&B); }); TArray LevelsToCommitModelSurface; @@ -810,10 +867,10 @@ void FTransaction::Apply() const FObjectRecord& ChangedObjectRecord = Records[ChangedObjectIt.Value.RecordIndex]; const FTransactionObjectDeltaChange& DeltaChange = ChangedObjectRecord.DeltaChange; - if (DeltaChange.HasChanged()) + if (DeltaChange.HasChanged() || ChangedObjectTransactionAnnotation.IsValid()) { const FObjectRecord::FSerializedObject& InitialSerializedObject = ChangedObjectRecord.SerializedObject; - ChangedObject->PostTransacted(FTransactionObjectEvent(ETransactionObjectEventType::UndoRedo, DeltaChange, ChangedObjectTransactionAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); + ChangedObject->PostTransacted(FTransactionObjectEvent(Id, OperationId, ETransactionObjectEventType::UndoRedo, DeltaChange, ChangedObjectTransactionAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); } } @@ -841,37 +898,49 @@ void FTransaction::Finalize() { for (int32 i = 0; i < Records.Num(); ++i) { + TSharedPtr FinalizedObjectAnnotation; + FObjectRecord& ObjectRecord = Records[i]; - ObjectRecord.Finalize(this); + ObjectRecord.Finalize(this, FinalizedObjectAnnotation); UObject* Object = ObjectRecord.Object.Get(); if (Object) { if (!ChangedObjects.Contains(Object)) { - ChangedObjects.Add(Object, FChangedObjectValue(i, ObjectRecord.SerializedObject.ObjectAnnotation)); + ChangedObjects.Add(Object, FChangedObjectValue(i, FinalizedObjectAnnotation)); } } } - // An Actor's components must always be notified before the owning Actor so do a quick sort + // An Actor's components must always be notified 
before the owning Actor + // so do a quick sort on Outer depth, component will deeper than their owner ChangedObjects.KeySort([](UObject& A, UObject& B) { - UActorComponent* BAsComponent = Cast(&B); - return (BAsComponent ? (BAsComponent->GetOwner() != &A) : true); + auto GetObjectDepth = [](UObject* InObj) + { + int32 Depth = 0; + for (UObject* Outer = InObj; Outer; Outer = Outer->GetOuter()) + { + ++Depth; + } + return Depth; + }; + return GetObjectDepth(&A) > GetObjectDepth(&B); }); for (auto ChangedObjectIt : ChangedObjects) { + TSharedPtr ChangedObjectTransactionAnnotation = ChangedObjectIt.Value.Annotation; + const FObjectRecord& ChangedObjectRecord = Records[ChangedObjectIt.Value.RecordIndex]; const FTransactionObjectDeltaChange& DeltaChange = ChangedObjectRecord.DeltaChange; - if (DeltaChange.HasChanged()) + if (DeltaChange.HasChanged() || ChangedObjectTransactionAnnotation.IsValid()) { UObject* ChangedObject = ChangedObjectIt.Key; - TSharedPtr ChangedObjectTransactionAnnotation = ChangedObjectIt.Value.Annotation; const FObjectRecord::FSerializedObject& InitialSerializedObject = ChangedObjectRecord.SerializedObject; - ChangedObject->PostTransacted(FTransactionObjectEvent(ETransactionObjectEventType::Finalized, DeltaChange, ChangedObjectTransactionAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); + ChangedObject->PostTransacted(FTransactionObjectEvent(Id, OperationId, ETransactionObjectEventType::Finalized, DeltaChange, ChangedObjectTransactionAnnotation, InitialSerializedObject.ObjectName, InitialSerializedObject.ObjectPathName, InitialSerializedObject.ObjectOuterPathName)); } } @@ -1006,6 +1075,16 @@ int32 UTransBuffer::End() if (GUndo) { GUndo->Finalize(); + TransactionStateChangedDelegate.Broadcast(GUndo->GetContext(), ETransactionStateEventType::TransactionFinalized); + GUndo->EndOperation(); + + // PIE objects now generate transactions. 
+ // Once the transaction is finalized however, they aren't kept in the undo buffer. + if (GUndo->ContainsPieObjects()) + { + check(UndoCount == 0); + UndoBuffer.Pop(false); + } } GUndo = nullptr; PreviousUndoCount = INDEX_NONE; @@ -1063,6 +1142,12 @@ void UTransBuffer::Cancel( int32 StartIndex /*=0*/ ) { if ( StartIndex == 0 ) { + if (GUndo) + { + TransactionStateChangedDelegate.Broadcast(GUndo->GetContext(), ETransactionStateEventType::TransactionCanceled); + GUndo->EndOperation(); + } + // clear the global pointer to the soon-to-be-deleted transaction GUndo = nullptr; @@ -1177,9 +1262,9 @@ const FTransaction* UTransBuffer::GetTransaction( int32 QueueIndex ) const } -FUndoSessionContext UTransBuffer::GetUndoContext( bool bCheckWhetherUndoPossible ) +FTransactionContext UTransBuffer::GetUndoContext( bool bCheckWhetherUndoPossible ) { - FUndoSessionContext Context; + FTransactionContext Context; FText Title; if( bCheckWhetherUndoPossible && !CanUndo( &Title ) ) { @@ -1192,9 +1277,9 @@ FUndoSessionContext UTransBuffer::GetUndoContext( bool bCheckWhetherUndoPossible } -FUndoSessionContext UTransBuffer::GetRedoContext() +FTransactionContext UTransBuffer::GetRedoContext() { - FUndoSessionContext Context; + FTransactionContext Context; FText Title; if( !CanRedo( &Title ) ) { @@ -1234,7 +1319,7 @@ bool UTransBuffer::Undo(bool bCanRedo) if (!CanUndo()) { - UndoDelegate.Broadcast(FUndoSessionContext(), false); + UndoDelegate.Broadcast(FTransactionContext(), false); return false; } @@ -1245,18 +1330,23 @@ bool UTransBuffer::Undo(bool bCanRedo) FTransaction& Transaction = UndoBuffer[ UndoBuffer.Num() - ++UndoCount ].Get(); UE_LOG(LogEditorTransaction, Log, TEXT("Undo %s"), *Transaction.GetTitle().ToString() ); CurrentTransaction = &Transaction; + CurrentTransaction->BeginOperation(); - BeforeRedoUndoDelegate.Broadcast(Transaction.GetContext()); + const FTransactionContext TransactionContext = CurrentTransaction->GetContext(); + 
TransactionStateChangedDelegate.Broadcast(TransactionContext, ETransactionStateEventType::UndoRedoStarted); + BeforeRedoUndoDelegate.Broadcast(TransactionContext); Transaction.Apply(); - UndoDelegate.Broadcast(Transaction.GetContext(), true); + UndoDelegate.Broadcast(TransactionContext, true); + TransactionStateChangedDelegate.Broadcast(TransactionContext, ETransactionStateEventType::UndoRedoFinalized); + + CurrentTransaction->EndOperation(); + CurrentTransaction = nullptr; if (!bCanRedo) { UndoBuffer.RemoveAt(UndoBuffer.Num() - UndoCount, UndoCount); UndoCount = 0; } - - CurrentTransaction = nullptr; } GIsTransacting = false; @@ -1271,7 +1361,7 @@ bool UTransBuffer::Redo() if (!CanRedo()) { - RedoDelegate.Broadcast(FUndoSessionContext(), false); + RedoDelegate.Broadcast(FTransactionContext(), false); return false; } @@ -1282,11 +1372,16 @@ bool UTransBuffer::Redo() FTransaction& Transaction = UndoBuffer[ UndoBuffer.Num() - UndoCount-- ].Get(); UE_LOG(LogEditorTransaction, Log, TEXT("Redo %s"), *Transaction.GetTitle().ToString() ); CurrentTransaction = &Transaction; + CurrentTransaction->BeginOperation(); - BeforeRedoUndoDelegate.Broadcast(Transaction.GetContext()); + const FTransactionContext TransactionContext = CurrentTransaction->GetContext(); + TransactionStateChangedDelegate.Broadcast(TransactionContext, ETransactionStateEventType::UndoRedoStarted); + BeforeRedoUndoDelegate.Broadcast(TransactionContext); Transaction.Apply(); - RedoDelegate.Broadcast(Transaction.GetContext(), true); + RedoDelegate.Broadcast(TransactionContext, true); + TransactionStateChangedDelegate.Broadcast(TransactionContext, ETransactionStateEventType::UndoRedoFinalized); + CurrentTransaction->EndOperation(); CurrentTransaction = nullptr; } GIsTransacting = false; @@ -1373,11 +1468,11 @@ bool UTransBuffer::IsObjectTransacting(const UObject* Object) const return false; } -bool UTransBuffer::ContainsPieObject() const +bool UTransBuffer::ContainsPieObjects() const { for( const TSharedRef& 
Transaction : UndoBuffer ) { - if( Transaction->ContainsPieObject() ) + if( Transaction->ContainsPieObjects() ) { return true; } diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorViewportClient.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorViewportClient.cpp index b64bfaa32850..753f68776b4c 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorViewportClient.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorViewportClient.cpp @@ -5101,6 +5101,26 @@ void FEditorViewportClient::MouseLeave(FViewport* InViewport) PixelInspectorRealtimeManagement(this, false); } +FViewportCursorLocation FEditorViewportClient::GetCursorWorldLocationFromMousePos() +{ + // Create the scene view context + FSceneViewFamilyContext ViewFamily(FSceneViewFamily::ConstructionValues( + Viewport, + GetScene(), + EngineShowFlags) + .SetRealtimeUpdate(IsRealtime())); + + // Calculate the scene view + FSceneView* View = CalcSceneView(&ViewFamily); + + // Construct an FViewportCursorLocation which calculates world space postion from the scene view and mouse pos. 
+ return FViewportCursorLocation(View, + this, + Viewport->GetMouseX(), + Viewport->GetMouseY() + ); +} + void FEditorViewportClient::CapturedMouseMove( FViewport* InViewport, int32 InMouseX, int32 InMouseY ) { UpdateRequiredCursorVisibility(); diff --git a/Engine/Source/Editor/UnrealEd/Private/EditorWorldExtension.cpp b/Engine/Source/Editor/UnrealEd/Private/EditorWorldExtension.cpp index cbf259335b66..4f1f13239db8 100644 --- a/Engine/Source/Editor/UnrealEd/Private/EditorWorldExtension.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/EditorWorldExtension.cpp @@ -21,6 +21,10 @@ UEditorWorldExtension::UEditorWorldExtension() : UEditorWorldExtension::~UEditorWorldExtension() { + if (OwningExtensionsCollection) + { + OwningExtensionsCollection->RemoveExtension(this); + } OwningExtensionsCollection = nullptr; } @@ -39,10 +43,26 @@ UWorld* UEditorWorldExtension::GetWorld() const return OwningExtensionsCollection->GetWorld(); } -AActor* UEditorWorldExtension::SpawnTransientSceneActor(TSubclassOf ActorClass, const FString& ActorName, const bool bWithSceneComponent /*= false*/, const EObjectFlags InObjectFlags /*= EObjectFlags::RF_DuplicateTransient*/) +UWorld* UEditorWorldExtension::GetLastEditorWorld() const +{ + return OwningExtensionsCollection->GetLastEditorWorld(); +} + +AActor* UEditorWorldExtension::SpawnTransientSceneActor(TSubclassOf ActorClass, const FString& ActorName, const bool bWithSceneComponent /*= false*/, const EObjectFlags InObjectFlags /*= EObjectFlags::RF_DuplicateTransient*/, const bool bValidForPIE /* = false */) { UWorld* World = GetWorld(); check(World != nullptr); + + // if currently in PIE, non-PIE actors should be spawned in LastEditorWorld if it exists. 
+ if (!bValidForPIE && !GEditor->bIsSimulatingInEditor && GEditor->PlayWorld != nullptr && GEditor->PlayWorld == World) + { + UWorld* LastEditorWorld = GetLastEditorWorld(); + if (LastEditorWorld != nullptr) + { + World = LastEditorWorld; + } + } + const bool bWasWorldPackageDirty = World->GetOutermost()->IsDirty(); FActorSpawnParameters ActorSpawnParameters; @@ -54,8 +74,12 @@ AActor* UEditorWorldExtension::SpawnTransientSceneActor(TSubclassOf Acto AActor* NewActor = World->SpawnActor< AActor >(ActorClass, ActorSpawnParameters); NewActor->SetActorLabel(ActorName); + FEditorWorldExtensionActorData ActorData; + ActorData.Actor = NewActor; + ActorData.bValidForPIE = bValidForPIE; + // Keep track of this actor so that we can migrate it between worlds if needed - ExtensionActors.Add( NewActor ); + ExtensionActors.Add( ActorData ); if (bWithSceneComponent) { @@ -79,13 +103,20 @@ void UEditorWorldExtension::DestroyTransientActor(AActor* Actor) { if (Actor != nullptr) { - ExtensionActors.RemoveSingleSwap(Actor); + for (int32 ActorIndex = 0; ActorIndex < ExtensionActors.Num(); ++ActorIndex) + { + FEditorWorldExtensionActorData ActorData = ExtensionActors[ActorIndex]; + if (ActorData.Actor == Actor) + { + ExtensionActors.RemoveAtSwap(ActorIndex--); + break; + } + } } - UWorld* World = GetWorld(); - check(World != nullptr); if (Actor != nullptr) { + UWorld* World = Actor->GetWorld(); const bool bWasWorldPackageDirty = World->GetOutermost()->IsDirty(); const bool bNetForce = false; @@ -148,14 +179,21 @@ bool UEditorWorldExtension::ExecCommand(const FString& InCommand) return bResult; } -void UEditorWorldExtension::TransitionWorld(UWorld* NewWorld) +void UEditorWorldExtension::TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) { + check(NewWorld != nullptr); + for (int32 ActorIndex = 0; ActorIndex < ExtensionActors.Num(); ++ActorIndex) { - AActor* Actor = ExtensionActors[ ActorIndex ]; - if( Actor != nullptr ) - { - ReparentActor( Actor, 
NewWorld ); + FEditorWorldExtensionActorData ActorData = ExtensionActors[ ActorIndex ]; + if( ActorData.Actor != nullptr) + { + if (TransitionState == EEditorWorldExtensionTransitionState::TransitionAll || + (TransitionState == EEditorWorldExtensionTransitionState::TransitionPIEOnly && ActorData.bValidForPIE) || + (TransitionState == EEditorWorldExtensionTransitionState::TransitionNonPIEOnly && !ActorData.bValidForPIE)) + { + ReparentActor(ActorData.Actor, NewWorld); + } } else { @@ -217,7 +255,7 @@ void UEditorWorldExtension::InitInternal(UEditorWorldExtensionCollection* InOwni UEditorWorldExtensionCollection::UEditorWorldExtensionCollection() : Super(), Currentworld(nullptr), - EditorWorldOnSimulate(nullptr) + LastEditorWorld(nullptr) { if( !IsTemplate() ) { @@ -235,9 +273,13 @@ UEditorWorldExtensionCollection::~UEditorWorldExtensionCollection() FEditorDelegates::EndPIE.RemoveAll( this ); FEditorDelegates::OnSwitchBeginPIEAndSIE.RemoveAll( this ); + for (const FEditorExtensionTuple& Extension : EditorExtensions) + { + Extension.Get<0>()->OwningExtensionsCollection = nullptr; + } EditorExtensions.Empty(); Currentworld.Reset(); - EditorWorldOnSimulate.Reset(); + LastEditorWorld.Reset(); } UWorld* UEditorWorldExtensionCollection::GetWorld() const @@ -245,6 +287,11 @@ UWorld* UEditorWorldExtensionCollection::GetWorld() const return Currentworld.IsValid() ? Currentworld.Get() : nullptr; } +UWorld* UEditorWorldExtensionCollection::GetLastEditorWorld() const +{ + return LastEditorWorld.IsValid() ? 
LastEditorWorld.Get() : nullptr; +} + UEditorWorldExtension* UEditorWorldExtensionCollection::AddExtension(TSubclassOf EditorExtensionClass) { UEditorWorldExtension* Extension = nullptr; @@ -301,6 +348,7 @@ void UEditorWorldExtensionCollection::RemoveExtension( UEditorWorldExtension* Ed ); if( ensure( ExistingExtensionIndex != INDEX_NONE ) ) { + check(EditorExtension->OwningExtensionsCollection == this); FEditorExtensionTuple& EditorExtensionTuple = EditorExtensions[ ExistingExtensionIndex ]; int32& RefCount = EditorExtensionTuple.Get<1>(); --RefCount; @@ -309,6 +357,7 @@ void UEditorWorldExtensionCollection::RemoveExtension( UEditorWorldExtension* Ed { EditorExtensions.RemoveAt( ExistingExtensionIndex ); EditorExtension->Shutdown(); + EditorExtension->OwningExtensionsCollection = nullptr; } } } @@ -372,8 +421,9 @@ void UEditorWorldExtensionCollection::ShowAllActors(const bool bShow) for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) { UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); - for (AActor* Actor : EditorExtension->ExtensionActors) + for (FEditorWorldExtensionActorData ActorData : EditorExtension->ExtensionActors) { + AActor* Actor = ActorData.Actor; if (Actor != nullptr) { TInlineComponentArray ComponentArray; @@ -391,48 +441,71 @@ void UEditorWorldExtensionCollection::ShowAllActors(const bool bShow) void UEditorWorldExtensionCollection::PostPIEStarted( bool bIsSimulatingInEditor ) { - if( bIsSimulatingInEditor && GEditor->EditorWorld != nullptr && Currentworld.IsValid() && GEditor->EditorWorld == Currentworld.Get() ) + if( GEditor->EditorWorld != nullptr && Currentworld.IsValid() && GEditor->EditorWorld == Currentworld.Get() && GEditor->PlayWorld != nullptr ) { - SetWorld( GEditor->GetPIEWorldContext()->World() ); - EditorWorldOnSimulate = GEditor->GetEditorWorldContext().World(); - - for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) + if (bIsSimulatingInEditor) { - UEditorWorldExtension* 
EditorExtension = EditorExtensionTuple.Get<0>(); - EditorExtension->EnteredSimulateInEditor(); + // Editor to SIE + // Transition all actors to the play world. + SetWorld(GEditor->PlayWorld, EEditorWorldExtensionTransitionState::TransitionAll); + + LastEditorWorld = GEditor->GetEditorWorldContext().World(); + + for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) + { + UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); + EditorExtension->EnteredSimulateInEditor(); + } + } + else + { + // Editor to PIE + // Transition PIE-valid actors to the play world. + SetWorld(GEditor->PlayWorld, EEditorWorldExtensionTransitionState::TransitionPIEOnly); + + LastEditorWorld = GEditor->GetEditorWorldContext().World(); } } } void UEditorWorldExtensionCollection::OnPreEndPIE(bool bWasSimulatingInEditor) { - if (!bWasSimulatingInEditor && EditorWorldOnSimulate.IsValid() && EditorWorldOnSimulate.Get() == GEditor->EditorWorld) + if (!bWasSimulatingInEditor && LastEditorWorld.IsValid() && LastEditorWorld.Get() == GEditor->EditorWorld) { if (!GIsRequestingExit) { + // PIE to Editor // Revert back to the editor world before closing the play world, otherwise actors and objects will be destroyed. - SetWorld(EditorWorldOnSimulate.Get()); - EditorWorldOnSimulate.Reset(); + // Transition PIE-valid extension actors back to the editor world. 
+ SetWorld(GEditor->EditorWorld, EEditorWorldExtensionTransitionState::TransitionPIEOnly); + + LastEditorWorld.Reset(); } } } void UEditorWorldExtensionCollection::OnEndPIE( bool bWasSimulatingInEditor ) { - if( bWasSimulatingInEditor && EditorWorldOnSimulate.IsValid() && EditorWorldOnSimulate.Get() == GEditor->EditorWorld ) + if( bWasSimulatingInEditor && LastEditorWorld.IsValid() && LastEditorWorld.Get() == GEditor->EditorWorld ) { if( !GIsRequestingExit ) { UWorld* SimulateWorld = Currentworld.Get(); + // SIE to Editor // Revert back to the editor world before closing the play world, otherwise actors and objects will be destroyed. - SetWorld( EditorWorldOnSimulate.Get() ); - EditorWorldOnSimulate.Reset(); + // Transition all extension actors back to the editor world. + SetWorld(GEditor->EditorWorld, EEditorWorldExtensionTransitionState::TransitionAll); - for( FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions ) + LastEditorWorld.Reset(); + + if (SimulateWorld != nullptr) { - UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); - EditorExtension->LeftSimulateInEditor(SimulateWorld); + for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) + { + UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); + EditorExtension->LeftSimulateInEditor(SimulateWorld); + } } } } @@ -440,36 +513,43 @@ void UEditorWorldExtensionCollection::OnEndPIE( bool bWasSimulatingInEditor ) void UEditorWorldExtensionCollection::SwitchPIEAndSIE(bool bIsSimulatingInEditor) { - if (GEditor->EditorWorld != nullptr && EditorWorldOnSimulate.IsValid() && EditorWorldOnSimulate.Get() == GEditor->EditorWorld && + if (GEditor->EditorWorld != nullptr && LastEditorWorld.IsValid() && LastEditorWorld.Get() == GEditor->EditorWorld && GEditor->PlayWorld != nullptr && Currentworld.IsValid() && Currentworld.Get() == GEditor->PlayWorld) { if (!bIsSimulatingInEditor) { // Post SIE to PIE. 
- // Transition the extensions to the editor world, so everything is stored while being in PIE. - SetWorld(EditorWorldOnSimulate.Get()); + // Transition non-PIE extension actors to the editor world while in PIE. + TransitionWorld(GEditor->EditorWorld, EEditorWorldExtensionTransitionState::TransitionNonPIEOnly); } else { // Post PIE to SIE - // All the extensions were transitioned to the editor world before entering PIE from SIE. Now we have to transition the extensions back to simulate world. - SetWorld(Currentworld.Get()); + // Transition non-PIE extension actors back to simulate world from editor world where they were temporarily moved while in PIE. + TransitionWorld(GEditor->PlayWorld, EEditorWorldExtensionTransitionState::TransitionNonPIEOnly); } } } -void UEditorWorldExtensionCollection::SetWorld(UWorld* World) +void UEditorWorldExtensionCollection::TransitionWorld(UWorld* World, EEditorWorldExtensionTransitionState TransitionState) { - check( World != nullptr ); + check(World != nullptr); + + for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) + { + UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); + EditorExtension->TransitionWorld(World, TransitionState); + } +} + +void UEditorWorldExtensionCollection::SetWorld(UWorld* World, EEditorWorldExtensionTransitionState TransitionState /* = EEditorWorldExtensionTransitionState::TransitionAll */) +{ + check(World != nullptr); // First time setting the world on collection we don't want to transition because there is nothing yet to transition from. 
- if( Currentworld.IsValid() ) + if (Currentworld.IsValid() && TransitionState != EEditorWorldExtensionTransitionState::TransitionNone) { - for (FEditorExtensionTuple& EditorExtensionTuple : EditorExtensions) - { - UEditorWorldExtension* EditorExtension = EditorExtensionTuple.Get<0>(); - EditorExtension->TransitionWorld(World); - } + TransitionWorld(World, TransitionState); } Currentworld = World; @@ -522,7 +602,7 @@ UEditorWorldExtensionCollection* UEditorWorldExtensionManager::OnWorldAdd(UWorld if (World != nullptr) { UEditorWorldExtensionCollection* ExtensionCollection = NewObject(); - ExtensionCollection->SetWorld(World); + ExtensionCollection->SetWorld(World, EEditorWorldExtensionTransitionState::TransitionAll); Result = ExtensionCollection; EditorWorldExtensionCollection.Add(Result); } diff --git a/Engine/Source/Editor/UnrealEd/Private/Factories/EditorFactories.cpp b/Engine/Source/Editor/UnrealEd/Private/Factories/EditorFactories.cpp index 3320b8d6a69d..e783b3029c5c 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Factories/EditorFactories.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Factories/EditorFactories.cpp @@ -4803,7 +4803,7 @@ EReimportResult::Type UFontFileImportFactory::Reimport(UObject* InObject) return EReimportResult::Succeeded; } - return EReimportResult::Failed; + return OutCanceled ? 
EReimportResult::Cancelled : EReimportResult::Failed; } int32 UFontFileImportFactory::GetPriority() const @@ -5194,10 +5194,12 @@ EReimportResult::Type UReimportTextureFactory::Reimport( UObject* Obj ) else if (OutCanceled) { UE_LOG(LogEditorFactories, Warning, TEXT("-- import canceled")); + return EReimportResult::Cancelled; } else { UE_LOG(LogEditorFactories, Warning, TEXT("-- import failed")); + return EReimportResult::Failed; } return EReimportResult::Succeeded; @@ -5879,6 +5881,8 @@ EReimportResult::Type UReimportFbxAnimSequenceFactory::Reimport( UObject* Obj ) { UE_LOG(LogEditorFactories, Warning, TEXT("-- import failed") ); Importer->AddTokenizedErrorMessage(FTokenizedMessage::Create(EMessageSeverity::Error, LOCTEXT("Error_CouldNotReimportAnimation", "Cannot re-import animation.")), FFbxErrors::Generic_ReimportingObjectFailed); + Importer->ReleaseScene(); + return EReimportResult::Failed; } Importer->ReleaseScene(); diff --git a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxCompareWindow.cpp b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxCompareWindow.cpp index 4c3d667c757e..317090bbf0ab 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxCompareWindow.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxCompareWindow.cpp @@ -261,7 +261,7 @@ TSharedRef FMaterialCompareData::ConstructCell(FCompMesh *MeshData, int .Padding(FMargin(5.0f, 0.0f, 0.0f, 0.0f)) [ SNew(STextBlock) - .Text(LOCTEXT("FMaterialCompareData_EmptyCell", "")) + .Text(FText::GetEmpty()) ]; } diff --git a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxMaterialConflictWindow.cpp b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxMaterialConflictWindow.cpp index f66cac03bcff..87c68885e2ba 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxMaterialConflictWindow.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxMaterialConflictWindow.cpp @@ -183,7 +183,7 @@ TSharedPtr SFbxMaterialConflictWindow::ConstructMaterialComparison() ( SNew(SHeaderRow) + SHeaderRow::Column("RowIndex") - 
.DefaultLabel(LOCTEXT("SFbxMaterialConflictWindow_RowIndex_ColumnHeader", "")) + .DefaultLabel(FText::GetEmpty()) .FixedWidth(25) + SHeaderRow::Column("Current") .DefaultLabel(LOCTEXT("SFbxMaterialConflictWindow_Current_ColumnHeader", "Current Asset Materials")) @@ -361,7 +361,7 @@ TSharedRef FMaterialConflictData::ConstructCellCurrent() .Padding(FMargin(5.0f, 2.0f, 0.0f, 2.0f)) [ SNew(STextBlock) - .Text(LOCTEXT("FMaterialConflictData_EmptyCell", "")) + .Text(FText::GetEmpty()) ]; } @@ -384,7 +384,7 @@ TSharedRef FMaterialConflictData::ConstructCellFbx() .Padding(FMargin(5.0f, 2.0f, 0.0f, 2.0f)) [ SNew(STextBlock) - .Text(LOCTEXT("FMaterialConflictData_EmptyCell", "")) + .Text(FText::GetEmpty()) ]; } diff --git a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxStaticMeshImport.cpp b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxStaticMeshImport.cpp index 6b09d4734637..42ce99490aa9 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxStaticMeshImport.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Fbx/FbxStaticMeshImport.cpp @@ -799,7 +799,7 @@ bool UnFbx::FFbxImporter::BuildStaticMeshFromGeometry(FbxNode* Node, UStaticMesh if (!PolygonGroupMapping.Contains(RealMaterialIndex)) { UMaterialInterface* Material = MeshMaterials.IsValidIndex(RealMaterialIndex) ? MeshMaterials[RealMaterialIndex].Material : UMaterial::GetDefaultMaterial(MD_Surface); - FName ImportedMaterialSlotName = MeshMaterials.IsValidIndex(RealMaterialIndex) ? FName(*MeshMaterials[RealMaterialIndex].GetName()) : NAME_None; + FName ImportedMaterialSlotName = MeshMaterials.IsValidIndex(RealMaterialIndex) ? FName(*MeshMaterials[RealMaterialIndex].GetName()) : (Material != nullptr ? 
FName(*Material->GetName()) : NAME_None); FPolygonGroupID ExistingPolygonGroup = FPolygonGroupID::Invalid; for (const FPolygonGroupID PolygonGroupID : MeshDescription->PolygonGroups().GetElementIDs()) { diff --git a/Engine/Source/Editor/UnrealEd/Private/GroupActor.cpp b/Engine/Source/Editor/UnrealEd/Private/GroupActor.cpp index 11ff0d9d1fa6..d8b4b7024df0 100644 --- a/Engine/Source/Editor/UnrealEd/Private/GroupActor.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/GroupActor.cpp @@ -77,6 +77,20 @@ void AGroupActor::PostEditUndo() { GetWorld()->ActiveGroupActors.RemoveSwap(this); } + else + { + // Cache group on de-serialization + GetWorld()->ActiveGroupActors.AddUnique(this); + + // Fix up references for GetParentForActor() + for (int32 i = 0; i < GroupActors.Num(); ++i) + { + if (GroupActors[i] != NULL) + { + GroupActors[i]->GroupActor = this; + } + } + } } bool AGroupActor::IsSelected() const diff --git a/Engine/Source/Editor/UnrealEd/Private/Kismet2/KismetReinstanceUtilities.cpp b/Engine/Source/Editor/UnrealEd/Private/Kismet2/KismetReinstanceUtilities.cpp index cc3352db7468..699784753794 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Kismet2/KismetReinstanceUtilities.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Kismet2/KismetReinstanceUtilities.cpp @@ -1105,7 +1105,7 @@ struct FActorReplacementHelper , AttachmentData( MoveTemp(InAttachmentData) ) , bSelectNewActor(OldActor->IsSelected()) { - CachedActorData = StaticCastSharedPtr(OldActor->GetTransactionAnnotation()); + CachedActorData = StaticCastSharedPtr(OldActor->FindOrCreateTransactionAnnotation()); TArray AttachedActors; OldActor->GetAttachedActors(AttachedActors); diff --git a/Engine/Source/Editor/UnrealEd/Private/LevelEditorViewport.cpp b/Engine/Source/Editor/UnrealEd/Private/LevelEditorViewport.cpp index 8b4b8c1fdc27..1071bad83a07 100644 --- a/Engine/Source/Editor/UnrealEd/Private/LevelEditorViewport.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/LevelEditorViewport.cpp @@ -3849,28 +3849,6 @@ 
EMouseCursor::Type FLevelEditorViewportClient::GetCursor(FViewport* InViewport,i } -FViewportCursorLocation FLevelEditorViewportClient::GetCursorWorldLocationFromMousePos() -{ - // Create the scene view context - FSceneViewFamilyContext ViewFamily( FSceneViewFamily::ConstructionValues( - Viewport, - GetScene(), - EngineShowFlags ) - .SetRealtimeUpdate( IsRealtime() )); - - // Calculate the scene view - FSceneView* View = CalcSceneView( &ViewFamily ); - - // Construct an FViewportCursorLocation which calculates world space postion from the scene view and mouse pos. - return FViewportCursorLocation( View, - this, - Viewport->GetMouseX(), - Viewport->GetMouseY() - ); -} - - - /** * Called when the mouse is moved while a window input capture is in effect * diff --git a/Engine/Source/Editor/UnrealEd/Private/PackageTools.cpp b/Engine/Source/Editor/UnrealEd/Private/PackageTools.cpp index 08da63f8be13..da3ab9c60736 100644 --- a/Engine/Source/Editor/UnrealEd/Private/PackageTools.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/PackageTools.cpp @@ -14,6 +14,7 @@ #include "UObject/Package.h" #include "UObject/MetaData.h" #include "UObject/UObjectHash.h" +#include "UObject/GCObjectScopeGuard.h" #include "Serialization/ArchiveFindCulprit.h" #include "Misc/PackageName.h" #include "Editor/EditorPerProjectUserSettings.h" @@ -37,6 +38,7 @@ #include "UObject/UObjectIterator.h" #include "ComponentReregisterContext.h" #include "Engine/Selection.h" +#include "Engine/GameEngine.h" #include "Engine/LevelStreaming.h" #include "Engine/MapBuildDataRegistry.h" @@ -461,7 +463,7 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) bool UPackageTools::ReloadPackages( const TArray& TopLevelPackages ) { FText ErrorMessage; - const bool bResult = ReloadPackages(TopLevelPackages, ErrorMessage, /*bInteractive*/true); + const bool bResult = ReloadPackages(TopLevelPackages, ErrorMessage, EReloadPackagesInteractionMode::Interactive); if (!ErrorMessage.IsEmpty()) { @@ -473,6 
+475,12 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) bool UPackageTools::ReloadPackages( const TArray& TopLevelPackages, FText& OutErrorMessage, const bool bInteractive ) + { + return ReloadPackages(TopLevelPackages, OutErrorMessage, bInteractive ? EReloadPackagesInteractionMode::Interactive : EReloadPackagesInteractionMode::AssumeNegative); + } + + + bool UPackageTools::ReloadPackages( const TArray& TopLevelPackages, FText& OutErrorMessage, const EReloadPackagesInteractionMode InteractionMode ) { bool bResult = false; @@ -504,22 +512,34 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) } } - // Ask the user whether dirty packages should be reloaded. - if (bInteractive && DirtyPackages.Num() > 0) + // How should we handle locally dirty packages? + if (DirtyPackages.Num() > 0) { - FTextBuilder ReloadDirtyPackagesMsgBuilder; - ReloadDirtyPackagesMsgBuilder.AppendLine(NSLOCTEXT("UnrealEd", "ShouldReloadDirtyPackagesHeader", "The following packages have been modified:")); - { - ReloadDirtyPackagesMsgBuilder.Indent(); - for (UPackage* DirtyPackage : DirtyPackages) - { - ReloadDirtyPackagesMsgBuilder.AppendLine(DirtyPackage->GetFName()); - } - ReloadDirtyPackagesMsgBuilder.Unindent(); - } - ReloadDirtyPackagesMsgBuilder.AppendLine(NSLOCTEXT("UnrealEd", "ShouldReloadDirtyPackagesFooter", "Would you like to reload these packages? This will revert any changes you have made.")); + EAppReturnType::Type ReloadDirtyPackagesResult = EAppReturnType::No; - if (FMessageDialog::Open(EAppMsgType::YesNo, ReloadDirtyPackagesMsgBuilder.ToText()) == EAppReturnType::Yes) + // Ask the user whether dirty packages should be reloaded. 
+ if (InteractionMode == EReloadPackagesInteractionMode::Interactive) + { + FTextBuilder ReloadDirtyPackagesMsgBuilder; + ReloadDirtyPackagesMsgBuilder.AppendLine(NSLOCTEXT("UnrealEd", "ShouldReloadDirtyPackagesHeader", "The following packages have been modified:")); + { + ReloadDirtyPackagesMsgBuilder.Indent(); + for (UPackage* DirtyPackage : DirtyPackages) + { + ReloadDirtyPackagesMsgBuilder.AppendLine(DirtyPackage->GetFName()); + } + ReloadDirtyPackagesMsgBuilder.Unindent(); + } + ReloadDirtyPackagesMsgBuilder.AppendLine(NSLOCTEXT("UnrealEd", "ShouldReloadDirtyPackagesFooter", "Would you like to reload these packages? This will revert any changes you have made.")); + + ReloadDirtyPackagesResult = FMessageDialog::Open(EAppMsgType::YesNo, ReloadDirtyPackagesMsgBuilder.ToText()); + } + else if (InteractionMode == EReloadPackagesInteractionMode::AssumePositive) + { + ReloadDirtyPackagesResult = EAppReturnType::Yes; + } + + if (ReloadDirtyPackagesResult == EAppReturnType::Yes) { for (UPackage* DirtyPackage : DirtyPackages) { @@ -571,71 +591,95 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) } } + // Get the current world. + TWeakObjectPtr CurrentWorld; + if (GIsEditor) + { + if (UWorld* EditorWorld = GEditor->GetEditorWorldContext().World()) + { + CurrentWorld = EditorWorld; + } + } + else if (UGameEngine* GameEngine = Cast(GEngine)) + { + if (UWorld* GameWorld = GameEngine->GetGameWorld()) + { + CurrentWorld = GameWorld; + } + } + // Check to see if we need to reload the current world. FName WorldNameToReload; TMap LevelsToMapBuildData; TArray RemovedStreamingLevels; + if (UWorld* CurrentWorldPtr = CurrentWorld.Get()) { - if (UWorld* EditorWorld = GEditor->GetEditorWorldContext().World()) + // Is the current world being reloaded? 
If so, we just reset the current world and load it again at the end rather than let it go through ReloadPackage + // (which doesn't work for the current world due to some assumptions about worlds, and their lifetimes). + // We also need to skip the build data package as that will also be destroyed by the transition. + if (PackagesToReload.Contains(CurrentWorldPtr->GetOutermost())) { - // Is the currently loaded world being reloaded? If so, we just reset the current world and load it again at the end rather than let it go - // through ReloadPackage (which doesn't work for the editor due to some assumptions it makes about worlds, and their lifetimes). - // We also need to skip the build data package as that will also be destroyed by the call to CreateNewMapForEditing. - if (PackagesToReload.Contains(EditorWorld->GetOutermost())) + // Cache this so we can reload the world later + WorldNameToReload = *CurrentWorldPtr->GetPathName(); + + // Remove the world package from the reload list + PackagesToReload.Remove(CurrentWorldPtr->GetOutermost()); + + // Remove the level build data package from the reload list as creating a new map will unload build data for the current world + for (int32 LevelIndex = 0; LevelIndex < CurrentWorldPtr->GetNumLevels(); ++LevelIndex) { - // Cache this so we can reload the world later - WorldNameToReload = *EditorWorld->GetPathName(); - - // Remove the world package from the reload list - PackagesToReload.Remove(EditorWorld->GetOutermost()); - - // Remove the level build data package from the reload list as creating a new map will unload build data for the current world - for (int32 LevelIndex = 0; LevelIndex < EditorWorld->GetNumLevels(); ++LevelIndex) + ULevel* Level = CurrentWorldPtr->GetLevel(LevelIndex); + if (Level->MapBuildData) { - ULevel* Level = EditorWorld->GetLevel(LevelIndex); - if (Level->MapBuildData) - { - PackagesToReload.Remove(Level->MapBuildData->GetOutermost()); - } + 
PackagesToReload.Remove(Level->MapBuildData->GetOutermost()); } + } - // Remove any streaming levels from the reload list as creating a new map will unload streaming levels for the current world - for (ULevelStreaming* EditorStreamingLevel : EditorWorld->GetStreamingLevels()) + // Remove any streaming levels from the reload list as creating a new map will unload streaming levels for the current world + for (ULevelStreaming* StreamingLevel : CurrentWorldPtr->GetStreamingLevels()) + { + if (StreamingLevel->IsLevelLoaded()) { - if (EditorStreamingLevel->IsLevelLoaded()) - { - UPackage* EditorStreamingLevelPackage = EditorStreamingLevel->GetLoadedLevel()->GetOutermost(); - PackagesToReload.Remove(EditorStreamingLevelPackage); - } + UPackage* StreamingLevelPackage = StreamingLevel->GetLoadedLevel()->GetOutermost(); + PackagesToReload.Remove(StreamingLevelPackage); } + } - // Unload the current world + // Unload the current world + if (GIsEditor) + { GEditor->CreateNewMapForEditing(); } - // Cache the current map build data for the levels of the current world so we can see if they change due to a reload (we can skip this if reloading the current world). - else + else if (UGameEngine* GameEngine = Cast(GEngine)) { - TArray EditorLevels = EditorWorld->GetLevels(); + // Outside of the editor we need to keep the packages alive to stop the world transition from GC'ing them + TGCObjectsScopeGuard KeepPackagesAlive(PackagesToReload); - for (ULevel* Level : EditorLevels) + FString LoadMapError; + GameEngine->LoadMap(GameEngine->GetWorldContextFromWorldChecked(CurrentWorldPtr), FURL(TEXT("/Engine/Maps/Templates/Template_Default")), nullptr, LoadMapError); + } + } + // Cache the current map build data for the levels of the current world so we can see if they change due to a reload (we can skip this if reloading the current world). 
+ else + { + for (ULevel* Level : CurrentWorldPtr->GetLevels()) + { + if (PackagesToReload.Contains(Level->GetOutermost())) { - if (PackagesToReload.Contains(Level->GetOutermost())) + for (ULevelStreaming* StreamingLevel : CurrentWorldPtr->GetStreamingLevels()) { - for (ULevelStreaming* StreamingLevel : EditorWorld->GetStreamingLevels()) + if (StreamingLevel->GetLoadedLevel() == Level) { - if (StreamingLevel->GetLoadedLevel() == Level) - { - EditorWorld->RemoveFromWorld(Level); - StreamingLevel->RemoveLevelFromCollectionForReload(); - RemovedStreamingLevels.Add(StreamingLevel); - break; - } + CurrentWorldPtr->RemoveFromWorld(Level); + StreamingLevel->RemoveLevelFromCollectionForReload(); + RemovedStreamingLevels.Add(StreamingLevel); + break; } } - else - { - LevelsToMapBuildData.Add(Level->GetFName(), Level->MapBuildData); - } + } + else + { + LevelsToMapBuildData.Add(Level->GetFName(), Level->MapBuildData); } } } @@ -649,8 +693,10 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) ::SortPackagesForReload(PackagesToReload); // Remove potential references to to-be deleted objects from the global selection set. - GEditor->GetSelectedObjects()->DeselectAll(); - + if (GIsEditor) + { + GEditor->GetSelectedObjects()->DeselectAll(); + } // Detach all components while loading a package. // This is necessary for the cases where the load replaces existing objects which may be referenced by the attached components. FGlobalComponentReregisterContext ReregisterContext; @@ -704,7 +750,7 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) } // Update the actor browser if a script package was reloaded. - if (bScriptPackageWasReloaded) + if (GIsEditor && bScriptPackageWasReloaded) { GEditor->BroadcastClassPackageLoadedOrUnloaded(); } @@ -713,21 +759,29 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) // Load the previous world (if needed). 
if (!WorldNameToReload.IsNone()) { - TArray WorldNamesToReload; - WorldNamesToReload.Add(WorldNameToReload); - FAssetEditorManager::Get().OpenEditorsForAssets(WorldNamesToReload); + if (GIsEditor) + { + TArray WorldNamesToReload; + WorldNamesToReload.Add(WorldNameToReload); + FAssetEditorManager::Get().OpenEditorsForAssets(WorldNamesToReload); + } + else if (UGameEngine* GameEngine = Cast(GEngine)) + { + FString LoadMapError; + GameEngine->LoadMap(GameEngine->GetWorldContextFromWorldChecked(GameEngine->GetGameWorld()), FURL(*WorldNameToReload.ToString()), nullptr, LoadMapError); + } } // Update the rendering resources for the levels of the current world if their map build data has changed (we skip this if reloading the current world). else { if (LevelsToMapBuildData.Num() > 0) { - UWorld* EditorWorld = GEditor->GetEditorWorldContext().World(); - check(EditorWorld); + UWorld* CurrentWorldPtr = CurrentWorld.Get(); + check(CurrentWorldPtr); - for (int32 LevelIndex = 0; LevelIndex < EditorWorld->GetNumLevels(); ++LevelIndex) + for (int32 LevelIndex = 0; LevelIndex < CurrentWorldPtr->GetNumLevels(); ++LevelIndex) { - ULevel* Level = EditorWorld->GetLevel(LevelIndex); + ULevel* Level = CurrentWorldPtr->GetLevel(LevelIndex); const UMapBuildDataRegistry* OldMapBuildData = LevelsToMapBuildData.FindRef(Level->GetFName()); if (OldMapBuildData && OldMapBuildData != Level->MapBuildData) @@ -737,15 +791,16 @@ UPackageTools::UPackageTools(const FObjectInitializer& ObjectInitializer) } } } + if (RemovedStreamingLevels.Num() > 0) { - UWorld* EditorWorld = GEditor->GetEditorWorldContext().World(); - check(EditorWorld); + UWorld* CurrentWorldPtr = CurrentWorld.Get(); + check(CurrentWorldPtr); for (ULevelStreaming* StreamingLevel : RemovedStreamingLevels) { ULevel* NewLevel = StreamingLevel->GetLoadedLevel(); - EditorWorld->AddToWorld(NewLevel, StreamingLevel->LevelTransform, false); + CurrentWorldPtr->AddToWorld(NewLevel, StreamingLevel->LevelTransform, false); 
StreamingLevel->AddLevelToCollectionAfterReload(); } } diff --git a/Engine/Source/Editor/UnrealEd/Private/PlayLevel.cpp b/Engine/Source/Editor/UnrealEd/Private/PlayLevel.cpp index 9ace75f2bc14..021d9478bf98 100644 --- a/Engine/Source/Editor/UnrealEd/Private/PlayLevel.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/PlayLevel.cpp @@ -454,8 +454,8 @@ void UEditorEngine::EndPlayMap() // Clean up any PIE world objects { - // The trans buffer should never have a PIE object in it. If it does though, as a s - if( GEditor->Trans->ContainsPieObject() ) + // The trans buffer should never have a PIE object in it. If it does though, reset it, which may happen sometimes with selection objects + if( GEditor->Trans->ContainsPieObjects() ) { GEditor->ResetTransaction( NSLOCTEXT("UnrealEd", "TransactionContainedPIEObject", "A PIE object was in the transaction buffer and had to be destroyed") ); } diff --git a/Engine/Source/Editor/UnrealEd/Private/ScopedTransaction.cpp b/Engine/Source/Editor/UnrealEd/Private/ScopedTransaction.cpp index 7d87a08f30e4..d578074f550b 100644 --- a/Engine/Source/Editor/UnrealEd/Private/ScopedTransaction.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/ScopedTransaction.cpp @@ -17,7 +17,7 @@ FScopedTransaction::FScopedTransaction(const TCHAR* TransactionContext, const FT void FScopedTransaction::Construct (const TCHAR* TransactionContext, const FText& SessionName, UObject* PrimaryObject, const bool bShouldActuallyTransact) { - if( bShouldActuallyTransact && GEditor && GEditor->Trans && !GEditor->bIsSimulatingInEditor && ensure(!GIsTransacting)) + if( bShouldActuallyTransact && GEditor && GEditor->Trans && ensure(!GIsTransacting)) { FSlateApplication::Get().OnLogSlateEvent(EEventLog::BeginTransaction, SessionName ); Index = GEditor->BeginTransaction( TransactionContext, SessionName, PrimaryObject ); diff --git a/Engine/Source/Editor/UnrealEd/Private/Toolkits/SGlobalTabSwitchingDialog.cpp 
b/Engine/Source/Editor/UnrealEd/Private/Toolkits/SGlobalTabSwitchingDialog.cpp index 602dca45a5ae..a46caebc11a0 100644 --- a/Engine/Source/Editor/UnrealEd/Private/Toolkits/SGlobalTabSwitchingDialog.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/Toolkits/SGlobalTabSwitchingDialog.cpp @@ -90,7 +90,7 @@ public: const bool bDirtyState = MyAsset->GetOutermost()->IsDirty(); FFormatNamedArguments Args; Args.Add(TEXT("AssetName"), FText::AsCultureInvariant(MyAsset->GetName())); - Args.Add(TEXT("DirtyState"), bDirtyState ? LOCTEXT("AssetModified", " [Modified]") : LOCTEXT("AssetNotModified", "")); + Args.Add(TEXT("DirtyState"), bDirtyState ? LOCTEXT("AssetModified", " [Modified]") : FText::GetEmpty()); FText AssetText = FText::Format(LOCTEXT("AssetEntryLabel", "{AssetName}{DirtyState}"), Args); // Create a thumbnail to represent the asset type diff --git a/Engine/Source/Editor/UnrealEd/Private/UnrealEdSrv.cpp b/Engine/Source/Editor/UnrealEd/Private/UnrealEdSrv.cpp index 939dfcd41c6a..95c2314af404 100644 --- a/Engine/Source/Editor/UnrealEd/Private/UnrealEdSrv.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/UnrealEdSrv.cpp @@ -1355,30 +1355,16 @@ bool UUnrealEdEngine::IsUserInteracting() { // Check to see if the user is in the middle of a drag operation. bool bUserIsInteracting = false; - for( int32 ClientIndex = 0 ; ClientIndex < AllViewportClients.Num() ; ++ClientIndex ) + for (const FEditorViewportClient* VC : AllViewportClients) { // Check for tracking and capture. If a viewport has mouse capture, it could be locking the mouse to the viewport, which means if we prompt with a dialog // while the mouse is locked to a viewport, we wont be able to interact with the dialog. 
- if ( AllViewportClients[ClientIndex]->IsTracking() || AllViewportClients[ClientIndex]->Viewport->HasMouseCapture() ) + if (VC->IsTracking() || (VC->Viewport && VC->Viewport->HasMouseCapture())) { bUserIsInteracting = true; break; } } - - if( !bUserIsInteracting ) - { - // When a property window is open and the user is dragging to modify a property with a spinbox control, - // the viewport clients will have bIsTracking to false. - // We check for the state of the right and left mouse buttons and assume the user is interacting with something if a mouse button is pressed down - -#if PLATFORM_WINDOWS - bool bLeftDown = !!(GetAsyncKeyState(VK_LBUTTON) & 0x8000); - bool bRightDown = !!(GetAsyncKeyState(VK_RBUTTON) & 0x8000); - bUserIsInteracting = bLeftDown || bRightDown; -#endif - } - return bUserIsInteracting; } diff --git a/Engine/Source/Editor/UnrealEd/Private/UserDefinedStructEditorData.cpp b/Engine/Source/Editor/UnrealEd/Private/UserDefinedStructEditorData.cpp index 7ea11246d089..9c5537fe9337 100644 --- a/Engine/Source/Editor/UnrealEd/Private/UserDefinedStructEditorData.cpp +++ b/Engine/Source/Editor/UnrealEd/Private/UserDefinedStructEditorData.cpp @@ -89,12 +89,41 @@ void UUserDefinedStructEditorData::PostEditUndo() class FStructureTransactionAnnotation : public ITransactionObjectAnnotation { public: - FStructureTransactionAnnotation(FStructureEditorUtils::EStructureEditorChangeInfo ChangeInfo) + FStructureTransactionAnnotation() + : ActiveChange(FStructureEditorUtils::Unknown) + { + } + + explicit FStructureTransactionAnnotation(FStructureEditorUtils::EStructureEditorChangeInfo ChangeInfo) : ActiveChange(ChangeInfo) { } - virtual void AddReferencedObjects(FReferenceCollector& Collector) override { /** Don't need this functionality for now */ } + //~ ITransactionObjectAnnotation interface + virtual void AddReferencedObjects(FReferenceCollector& Collector) override {} + virtual void Serialize(FArchive& Ar) override + { + enum class EVersion : uint8 + { + 
InitialVersion = 0, + // ------------------------------------------------------ + VersionPlusOne, + LatestVersion = VersionPlusOne - 1 + }; + + EVersion Version = EVersion::LatestVersion; + Ar << Version; + + if (Version > EVersion::LatestVersion) + { + Ar.SetError(); + return; + } + + int32 ActiveChangeInt = (int32)ActiveChange; + Ar << ActiveChangeInt; + ActiveChange = (FStructureEditorUtils::EStructureEditorChangeInfo)ActiveChangeInt; + } FStructureEditorUtils::EStructureEditorChangeInfo GetActiveChange() { @@ -105,9 +134,14 @@ protected: FStructureEditorUtils::EStructureEditorChangeInfo ActiveChange; }; -TSharedPtr UUserDefinedStructEditorData::GetTransactionAnnotation() const +TSharedPtr UUserDefinedStructEditorData::FactoryTransactionAnnotation(const ETransactionAnnotationCreationMode InCreationMode) const { - return MakeShareable(new FStructureTransactionAnnotation(FStructureEditorUtils::FStructEditorManager::ActiveChange)); + if (InCreationMode == UObject::ETransactionAnnotationCreationMode::DefaultInstance) + { + return MakeShared(); + } + + return MakeShared(FStructureEditorUtils::FStructEditorManager::ActiveChange); } void UUserDefinedStructEditorData::PostEditUndo(TSharedPtr TransactionAnnotation) diff --git a/Engine/Source/Editor/UnrealEd/Public/EditorUndoClient.h b/Engine/Source/Editor/UnrealEd/Public/EditorUndoClient.h index 180407034c1d..4eff231a9fca 100644 --- a/Engine/Source/Editor/UnrealEd/Public/EditorUndoClient.h +++ b/Engine/Source/Editor/UnrealEd/Public/EditorUndoClient.h @@ -7,6 +7,7 @@ #pragma once #include "CoreMinimal.h" +#include "Misc/ITransaction.h" /** * Interface for tools wanting to handle undo/redo operations @@ -21,12 +22,12 @@ public: * Called to see if the context of the current undo/redo operation is a match for the client * Default state matching old context-less undo is Context="" and PrimaryObject=NULL * - * @param InContext A text string providing context for the undo operation; can be the empty string - * @param 
PrimaryObject The object marked as the primary object for the undo operation; can be NULL + * @param InContext The transaction context + * @param TransactionObjectContexts The transaction context of each object involved in this transaction * * @return True if client wishes to handle the undo/redo operation for this context. False otherwise */ - virtual bool MatchesContext( const FString& InContext, UObject* PrimaryObject ) const { return true; } + virtual bool MatchesContext( const FTransactionContext& InContext, const TArray>& TransactionObjectContexts ) const { return true; } /** * Signal that client should run any PostUndo code diff --git a/Engine/Source/Editor/UnrealEd/Public/EditorViewportClient.h b/Engine/Source/Editor/UnrealEd/Public/EditorViewportClient.h index 86950dc2dfd0..f9dc60293dfd 100644 --- a/Engine/Source/Editor/UnrealEd/Public/EditorViewportClient.h +++ b/Engine/Source/Editor/UnrealEd/Public/EditorViewportClient.h @@ -273,6 +273,10 @@ public: virtual ~FEditorViewportClient(); + /** Non-copyable */ + FEditorViewportClient(const FEditorViewportClient&) = delete; + FEditorViewportClient& operator=(const FEditorViewportClient&) = delete; + /** * Toggles whether or not the viewport updates in realtime and returns the updated state. * @@ -450,6 +454,13 @@ public: virtual void Draw(const FSceneView* View,FPrimitiveDrawInterface* PDI) override; virtual void Draw(FViewport* Viewport,FCanvas* Canvas) override; + /** + * Gets the world space cursor info from the current mouse position + * + * @return An FViewportCursorLocation containing information about the mouse position in world space. 
+ */ + FViewportCursorLocation GetCursorWorldLocationFromMousePos(); + /** FViewportClient interface */ virtual void ProcessScreenShots(FViewport* Viewport) override; virtual void RedrawRequested(FViewport* Viewport) override; diff --git a/Engine/Source/Editor/UnrealEd/Public/EditorWorldExtension.h b/Engine/Source/Editor/UnrealEd/Public/EditorWorldExtension.h index 69f59cbf3a7e..2cdf8ea93277 100644 --- a/Engine/Source/Editor/UnrealEd/Public/EditorWorldExtension.h +++ b/Engine/Source/Editor/UnrealEd/Public/EditorWorldExtension.h @@ -18,6 +18,26 @@ class FViewport; class AActor; class FEditorViewportClient; +enum class EEditorWorldExtensionTransitionState : uint8 +{ + TransitionNone, + TransitionAll, + TransitionPIEOnly, + TransitionNonPIEOnly +}; + +USTRUCT() +struct FEditorWorldExtensionActorData +{ + GENERATED_BODY() + + UPROPERTY() + AActor* Actor; + + UPROPERTY() + bool bValidForPIE; +}; + UCLASS() class UNREALED_API UEditorWorldExtension : public UObject { @@ -47,15 +67,18 @@ public: /** Gets the world owning this extension */ virtual UWorld* GetWorld() const override; + /** Gets the world owning this extension's non-PIE valid actors when current world is a play world */ + virtual UWorld* GetLastEditorWorld() const; + /** Spawns a transient actor that we can use in the current world of this extension (templated for convenience) */ template - inline T* SpawnTransientSceneActor(const FString& ActorName, const bool bWithSceneComponent = false, const EObjectFlags InObjectFlags = EObjectFlags::RF_Transient | EObjectFlags::RF_DuplicateTransient ) + inline T* SpawnTransientSceneActor(const FString& ActorName, const bool bWithSceneComponent = false, const EObjectFlags InObjectFlags = EObjectFlags::RF_Transient | EObjectFlags::RF_DuplicateTransient, const bool bValidForPIE = false ) { - return CastChecked(SpawnTransientSceneActor(T::StaticClass(), ActorName, bWithSceneComponent, InObjectFlags)); + return CastChecked(SpawnTransientSceneActor(T::StaticClass(), ActorName, 
bWithSceneComponent, InObjectFlags, bValidForPIE)); } /** Spawns a transient actor that we can use in the current world of this extension */ - AActor* SpawnTransientSceneActor(TSubclassOf ActorClass, const FString& ActorName, const bool bWithSceneComponent = false, const EObjectFlags InObjectFlags = EObjectFlags::RF_Transient | EObjectFlags::RF_DuplicateTransient ); + AActor* SpawnTransientSceneActor(TSubclassOf ActorClass, const FString& ActorName, const bool bWithSceneComponent = false, const EObjectFlags InObjectFlags = EObjectFlags::RF_Transient | EObjectFlags::RF_DuplicateTransient, const bool bValidForPIE = false); /** Destroys a transient actor we created earlier */ void DestroyTransientActor(AActor* Actor); @@ -75,7 +98,7 @@ public: protected: /** Reparent actors to a new world */ - virtual void TransitionWorld(UWorld* NewWorld); + virtual void TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState); /** Give child class a chance to act on entering simulate mode */ virtual void EnteredSimulateInEditor() {}; @@ -95,7 +118,7 @@ private: void InitInternal(UEditorWorldExtensionCollection* InOwningExtensionsCollection); UPROPERTY() - TArray ExtensionActors; + TArray ExtensionActors; /** If this extension is currently being ticked */ bool bActive; @@ -121,6 +144,9 @@ public: /** Gets the world from the world context */ virtual UWorld* GetWorld() const override; + /** Gets the last editor world, will only be non-null when current world is a play world. */ + UWorld* GetLastEditorWorld() const; + /** * Checks if the passed extension already exists and creates one if it doesn't. * @param EditorExtensionClass the subclass of an extension to create if necessary and add. 
@@ -161,7 +187,10 @@ public: private: /** Sets the world for this collection and gives every extension an opportunity to transition */ - void SetWorld(UWorld* World); + void SetWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState /* = EEditorWorldExtensionTransitionState::TransitionAll */); + + /** Transitions actors in every extension to the specified world */ + void TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState); /** Called by the editor after PIE or Simulate is started */ void PostPIEStarted( bool bIsSimulatingInEditor ); @@ -178,9 +207,9 @@ private: /** World context */ TWeakObjectPtr Currentworld; - /** After entering Simulate, this stores the counterpart editor world to the Simulate world, so that we - know this collection needs to transition back to editor world after Simulate finishes */ - TWeakObjectPtr EditorWorldOnSimulate; + /** After entering Simulate or PIE, this stores the counterpart editor world to the play world, so that we + know this collection needs to transition back to editor world after Simulate or PIE finishes. */ + TWeakObjectPtr LastEditorWorld; /** List of extensions along with their reference count. Extensions will only be truly removed and Shutdown() after their reference count drops to zero. 
*/ diff --git a/Engine/Source/Editor/UnrealEd/Public/FbxImporter.h b/Engine/Source/Editor/UnrealEd/Public/FbxImporter.h index 85513edc41a1..5633a5b4457c 100644 --- a/Engine/Source/Editor/UnrealEd/Public/FbxImporter.h +++ b/Engine/Source/Editor/UnrealEd/Public/FbxImporter.h @@ -11,6 +11,7 @@ #include "Factories/FbxStaticMeshImportData.h" #include "Factories/FbxTextureImportData.h" #include "Factories/FbxSceneImportFactory.h" +#include "Materials/MaterialInterface.h" #include "MeshBuild.h" #include "Algo/LevenshteinDistance.h" @@ -29,7 +30,6 @@ class UInterpTrackMoveAxis; class ULightComponent; class UMaterial; class UMaterialInstanceConstant; -class UMaterialInterface; class UPhysicsAsset; class USkeletalMesh; class USkeleton; @@ -1183,7 +1183,12 @@ public: FbxSurfaceMaterial* FbxMaterial; UMaterialInterface* Material; - FString GetName() const { return FbxMaterial ? ANSI_TO_TCHAR(FbxMaterial->GetName()) : TEXT("None"); } + FFbxMaterial() + : FbxMaterial(nullptr) + , Material(nullptr) + {} + + FString GetName() const { return FbxMaterial ? ANSI_TO_TCHAR(FbxMaterial->GetName()) : (Material != nullptr ? Material->GetName() : TEXT("None")); } }; /** diff --git a/Engine/Source/Editor/UnrealEd/Public/LevelEditorViewport.h b/Engine/Source/Editor/UnrealEd/Public/LevelEditorViewport.h index 2cdcd8b64862..c7a20bd4b51a 100644 --- a/Engine/Source/Editor/UnrealEd/Public/LevelEditorViewport.h +++ b/Engine/Source/Editor/UnrealEd/Public/LevelEditorViewport.h @@ -424,14 +424,6 @@ public: /** Set the global ptr to the last viewport to receive a key press */ void SetLastKeyViewport(); - /** - * Gets the world space cursor info from the current mouse position - * - * @param InViewportClient The viewport client to check for mouse position and to set up the scene view. - * @return An FViewportCursorLocation containing information about the mouse position in world space. - */ - FViewportCursorLocation GetCursorWorldLocationFromMousePos(); - /** * Access the 'active' actor lock. 
This is the actor locked to the viewport via the viewport menus. * It is forced to be inactive if Matinee is controlling locking. diff --git a/Engine/Source/Editor/UnrealEd/Public/PackageTools.h b/Engine/Source/Editor/UnrealEd/Public/PackageTools.h index 988d97f912ee..b6a14a986ca6 100644 --- a/Engine/Source/Editor/UnrealEd/Public/PackageTools.h +++ b/Engine/Source/Editor/UnrealEd/Public/PackageTools.h @@ -75,6 +75,18 @@ public: */ static bool UnloadPackages( const TArray& PackagesToUnload, FText& OutErrorMessage ); + enum class EReloadPackagesInteractionMode : uint8 + { + /** Interactive, ask the user what to do */ + Interactive, + + /** Non-interactive, assume a positive response */ + AssumePositive, + + /** Non-interactive, assume a negative response */ + AssumeNegative, + }; + /** * Helper function that attempts to reload the specified top-level packages. * @@ -93,8 +105,20 @@ public: * * @return true if the set of loaded packages was changed */ + DEPRECATED(4.21, "ReloadPackages taking bInteractive is deprecated. Use the version taking EReloadPackagesInteractionMode instead.") static bool ReloadPackages( const TArray& PackagesToReload, FText& OutErrorMessage, const bool bInteractive = true ); + /** + * Helper function that attempts to reload the specified top-level packages. + * + * @param PackagesToReload The list of packages that should be reloaded + * @param OutErrorMessage An error message specifying any problems with reloading packages + * @param InteractionMode Whether the function is allowed to ask the user questions (such as whether to reload dirty packages) + * + * @return true if the set of loaded packages was changed + */ + static bool ReloadPackages( const TArray& PackagesToReload, FText& OutErrorMessage, const EReloadPackagesInteractionMode InteractionMode = EReloadPackagesInteractionMode::Interactive ); + /** * Exports the given packages to files. 
* diff --git a/Engine/Source/Editor/VREditor/Public/VREditorMode.h b/Engine/Source/Editor/VREditor/Public/VREditorMode.h index d43bd8f647ee..940b5f294c23 100644 --- a/Engine/Source/Editor/VREditor/Public/VREditorMode.h +++ b/Engine/Source/Editor/VREditor/Public/VREditorMode.h @@ -318,7 +318,7 @@ public: protected: - virtual void TransitionWorld(UWorld* NewWorld) override; + virtual void TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) override; private: diff --git a/Engine/Source/Editor/VREditor/UI/VREditorUISystem.cpp b/Engine/Source/Editor/VREditor/UI/VREditorUISystem.cpp index 2686c0180634..70c58a91a1b8 100644 --- a/Engine/Source/Editor/VREditor/UI/VREditorUISystem.cpp +++ b/Engine/Source/Editor/VREditor/UI/VREditorUISystem.cpp @@ -2593,32 +2593,38 @@ void UVREditorUISystem::UpdateExternalSlateUI(TSharedRef InWidget, FNam } } -void UVREditorUISystem::TransitionWorld(UWorld* NewWorld) +void UVREditorUISystem::TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) { - for (auto& CurrentUI : FloatingUIs) + check(NewWorld != nullptr); + + if (TransitionState == EEditorWorldExtensionTransitionState::TransitionAll || + TransitionState == EEditorWorldExtensionTransitionState::TransitionNonPIEOnly) { - AVREditorFloatingUI* FloatingUI = CurrentUI.Value; - if (FloatingUI != nullptr) + for (auto& CurrentUI : FloatingUIs) { - UUserWidget* UserWidget = FloatingUI->GetUserWidget(); - if (UserWidget != nullptr) + AVREditorFloatingUI* FloatingUI = CurrentUI.Value; + if (FloatingUI != nullptr) { - // Only reparent the UserWidget if it was parented to a level to begin with. 
It may have been parented to an actor or - // some other object that doesn't require us to rename anything - ULevel* ExistingWidgetOuterLevel = Cast(UserWidget->GetOuter()); - if (ExistingWidgetOuterLevel != nullptr && ExistingWidgetOuterLevel != NewWorld->PersistentLevel) + UUserWidget* UserWidget = FloatingUI->GetUserWidget(); + if (UserWidget != nullptr) { - UserWidget->Rename(nullptr, NewWorld->PersistentLevel); + // Only reparent the UserWidget if it was parented to a level to begin with. It may have been parented to an actor or + // some other object that doesn't require us to rename anything + ULevel* ExistingWidgetOuterLevel = Cast(UserWidget->GetOuter()); + if (ExistingWidgetOuterLevel != nullptr && ExistingWidgetOuterLevel != NewWorld->PersistentLevel) + { + UserWidget->Rename(nullptr, NewWorld->PersistentLevel); + } } } } - } - - AVREditorFloatingUI* TabManagerUI = GetPanel(TabManagerPanelID); - if (TabManagerUI != nullptr) - { - TabManagerUI->GetWidgetComponent()->UpdateWidget(); - ProxyTabManager->SetParentWindow(TabManagerUI->GetWidgetComponent()->GetSlateWindow().ToSharedRef()); + + AVREditorFloatingUI* TabManagerUI = GetPanel(TabManagerPanelID); + if (TabManagerUI != nullptr) + { + TabManagerUI->GetWidgetComponent()->UpdateWidget(); + ProxyTabManager->SetParentWindow(TabManagerUI->GetWidgetComponent()->GetSlateWindow().ToSharedRef()); + } } } diff --git a/Engine/Source/Editor/VREditor/UI/VREditorUISystem.h b/Engine/Source/Editor/VREditor/UI/VREditorUISystem.h index e202c11f748a..5443d155a4f2 100644 --- a/Engine/Source/Editor/VREditor/UI/VREditorUISystem.h +++ b/Engine/Source/Editor/VREditor/UI/VREditorUISystem.h @@ -1,4 +1,4 @@ -// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
#pragma once @@ -8,6 +8,7 @@ #include "UObject/Object.h" #include "Widgets/SOverlay.h" #include "VRRadialMenuHandler.h" +#include "EditorWorldExtension.h" #include "VREditorUISystem.generated.h" class AVREditorDockableWindow; @@ -206,7 +207,7 @@ public: void UpdateExternalSlateUI(TSharedRef InWidget, FName Name); /** Transition the user widgets to a new world */ - void TransitionWorld(UWorld* NewWorld); + void TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState); UVRRadialMenuHandler* GetRadialMenuHandler() { diff --git a/Engine/Source/Editor/VREditor/VREditorActions.cpp b/Engine/Source/Editor/VREditor/VREditorActions.cpp index c7e8a946c930..dd548807a3f0 100644 --- a/Engine/Source/Editor/VREditor/VREditorActions.cpp +++ b/Engine/Source/Editor/VREditor/VREditorActions.cpp @@ -443,6 +443,7 @@ void FVREditorActionCallbacks::PauseSequencePlayback(UVREditorMode* InVRMode) ISequencer* CurrentSequencer = InVRMode->GetCurrentSequencer(); if (CurrentSequencer != nullptr) { + CurrentSequencer->SetPlaybackSpeed(1.0f); CurrentSequencer->Pause(); } } diff --git a/Engine/Source/Editor/VREditor/VREditorMode.cpp b/Engine/Source/Editor/VREditor/VREditorMode.cpp index 0ca523ae7d19..2af8f3d4f8bc 100644 --- a/Engine/Source/Editor/VREditor/VREditorMode.cpp +++ b/Engine/Source/Editor/VREditor/VREditorMode.cpp @@ -808,7 +808,6 @@ bool UVREditorMode::IsHandAimingTowardsCapsule(UViewportInteractor* Interactor, UVREditorInteractor* UVREditorMode::GetHandInteractor( const EControllerHand ControllerHand ) const { UVREditorInteractor* ResultInteractor = ControllerHand == EControllerHand::Left ? 
LeftHandInteractor : RightHandInteractor; - check( ResultInteractor != nullptr ); return ResultInteractor; } @@ -904,11 +903,11 @@ void UVREditorMode::TogglePIEAndVREditor() } } -void UVREditorMode::TransitionWorld(UWorld* NewWorld) +void UVREditorMode::TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) { - Super::TransitionWorld(NewWorld); + Super::TransitionWorld(NewWorld, TransitionState); - UISystem->TransitionWorld(NewWorld); + UISystem->TransitionWorld(NewWorld, TransitionState); } void UVREditorMode::StartViewport(TSharedPtr Viewport) diff --git a/Engine/Source/Editor/VREditor/VREditorModeManager.cpp b/Engine/Source/Editor/VREditor/VREditorModeManager.cpp index 60007ad23769..667e989927fa 100644 --- a/Engine/Source/Editor/VREditor/VREditorModeManager.cpp +++ b/Engine/Source/Editor/VREditor/VREditorModeManager.cpp @@ -27,7 +27,8 @@ FVREditorModeManager::FVREditorModeManager() : CurrentVREditorMode( nullptr ), bEnableVRRequest( false ), - HMDWornState( EHMDWornState::Unknown ) + HMDWornState( EHMDWornState::Unknown ), + bAddedViewportWorldInteractionExtension( false ) { } @@ -179,7 +180,20 @@ void FVREditorModeManager::StartVREditorMode( const bool bForceWithoutHMD ) UEditorWorldExtensionCollection* ExtensionCollection = GEditor->GetEditorWorldExtensionsManager()->GetEditorWorldExtensions(World); check(ExtensionCollection != nullptr); - UViewportWorldInteraction* ViewportWorldInteraction = Cast(ExtensionCollection->AddExtension(UViewportWorldInteraction::StaticClass())); + // Add viewport world interaction to the collection if not already there + UViewportWorldInteraction* ViewportWorldInteraction = Cast(ExtensionCollection->FindExtension(UViewportWorldInteraction::StaticClass())); + if (ViewportWorldInteraction == nullptr) + { + ViewportWorldInteraction = NewObject(ExtensionCollection); + check(ViewportWorldInteraction != nullptr); + + ExtensionCollection->AddExtension(ViewportWorldInteraction); + 
bAddedViewportWorldInteractionExtension = true; + } + else + { + ViewportWorldInteraction->UseVWInteractions(); + } // Create vr editor mode. VRMode = NewObject(); @@ -216,7 +230,16 @@ void FVREditorModeManager::CloseVREditor( const bool bShouldDisableStereo ) UEditorWorldExtensionCollection* Collection = CurrentVREditorMode->GetOwningCollection(); check(Collection != nullptr); Collection->RemoveExtension(CurrentVREditorMode); - Collection->RemoveExtension(WorldInteraction); + + if (bAddedViewportWorldInteractionExtension) + { + Collection->RemoveExtension(WorldInteraction); + bAddedViewportWorldInteractionExtension = false; + } + else + { + WorldInteraction->UseLegacyInteractions(); + } CurrentVREditorMode = nullptr; } diff --git a/Engine/Source/Editor/VREditor/VREditorModeManager.h b/Engine/Source/Editor/VREditor/VREditorModeManager.h index 2be5df3dfd8d..3841c548c883 100644 --- a/Engine/Source/Editor/VREditor/VREditorModeManager.h +++ b/Engine/Source/Editor/VREditor/VREditorModeManager.h @@ -75,4 +75,7 @@ private: /** True when we detect that the user is wearing the HMD */ EHMDWornState::Type HMDWornState; + + /** True if the ViewportWorldInteraction extension was not pre-existing. 
*/ + bool bAddedViewportWorldInteractionExtension; }; diff --git a/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.cpp b/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.cpp index e13cbd460492..8cb4b64cfb38 100644 --- a/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.cpp +++ b/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.cpp @@ -1310,6 +1310,10 @@ void UVREditorMotionControllerInteractor::UpdateSplineLaser(const FVector& InSta { if (LaserSplineComponent) { + + LaserStart = InStartLocation; + LaserEnd = InEndLocation; + // Clear the segments before updating it LaserSplineComponent->ClearSplinePoints(true); @@ -1437,8 +1441,7 @@ void UVREditorMotionControllerInteractor::UpdateRadialMenuInput( const float Del { if (bIsScrubbingSequence) { - const float NewPlayRate = 0.0f; - FVREditorActionCallbacks::PlaySequenceAtRate(VRMode, NewPlayRate); + FVREditorActionCallbacks::PauseSequencePlayback(VRMode); } if (UISystem.IsShowingRadialMenu(this)) diff --git a/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.h b/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.h index 881227601698..4200841a99d9 100644 --- a/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.h +++ b/Engine/Source/Editor/VREditor/VREditorMotionControllerInteractor.h @@ -17,7 +17,7 @@ class UStaticMeshSocket; * Represents the interactor in the world */ UCLASS() -class UVREditorMotionControllerInteractor : public UVREditorInteractor +class VREDITOR_API UVREditorMotionControllerInteractor : public UVREditorInteractor { GENERATED_BODY() @@ -106,6 +106,10 @@ public: return bIsScrubbingSequence; } + FVector LaserStart; + + FVector LaserEnd; + protected: // ViewportInteractor diff --git a/Engine/Source/Editor/ViewportInteraction/ActorViewportTransformable.cpp b/Engine/Source/Editor/ViewportInteraction/ActorViewportTransformable.cpp index 1e4cafac1e32..d20d15181278 100644 --- 
a/Engine/Source/Editor/ViewportInteraction/ActorViewportTransformable.cpp +++ b/Engine/Source/Editor/ViewportInteraction/ActorViewportTransformable.cpp @@ -35,7 +35,7 @@ void FActorViewportTransformable::ApplyTransform( const FTransform& NewTransform // @todo vreditor: InvalidateLightingCacheDetailed() causes static mesh components to re-create their physics state, // cancelling all velocity on the rigid body. So we currently avoid calling it for simulated actors. - if( !IsPhysicallySimulated() ) + if( !IsPhysicallySimulated() && !GIsDemoMode) { Actor->InvalidateLightingCacheDetailed( bOnlyTranslationChanged ); } diff --git a/Engine/Source/Editor/ViewportInteraction/Public/ViewportWorldInteraction.h b/Engine/Source/Editor/ViewportInteraction/Public/ViewportWorldInteraction.h index 08515f395484..887d7177b52e 100644 --- a/Engine/Source/Editor/ViewportInteraction/Public/ViewportWorldInteraction.h +++ b/Engine/Source/Editor/ViewportInteraction/Public/ViewportWorldInteraction.h @@ -36,6 +36,13 @@ class IViewportInteractableInterface; class UViewportInteractionAssetContainer; class UViewportInteractor; +UENUM() +enum class EViewportWorldInteractionType : uint8 +{ + VR = 0, + Legacy = 1 +}; + UCLASS() class VIEWPORTINTERACTION_API UViewportWorldInteraction : public UEditorWorldExtension { @@ -48,6 +55,9 @@ public: virtual void Init() override; virtual void Shutdown() override; virtual void Tick( float DeltaSeconds ) override; + + /** Initialize colors */ + void InitColors(); /** Adds interactor to the worldinteraction */ void AddInteractor( UViewportInteractor* Interactor ); @@ -343,9 +353,12 @@ public: /** Forces the VWI to fall back to standard desktop interactions */ void UseLegacyInteractions(); + /** Sets the VWI to use its own interactions */ + void UseVWInteractions(); + protected: - virtual void TransitionWorld(UWorld* NewWorld) override; + virtual void TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) override; virtual void 
EnteredSimulateInEditor() override; virtual void LeftSimulateInEditor(UWorld* SimulateWorld) override; diff --git a/Engine/Source/Editor/ViewportInteraction/ViewportWorldInteraction.cpp b/Engine/Source/Editor/ViewportInteraction/ViewportWorldInteraction.cpp index f2c2cfca924e..7819778ec61f 100644 --- a/Engine/Source/Editor/ViewportInteraction/ViewportWorldInteraction.cpp +++ b/Engine/Source/Editor/ViewportInteraction/ViewportWorldInteraction.cpp @@ -300,15 +300,7 @@ UViewportWorldInteraction::UViewportWorldInteraction(): void UViewportWorldInteraction::Init() { - Colors.SetNumZeroed( (int32)EColors::TotalCount ); - { - Colors[(int32)EColors::DefaultColor] = FLinearColor(0.7f, 0.7f, 0.7f, 1.0f); - Colors[(int32)EColors::Forward] = FLinearColor(0.594f, 0.0197f, 0.0f, 1.0f); - Colors[(int32)EColors::Right] = FLinearColor(0.1349f, 0.3959f, 0.0f, 1.0f); - Colors[(int32)EColors::Up] = FLinearColor(0.0251f, 0.207f, 0.85f, 1.0f); - Colors[(int32)EColors::GizmoHover] = FLinearColor::Yellow; - Colors[(int32)EColors::GizmoDragging] = FLinearColor::Yellow; - } + InitColors(); AppTimeEntered = FTimespan::FromSeconds( FApp::GetCurrentTime() ); @@ -346,6 +338,19 @@ void UViewportWorldInteraction::Init() CurrentTickNumber = 0; } +void UViewportWorldInteraction::InitColors() +{ + Colors.SetNumZeroed((int32)EColors::TotalCount); + { + Colors[(int32)EColors::DefaultColor] = FLinearColor(0.7f, 0.7f, 0.7f, 1.0f); + Colors[(int32)EColors::Forward] = FLinearColor(0.594f, 0.0197f, 0.0f, 1.0f); + Colors[(int32)EColors::Right] = FLinearColor(0.1349f, 0.3959f, 0.0f, 1.0f); + Colors[(int32)EColors::Up] = FLinearColor(0.0251f, 0.207f, 0.85f, 1.0f); + Colors[(int32)EColors::GizmoHover] = FLinearColor::Yellow; + Colors[(int32)EColors::GizmoDragging] = FLinearColor::Yellow; + } +} + void UViewportWorldInteraction::Shutdown() { SetActive(false); @@ -397,7 +402,7 @@ void UViewportWorldInteraction::Shutdown() GizmoType.Reset(); // Remove the input pre-processor - if (InputProcessor.IsValid()) + if 
(InputProcessor.IsValid() && FSlateApplication::IsInitialized()) { FSlateApplication::Get().UnregisterInputPreProcessor(InputProcessor); InputProcessor.Reset(); @@ -406,13 +411,19 @@ void UViewportWorldInteraction::Shutdown() GEditor->OnEditorClose().RemoveAll( this ); } -void UViewportWorldInteraction::TransitionWorld(UWorld* NewWorld) +void UViewportWorldInteraction::TransitionWorld(UWorld* NewWorld, EEditorWorldExtensionTransitionState TransitionState) { - Super::TransitionWorld(NewWorld); + check(NewWorld != nullptr); - for (UViewportInteractor* Interactor : Interactors) + Super::TransitionWorld(NewWorld, TransitionState); + + if (TransitionState == EEditorWorldExtensionTransitionState::TransitionAll || + TransitionState == EEditorWorldExtensionTransitionState::TransitionNonPIEOnly) { - Interactor->Rename(nullptr, NewWorld->PersistentLevel); + for (UViewportInteractor* Interactor : Interactors) + { + Interactor->Rename(nullptr, NewWorld->PersistentLevel); + } } } @@ -3657,5 +3668,46 @@ void UViewportWorldInteraction::UseLegacyInteractions() USelection::SelectionChangedEvent.RemoveAll(this); } +void UViewportWorldInteraction::UseVWInteractions() +{ + // Add colors + InitColors(); + + // Setup the asset container. + AssetContainer = &LoadAssetContainer(); + + if (DefaultMouseCursorInteractorRefCount == 0) + { + this->AddMouseCursorInteractor(); + } + if (DefaultOptionalViewportClient != nullptr) + { + DefaultOptionalViewportClient->ShowWidget(false); + } + + // Start with the default transformer + SetTransformer(nullptr); + + // Spawn the transform gizmo + SpawnTransformGizmoIfNeeded(); + + const bool bShouldBeVisible = false; + const bool bPropagateToChildren = true; + TransformGizmoActor->GetRootComponent()->SetVisibility(bShouldBeVisible, bPropagateToChildren); + + // Create and add the input pre-processor to the slate application. 
+ if (!InputProcessor.IsValid()) + { + InputProcessor = MakeShareable(new FViewportInteractionInputProcessor(this)); + FSlateApplication::Get().RegisterInputPreProcessor(InputProcessor); + } + + // Pretend that actor selection changed, so that our gizmo refreshes right away based on which objects are selected + GEditor->NoteSelectionChange(); + GEditor->SelectNone(true, true, false); + + CurrentTickNumber = 0; +} + #undef LOCTEXT_NAMESPACE diff --git a/Engine/Source/Programs/MemoryProfiler2/App.config b/Engine/Source/Programs/MemoryProfiler2/App.config index 9c3a5dc093ff..8d4f14eb4557 100644 --- a/Engine/Source/Programs/MemoryProfiler2/App.config +++ b/Engine/Source/Programs/MemoryProfiler2/App.config @@ -5,5 +5,6 @@ + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/README.md b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/README.md new file mode 100644 index 000000000000..ec7afaf657da --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/README.md @@ -0,0 +1,7 @@ + +CODING STANDARDS +================ + +- As close to UE4 standards as t practically possible (https://docs.unrealengine.com/en-us/Programming/Development/CodingStandard) +- Strings are UTF-8 + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.sln b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.sln new file mode 100644 index 000000000000..d4c5d2e77357 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.sln @@ -0,0 +1,34 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.27130.2027 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "WebRTCProxy", "WebRTCProxy.vcxproj", "{777948DE-A5DB-4767-A80F-7B16F1FACF17}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{CB07EBB8-7D9D-49C2-BA7C-916378CDB266}" + ProjectSection(SolutionItems) = preProject + 
_clang-format = _clang-format + README.md = README.md + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x64 = Debug|x64 + Development|x64 = Development|x64 + Shipping|x64 = Shipping|x64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Debug|x64.ActiveCfg = Debug|x64 + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Debug|x64.Build.0 = Debug|x64 + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Development|x64.ActiveCfg = Development|x64 + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Development|x64.Build.0 = Development|x64 + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Shipping|x64.ActiveCfg = Shipping|x64 + {777948DE-A5DB-4767-A80F-7B16F1FACF17}.Shipping|x64.Build.0 = Shipping|x64 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {99EC75A0-50A8-43F6-8DA0-0C41032FF439} + EndGlobalSection +EndGlobal diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj new file mode 100644 index 000000000000..8c38ba08be9c --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj @@ -0,0 +1,189 @@ + + + + + Debug + x64 + + + Development + x64 + + + Shipping + x64 + + + + + + + + + + + + + + + + + + + + + Create + WebRTCProxyPCH.h + Create + Create + WebRTCProxyPCH.h + WebRTCProxyPCH.h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 15.0 + {777948DE-A5DB-4767-A80F-7B16F1FACF17} + Win32Proj + WebRTCProxy + 10.0.15063.0 + WebRTCProxy + + + + Application + true + v141 + Unicode + + + Application + false + v141 + true + Unicode + + + Application + false + v141 + true + Unicode + + + + + + + + + + + + + + + + + + + + + true + + + false + + + false + + + + Use + Level3 + Disabled + true + 
_DEBUG;_CONSOLE;EG_BUILD_DEBUG=1;%(PreprocessorDefinitions) + true + WebRTCProxyPCH.h + %(AdditionalIncludeDirectories) + stdcpp17 + + + Console + true + + + + + Use + Level3 + MaxSpeed + true + true + true + NDEBUG;_CONSOLE;EG_BUILD_SHIPPING=1;%(PreprocessorDefinitions) + true + WebRTCProxyPCH.h + %(AdditionalIncludeDirectories) + stdcpp17 + + + Console + true + true + true + + + + + Use + Level3 + MaxSpeed + true + true + true + NDEBUG;_CONSOLE;EG_BUILD_DEVELOPMENT=1;%(PreprocessorDefinitions) + true + WebRTCProxyPCH.h + %(AdditionalIncludeDirectories) + stdcpp17 + + + Console + true + true + true + + + + + + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj.filters b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj.filters new file mode 100644 index 000000000000..f1872fbe10f8 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/WebRTCProxy.vcxproj.filters @@ -0,0 +1,110 @@ + + + + + {b50b70f7-e6ef-4f6b-80ac-2d47296bab76} + + + {a9ea3ac3-3edf-401f-9136-63912ba63ef5} + + + + + Logging + + + Logging + + + Logging + + + Logging + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + + + + + + + + + + + Logging + + + Logging + + + Logging + + + Logging + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + stuff + + + + + + + + + + + + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_AWS_WebRTCProxy.bat b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_AWS_WebRTCProxy.bat new file mode 100644 index 000000000000..764d42b94700 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_AWS_WebRTCProxy.bat @@ -0,0 +1,6 @@ +@echo off +pushd %~dp0 + +Powershell.exe -executionpolicy unrestricted -File Start_AWS_WebRTCProxy.ps1 + +popd \ No newline at end of file diff --git 
a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_WebRTCProxy.bat b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_WebRTCProxy.bat new file mode 100644 index 000000000000..f821fa3be77d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/bin/Start_WebRTCProxy.bat @@ -0,0 +1,5 @@ +pushd %~dp0 + +WebRTCProxy_x64_Development.exe + +popd \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.cpp new file mode 100644 index 000000000000..a3d16b106032 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.cpp @@ -0,0 +1,86 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "AsyncConnection.h" +#include "Logging.h" + +FAsyncConnection::FAsyncConnection(const std::string& ConnectionName, IAsyncConnectionObserver& Observer) : + Name(ConnectionName), + Observer(Observer) +{} + +void FAsyncConnection::Connect(const std::string& IP, uint16_t Port) +{ + SocketAddress.SetIP(IP); + SocketAddress.SetPort(Port); + + verify(!Socket || Socket->GetState() == rtc::AsyncSocket::CS_CLOSED); + Socket.reset(rtc::ThreadManager::Instance()->CurrentThread()->socketserver()->CreateAsyncSocket(SocketAddress.family(), SOCK_STREAM)); + + Socket->SignalConnectEvent.connect(this, &FAsyncConnection::OnConnect); + Socket->SignalReadEvent.connect(this, &FAsyncConnection::OnRead); + Socket->SignalCloseEvent.connect(this, &FAsyncConnection::OnClose); + + bReconnect = true; + + EG_LOG(LogDefault, Log, "Connecting to %s %s:%d", Name.c_str(), IP.c_str(), Port); + if (Socket->Connect(SocketAddress) == SOCKET_ERROR) + { + OnClose(Socket.get(), SOCKET_ERROR); + } +} + +void FAsyncConnection::Disconnect() +{ + bReconnect = false; + Socket->Close(); +} + +void FAsyncConnection::OnConnect(rtc::AsyncSocket*) +{ + EG_LOG(LogDefault, Log, "Connected to %s", 
Name.c_str()); + bReportDisconnection = true; + Observer.OnConnect(); +} + +void FAsyncConnection::OnClose(rtc::AsyncSocket*, int Err) +{ + if (bReportDisconnection) + { + EG_LOG(LogDefault, Warning, "Disconnected from %s, error %d. Reconnecting...", Name.c_str(), Err); + bReportDisconnection = false; + Observer.OnDisconnect(Err); + } + + if (!bReconnect) + return; + + while (Socket->Connect(SocketAddress) == SOCKET_ERROR) + { + } +} + +void FAsyncConnection::OnRead(rtc::AsyncSocket*) +{ + do + { + int ReceivedBytes = Socket->Recv(TmpReadBuffer, sizeof(TmpReadBuffer), nullptr); + if (ReceivedBytes <= 0) + { + break; + } + ReadBuffer.insert(ReadBuffer.end(), TmpReadBuffer, TmpReadBuffer + ReceivedBytes); + } while (true); + + uint32_t Consumed = 0; + while (!ReadBuffer.empty() && + (Consumed = Observer.OnRead(&ReadBuffer.front(), static_cast(ReadBuffer.size())))) + { + ReadBuffer.erase(ReadBuffer.begin(), ReadBuffer.begin() + Consumed); + } +} + +void FAsyncConnection::Send(const void* Data, uint32_t Size) +{ + Socket->Send(Data, Size); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.h new file mode 100644 index 000000000000..688b3fabb8b5 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/AsyncConnection.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" + +// callback interface for `FAsyncConnection` +struct IAsyncConnectionObserver +{ + virtual ~IAsyncConnectionObserver() {} + + // reports succeeded connection + virtual void OnConnect() = 0; + // reports incoming data + virtual uint32_t OnRead(const uint8_t* Data, uint32_t Size) = 0; + // reports disconnection + virtual void OnDisconnect(int Err) = 0; +}; + +// async TCP client connection +// automatically reconnects on disconnection except disconnection was explicit by the caller +class FAsyncConnection : public sigslot::has_slots<> +{ +public: + // `ConnectionName` is used for logging + FAsyncConnection(const std::string& ConnectionName, IAsyncConnectionObserver& Observer); + + // keeps connecting until succeeded, success is reported by `IAsyncConnectionObserver::OnConnect()` + void Connect(const std::string& IP, uint16_t Port); + // disconnects and calls `IAsyncConnectionObserver::OnDisconnect()` + void Disconnect(); + + // sends data asynchronously (?) but doesn't report when done + void Send(const void* Data, uint32_t Size); + +private: + void OnConnect(rtc::AsyncSocket*); + void OnRead(rtc::AsyncSocket*); + void OnClose(rtc::AsyncSocket*, int Err); + +private: + std::string Name; + IAsyncConnectionObserver& Observer; + rtc::SocketAddress SocketAddress; + std::unique_ptr Socket; + + std::atomic bReconnect = false; // automatically try to reconnect on disconnection + std::atomic bReportDisconnection = false; // to avoid reporting disconnection on repeated connection attempts + + uint8_t TmpReadBuffer[0xFFFF]; + std::vector ReadBuffer; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.cpp new file mode 100644 index 000000000000..a1680b080f51 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.cpp @@ -0,0 +1,148 @@ +// Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "CirrusConnection.h" +#include "Logging.h" + +using PixelStreamingProtocol::ECirrusToProxyMsg; +using PixelStreamingProtocol::EProxyToCirrusMsg; +using FMsgSize = uint32_t; + +FCirrusConnection::FCirrusConnection(ICirrusConnectionObserver& Observer) + : Connection("Cirrus", *this) + , Observer(Observer) +{} + +void FCirrusConnection::Connect(const std::string& IP, uint16_t Port) +{ + Connection.Connect(IP, Port); +} + +void FCirrusConnection::Disconnect() +{ + Connection.Disconnect(); +} + +void FCirrusConnection::OnDisconnect(int Err) +{ + Observer.OnCirrusDisconnected(); +} + +uint32_t FCirrusConnection::OnRead(const uint8_t* Data, uint32_t Size) +{ + if (Size < (sizeof(ECirrusToProxyMsg) + sizeof(FMsgSize))) + { + return 0; + } + + const uint8_t* Ptr = Data; // pointer to current read pos in the buffer + + ECirrusToProxyMsg MsgId = *reinterpret_cast(Ptr); + Ptr += sizeof(ECirrusToProxyMsg); + + checkf(MsgId < ECirrusToProxyMsg::count, + "Invalid message ID received from Cirrus: %u", static_cast(MsgId)); + + auto GetString = [&Ptr, Data, Size](std::string& res) -> bool + { + if (Ptr + sizeof(FMsgSize) > Data + Size) + return false; + + FMsgSize MsgSize = *reinterpret_cast(Ptr); + Ptr += sizeof(FMsgSize); + if ((Ptr + MsgSize) > (Data + Size)) + return false; + + res.assign(Ptr, Ptr + MsgSize); + Ptr += MsgSize; + return true; + }; + + auto GetClientId = [&Ptr, Data, Size](FClientId& res) -> bool + { + if (Ptr + sizeof(FClientId) > Data + Size) + return false; + + res = *reinterpret_cast(Ptr); + Ptr += sizeof(FClientId); + return true; + }; + + switch (MsgId) + { + case ECirrusToProxyMsg::config: + { + std::string Config; + if (!GetString(Config)) + return 0; + + Observer.OnCirrusConfig(Config); + break; + } + case ECirrusToProxyMsg::offer: + { + FClientId ClientId; + if (!GetClientId(ClientId)) + return 0; + + std::string Msg; + if (!GetString(Msg)) + return 0; + + Observer.OnOffer(ClientId, Msg); + 
break; + } + case ECirrusToProxyMsg::iceCandidate: + { + FClientId ClientId; + if (!GetClientId(ClientId)) + return 0; + + std::string Msg; + if (!GetString(Msg)) + return 0; + + Observer.OnIceCandidate(ClientId, Msg); + break; + } + case ECirrusToProxyMsg::clientDisconnected: + { + FClientId ClientId; + if (!GetClientId(ClientId)) + return 0; + + Observer.OnClientDisconnected(ClientId); + break; + } + default: + check(false); + } + + return static_cast(Ptr - Data); +} + +void FCirrusConnection::SendAnswer(FClientId Client, const std::string& Answer) +{ + SendStringMsg(EProxyToCirrusMsg::answer, Client, Answer); +} + +void FCirrusConnection::SendIceCandidate(FClientId Client, const std::string& IceCandidate) +{ + SendStringMsg(EProxyToCirrusMsg::iceCandidate, Client, IceCandidate); +} + +void FCirrusConnection::SendDisconnectClient(FClientId Client) +{ + auto MsgId = EProxyToCirrusMsg::disconnectClient; + Connection.Send(&MsgId, sizeof(MsgId)); + Connection.Send(&Client, sizeof(Client)); +} + +void FCirrusConnection::SendStringMsg(EProxyToCirrusMsg MsgId, FClientId Client, const std::string& Msg) +{ + Connection.Send(&MsgId, sizeof(MsgId)); + Connection.Send(&Client, sizeof(Client)); + FMsgSize MsgSize = static_cast(Msg.size()); + Connection.Send(&MsgSize, sizeof(FMsgSize)); + Connection.Send(Msg.data(), MsgSize); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.h new file mode 100644 index 000000000000..f22450b939ee --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CirrusConnection.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" +#include "AsyncConnection.h" + +// callback interface for `FCirrusConnection` +class ICirrusConnectionObserver +{ +public: + ~ICirrusConnectionObserver() + {} + + virtual void OnCirrusConfig(const std::string& Config) = 0; + virtual void OnOffer(FClientId Client, const std::string& Offer) = 0; + virtual void OnIceCandidate(FClientId Client, const std::string& IceCandidate) = 0; + virtual void OnQualityOwnership(FClientId Client) = 0; + virtual void OnClientDisconnected(FClientId Client) = 0; + virtual void OnCirrusDisconnected() = 0; +}; + +/** + * Communication with Cirrus. + * Sends messages to Cirrus and calls `ICirrusConnectionObserver` on incoming messages. + * Reconnects after losing connection + */ +class FCirrusConnection : public IAsyncConnectionObserver +{ +public: + explicit FCirrusConnection(ICirrusConnectionObserver& Observer); + + void Connect(const std::string& IP, uint16_t Port); + void Disconnect(); + + // Messages to Cirrus + void SendAnswer(FClientId Client, const std::string& Answer); + void SendIceCandidate(FClientId Client, const std::string& IceCandidate); + void SendDisconnectClient(FClientId Client); + +private: + // IAsyncConnectionObserver impl + void OnConnect() override {} + uint32_t OnRead(const uint8_t* Data, uint32_t Size) override; + void OnDisconnect(int Err) override; + + void SendStringMsg(PixelStreamingProtocol::EProxyToCirrusMsg MsgId, FClientId Client, const std::string& Msg); + +private: + ICirrusConnectionObserver& Observer; + FAsyncConnection Connection; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.cpp new file mode 100644 index 000000000000..8e6508dcfeb3 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.cpp @@ -0,0 +1,258 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "ClientSession.h" +#include "Logging.h" +#include "Conductor.h" +#include "SetSessionDescriptionObserver.h" + +namespace detail +{ + const char* ToString(webrtc::PeerConnectionInterface::SignalingState Val) + { + if (Val == webrtc::PeerConnectionInterface::kStable) + return "kStable"; + else if (Val == webrtc::PeerConnectionInterface::kHaveLocalOffer) + return "kHaveLocalOffer"; + else if (Val == webrtc::PeerConnectionInterface::kHaveLocalPrAnswer) + return "kHaveLocalPrAnswer"; + else if (Val == webrtc::PeerConnectionInterface::kHaveRemoteOffer) + return "kHaveRemoteOffer"; + else if (Val == webrtc::PeerConnectionInterface::kHaveRemotePrAnswer) + return "kHaveRemotePrAnswer"; + else if (Val == webrtc::PeerConnectionInterface::kClosed) + return "kClosed"; + else + { + checkfSlow(false, "Unknown enum value (%u). Revise code.", (uint32_t)Val); + return "Unknown"; + } + }; + + const char* ToString(webrtc::PeerConnectionInterface::IceConnectionState Val) + { + if (Val == webrtc::PeerConnectionInterface::kIceConnectionNew) + return "kIceConnectionNew"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionChecking) + return "kIceConnectionChecking"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionConnected) + return "kIceConnectionConnected"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionCompleted) + return "kIceConnectionCompleted"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionFailed) + return "kIceConnectionFailed"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionDisconnected) + return "kIceConnectionDisconnected"; + else if (Val == webrtc::PeerConnectionInterface::kIceConnectionClosed) + return "kIceConnectionClosed"; + else + { + checkfSlow(false, "Unknown enum value (%u). 
Revise code.", (uint32_t)Val); + return "Unknown"; + } + }; + + const char* ToString(webrtc::PeerConnectionInterface::IceGatheringState Val) + { + if (Val == webrtc::PeerConnectionInterface::kIceGatheringNew) + return "kIceGatheringNew"; + else if (Val == webrtc::PeerConnectionInterface::kIceGatheringGathering) + return "kIceGatheringGathering"; + else if (Val == webrtc::PeerConnectionInterface::kIceGatheringComplete) + return "kIceGatheringComplete"; + else + { + checkfSlow(false, "Unknown enum value (%u). Revise code.", (uint32_t)Val); + return "Unknown"; + } + }; + + const char* ToString(bool Val) + { + return Val ? "True" : "False"; + } +} + +FClientSession::FClientSession(FConductor& Outer, FClientId ClientId, bool bOriginalQualityController) + : Outer(Outer) + , ClientId(ClientId) + , bOriginalQualityController(bOriginalQualityController) +{ + EG_LOG(LogDefault, Log, "%s: ClientId=%u", __FUNCTION__, ClientId); +} + +FClientSession::~FClientSession() +{ + EG_LOG(LogDefault, Log, "%s: ClientId=%u", __FUNCTION__, ClientId); + if (DataChannel) + DataChannel->UnregisterObserver(); +} + +void FClientSession::DisconnectClient() +{ + if (bDisconnecting) + return; // already notified Cirrus to disconnect this client + + bDisconnecting = true; + Outer.CirrusConnection.SendDisconnectClient(ClientId); +} + +// +// webrtc::PeerConnectionObserver implementation. 
+// + +void FClientSession::OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState NewState) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, NewState=%s", __FUNCTION__, ClientId, detail::ToString(NewState)); +} + +// Called when a remote stream is added +void FClientSession::OnAddStream(rtc::scoped_refptr Stream) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, Stream=%s", __FUNCTION__, ClientId, Stream->id().c_str()); +} + +void FClientSession::OnRemoveStream(rtc::scoped_refptr Stream) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, Stream=%s", __FUNCTION__, ClientId, Stream->id().c_str()); +} + +void FClientSession::OnDataChannel(rtc::scoped_refptr DataChannel) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); + this->DataChannel = DataChannel; + this->DataChannel->RegisterObserver(this); +} + +void FClientSession::OnRenegotiationNeeded() +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); +} + +void FClientSession::OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState NewState) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, NewState=%s", __FUNCTION__, ClientId, detail::ToString(NewState)); +} + +void FClientSession::OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState NewState) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, NewState=%s", __FUNCTION__, ClientId, detail::ToString(NewState)); +} + +void FClientSession::OnIceCandidate(const webrtc::IceCandidateInterface* Candidate) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); + + Json::StyledWriter Writer; + Json::Value Jmessage; + + Jmessage[kCandidateSdpMidName] = Candidate->sdp_mid(); + Jmessage[kCandidateSdpMlineIndexName] = Candidate->sdp_mline_index(); + std::string Sdp; + if (!Candidate->ToString(&Sdp)) + { + EG_LOG(LogDefault, Error, "Failed to serialize candidate for client %u", ClientId); + return; + } + + EG_LOG( + LogDefault, + Log, + "Sending ICE candidate to Client %u 
(sdp_mline_index=%d) : %s", + ClientId, + Candidate->sdp_mline_index(), + Sdp.c_str()); + + Jmessage[kCandidateSdpName] = Sdp; + std::string Msg = Writer.write(Jmessage); + Outer.CirrusConnection.SendIceCandidate(ClientId, Msg); +} + +void FClientSession::OnIceCandidatesRemoved(const std::vector& candidates) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); +} + +void FClientSession::OnIceConnectionReceivingChange(bool Receiving) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u, Receiving=%s", __FUNCTION__, ClientId, detail::ToString(Receiving)); +} + +void FClientSession::OnTrack(rtc::scoped_refptr transceiver) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); +} + +void FClientSession::OnRemoveTrack(rtc::scoped_refptr receiver) +{ + EG_LOG(LogDefault, Log, "%s : ClientId=%u", __FUNCTION__, ClientId); +} + +// +// webrtc::DataChannelObserver implementation. +// + +void FClientSession::OnMessage(const webrtc::DataBuffer& Buffer) +{ + auto MsgType = static_cast(Buffer.data.data()[0]); + if (MsgType == PixelStreamingProtocol::EToUE4Msg::RequestQualityControl) + { + check(Buffer.data.size() == 1); + Outer.OnQualityOwnership(ClientId); + } + else + { + Outer.UE4Connection.Send(Buffer.data.data(), static_cast(Buffer.data.size())); + } +} + +// +// webrtc::CreateSessionDescriptionObserver implementation. +// +void FClientSession::OnSuccess(webrtc::SessionDescriptionInterface* Desc) +{ + std::string Sdp; + Desc->ToString(&Sdp); + EG_LOG(LogDefault, Log, "Answer for client %u : %s", ClientId, Sdp.c_str()); + + // #REFACTOR : With WebRTC branch-heads/66, the sink of video capturer will be added as a direct result + // of `PeerConnection->SetLocalDescription()` call but video encoder will be created later on + // the first frame pushed into the pipeline (by capturer). 
+ // We need to associate this `FClientSession` instance with the right instance of `FVideoEncoder` for quality + // control, the problem is that `FVideoEncoder` is created asynchronously on demand and there's no + // clean way to give it the right instance of `FClientSession`. + // The plan is to assume that encoder instances are created in the same order as we call + // `PeerConnection->SetLocalDescription()`, as these calls are done from the same thread and internally + // WebRTC uses `std::vector` for capturer's sinks and then iterates over it to create encoder instances, + // and there's no obvious reason why it can be replaced by an unordered container in the future. + // So before adding a new sink to the capturer (`PeerConnection->SetLocalDescription()`) we push + // this `FClientSession` into encoder factory queue and pop it out of the queue when encoder instance + // is created. Unfortunately I (Andriy) don't see a way to put `check`s to verify it works correctly. + Outer.VideoEncoderFactory->AddSession(*this); + // we assume just created local session description shouldn't cause any issue and so proceed immediately + // not waiting for confirmation, otherwise we hard fail + PeerConnection->SetLocalDescription( + FSetSessionDescriptionObserver::Create( + []() {}, + [](const std::string& error) { checkf("Setting local description failed: %s", error.c_str()); } + ), + Desc + ); + + Outer.UE4Connection.StartStreaming(); + + Json::StyledWriter Writer; + Json::Value Jmessage; + Jmessage[kSessionDescriptionTypeName] = webrtc::SdpTypeToString(Desc->GetType()); + Jmessage[kSessionDescriptionSdpName] = Sdp; + std::string msg = Writer.write(Jmessage); + Outer.CirrusConnection.SendAnswer(ClientId, msg); +} + +void FClientSession::OnFailure(const std::string& Error) +{ + EG_LOG(LogDefault, Error, "Failed to create answer for client %u : %s", ClientId, Error.c_str()); + + // This must be the last line because it will destroy this instance + 
Outer.DeleteClient(ClientId); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.h new file mode 100644 index 000000000000..d973e31c0e8b --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ClientSession.h @@ -0,0 +1,57 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +class FConductor; + +struct FClientSession + : public webrtc::CreateSessionDescriptionObserver + , public webrtc::PeerConnectionObserver + , public webrtc::DataChannelObserver +{ + FClientSession(FConductor& Outer, FClientId ClientId, bool bOriginalQualityController); + ~FClientSession() override; + + void DisconnectClient(); + + FConductor& Outer; + FClientId ClientId; + bool bOriginalQualityController; + std::atomic VideoEncoder = nullptr; + rtc::scoped_refptr PeerConnection; + rtc::scoped_refptr DataChannel; + std::atomic bDisconnecting = false; + + // + // webrtc::PeerConnectionObserver implementation. + // + void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState NewState) override; + void OnAddStream(rtc::scoped_refptr Stream) override; + void OnRemoveStream(rtc::scoped_refptr Stream) override; + void OnDataChannel(rtc::scoped_refptr Channel) override; + void OnRenegotiationNeeded() override; + void OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState NewState) override; + void OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState NewState) override; + void OnIceCandidate(const webrtc::IceCandidateInterface* Candidate) override; + void OnIceCandidatesRemoved(const std::vector& candidates) override; + void OnIceConnectionReceivingChange(bool Receiving) override; + void OnTrack(rtc::scoped_refptr transceiver) override; + void OnRemoveTrack(rtc::scoped_refptr receiver) override; + + // + // werbrtc::DataChannelObserver implementation. 
+ // + void OnStateChange() override + {} + void OnBufferedAmountChange(uint64_t PreviousAmount) override + {} + void OnMessage(const webrtc::DataBuffer& Buffer) override; + + // + // webrtc::CreateSessionDescriptionObserver implementation. + // + void OnSuccess(webrtc::SessionDescriptionInterface* Desc) override; + void OnFailure(const std::string& Error) override; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.cpp new file mode 100644 index 000000000000..b2cd6a1e0f0d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.cpp @@ -0,0 +1,100 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "CmdLine.h" +#include "StringUtils.h" +#include "Logging.h" + +std::string FCmdLine::Empty; + +bool FCmdLine::Parse(int Argc, char* Argv[], bool CaseSensitive) +{ + this->CaseSensitive = CaseSensitive; + + if (Argc <= 1) + { + return true; + } + + for (int I = 1; I < Argc; I++) + { + const char* Arg = Argv[I]; + if (*Arg == '-') + { + Arg++; + } + else + { + EG_LOG(LogDefault, Error, "Invalid parameter ('%s'). 
Parameters need to be prefixed with '-'.", Arg); + // We need parameters to have the '-' prefix + return false; + } + + const char* Separator = std::find(Arg, Arg + strlen(Arg), '='); + if (Separator == Arg + strlen(Arg)) + { + Params.emplace_back(Arg, ""); + } + else + { + std::string Name(Arg, Separator); + std::string Value(++Separator); + Params.emplace_back(std::move(Name), std::move(Value)); + } + } + + return true; +} + +bool FCmdLine::Has(const char* Name) const +{ + for (auto& P : Params) + { + if (Equals(P.Name, Name)) + { + return true; + } + } + return false; +} + +const std::string& FCmdLine::Get(const char* Name) const +{ + for (auto& P : Params) + { + if (Equals(P.Name, Name)) + { + return P.Value; + } + } + return Empty; +} + +std::pair FCmdLine::GetAsInt(const char* Name, int DefaultValue) const +{ + std::pair Res{false, DefaultValue}; + Res.first = Has(Name); + if (Res.first) + { + Res.second = std::atoi(Get(Name).c_str()); + } + + return Res; +} + +int FCmdLine::GetCount() const +{ + return static_cast(Params.size()); +} + +bool FCmdLine::Equals(const std::string& A, const char* B) const +{ + if (CaseSensitive) + { + return A == B ? true : false; + } + else + { + return CiEquals(A, std::string(B)); + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.h new file mode 100644 index 000000000000..2bad22306724 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CmdLine.h @@ -0,0 +1,74 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + + +/** + * Utility class to parse command line parameters. 
+ *
+ * Arguments need to be prefixed with '-' and can have the following formats:
+ * -SomeArg
+ * -SomeOtherArg=Value
+ */
+class FCmdLine
+{
+public:
+	struct FParam
+	{
+		template <typename T1, typename T2>
+		FParam(T1&& Name, T2&& Value)
+		    : Name(std::forward<T1>(Name))
+		    , Value(std::forward<T2>(Value))
+		{
+		}
+		std::string Name;
+		std::string Value;
+	};
+
+	FCmdLine()
+	{
+	}
+
+	/**
+	 * Parse all the supplied parameters, as received in "main"
+	 */
+	bool Parse(int Argc, char* Argv[], bool CaseSensitive=false);
+
+	/**
+	 * Checks if the specified parameter is present, in any acceptable form, such
+	 * as "-arg" or "-arg=value"
+	 */
+	bool Has(const char* Name) const;
+
+	/**
+	 * Gets the value of the specified parameter.
+	 * @return Value of the parameter or an empty string if the parameter doesn't
+	 * exist or is not in the "-arg=value" form
+	 * Use "Has" method first, to check if a parameter exists.
+	 */
+	const std::string& Get(const char* Name) const;
+
+	/**
+	 * Gets the value of the specified parameter, as an integer
+	 *
+	 * @param Name Parameter name
+	 * @param DefaultValue If the parameter doesn't exist or is not in the "-arg=value" form, it will default to this
+	 * @return
+	 *	Pair where "first" is true if the parameter exists, false if not. "second" is the parameter's value or DefaultValue
+	 */
+	std::pair<bool, int> GetAsInt(const char* Name, int DefaultValue = 0) const;
+
+	/**
+	 * @return The number of parameters
+	 */
+	int GetCount() const;
+
+private:
+	bool Equals(const std::string& A, const char* B) const;
+
+	std::vector<FParam> Params;
+	static std::string Empty;
+	bool CaseSensitive = true;
+};
diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.cpp
new file mode 100644
index 000000000000..78cdc5e49873
--- /dev/null
+++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.cpp
@@ -0,0 +1,453 @@
+// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
+ +#include "WebRTCProxyPCH.h" +#include "Conductor.h" +#include "Logging.h" +#include "UE4Connection.h" +#include "NetworkAudioCapturer.h" +#include "NetworkVideoCapturer.h" +#include "ClientSession.h" +#include "SetSessionDescriptionObserver.h" + +const char StreamId[] = "stream_id"; +const char AudioLabel[] = "audio_label"; +const char VideoLabel[] = "video_label"; + +using webrtc::SdpType; + +////////////////////////////////////////////////////////////////////////// +// FConductor +////////////////////////////////////////////////////////////////////////// + +FConductor::FConductor() + : UE4Connection(*this) + , CirrusConnection(*this) + , AudioCapturer(new rtc::RefCountedObject()) +{ + auto VideoEncoderFactoryStrong = std::make_unique(*this); + // #HACK: Keep a pointer to the Video encoder factory, so we can use it to figure out the + // FClientSession <-> FakeVideoEncoder relationship later on + check(!VideoEncoderFactory); + VideoEncoderFactory = VideoEncoderFactoryStrong.get(); + + PeerConnectionFactory = webrtc::CreatePeerConnectionFactory( + nullptr, + nullptr, + nullptr, + AudioCapturer, + webrtc::CreateAudioEncoderFactory(), + webrtc::CreateAudioDecoderFactory(), + std::move(VideoEncoderFactoryStrong), + std::make_unique(), + nullptr, + nullptr); + check(PeerConnectionFactory); + + ResetPeerConnectionConfig(); + + UE4Connection.Connect("127.0.0.1", PARAM_UE4Port); +} + +FConductor::~FConductor() +{ + // #REFACTOR: To destroy NetworkVideoCapturer first, TODO (andriy): reconsider/simplify dependencies + DeleteAllClients(); +} + +FClientSession* FConductor::GetClientSession(FClientId ClientId) +{ + auto It = Clients.find(ClientId); + if (It == Clients.end()) + { + return nullptr; + } + else + { + return It->second.get(); + } +} + +void FConductor::DeleteAllClients() +{ + while (Clients.size()) + { + DeleteClient(Clients.begin()->first); + } +} + +void FConductor::CreateClient(FClientId ClientId) +{ + check(PeerConnectionFactory.get() != NULL); + + if 
(PARAM_PlanB) + { + verifyf(Clients.find(ClientId) == Clients.end(), "Client %u already exists", ClientId); + } + else + { + // With unified plan, we get several calls to OnOffer, which in turn calls + // this several times. + // Therefore, we only try to create the client if not created already + if (Clients.find(ClientId) != Clients.end()) + { + return; + } + } + + webrtc::FakeConstraints Constraints; + Constraints.AddOptional(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, "true"); + + rtc::scoped_refptr Session = new rtc::RefCountedObject(*this, ClientId, Clients.empty()); + Session->PeerConnection = PeerConnectionFactory->CreatePeerConnection(PeerConnectionConfig, &Constraints, NULL, NULL, Session.get()); + check(Session->PeerConnection); + Clients[ClientId] = std::move(Session); +} + +void FConductor::DeleteClient(FClientId ClientId) +{ + Clients.erase(ClientId); + if (Clients.size() == 0) + { + UE4Connection.StopStreaming(); + + if (!PARAM_PlanB) + { + AudioTrack = nullptr; + VideoTrack = nullptr; + } + Streams.clear(); + } +} + +void FConductor::AddStreams(FClientId ClientId) +{ + FClientSession* Session = GetClientSession(ClientId); + check(Session); + + if (PARAM_PlanB) + { + rtc::scoped_refptr Stream; + + if (Streams.find(StreamId) != Streams.end()) + { + Stream = Streams[StreamId]; + } + else + { + Stream = PeerConnectionFactory->CreateLocalMediaStream(StreamId); + + rtc::scoped_refptr AudioTrack( + PeerConnectionFactory->CreateAudioTrack(AudioLabel, PeerConnectionFactory->CreateAudioSource(NULL))); + + Stream->AddTrack(AudioTrack); + + std::unique_ptr VideoCapturerStrong = std::make_unique(); + VideoCapturer = VideoCapturerStrong.get(); + rtc::scoped_refptr VideoTrack(PeerConnectionFactory->CreateVideoTrack( + VideoLabel, PeerConnectionFactory->CreateVideoSource(std::move(VideoCapturerStrong)))); + + Stream->AddTrack(VideoTrack); + + typedef std::pair > MediaStreamPair; + Streams.insert(MediaStreamPair(Stream->id(), Stream)); + } + + 
verifyf(Session->PeerConnection->AddStream(Stream), "Failed to add stream for client %u", ClientId); + } + else + { + if (!Session->PeerConnection->GetSenders().empty()) + { + return; // Already added tracks + } + + if (!AudioTrack) + { + AudioTrack = + PeerConnectionFactory->CreateAudioTrack(AudioLabel, PeerConnectionFactory->CreateAudioSource(NULL)); + } + + if (!VideoTrack) + { + std::unique_ptr VideoCapturerStrong = std::make_unique(); + VideoCapturer = VideoCapturerStrong.get(); + VideoTrack = PeerConnectionFactory->CreateVideoTrack( + VideoLabel, PeerConnectionFactory->CreateVideoSource(std::move(VideoCapturerStrong))); + } + + auto ResultOrError = Session->PeerConnection->AddTrack(AudioTrack, {StreamId}); + if (!ResultOrError.ok()) + { + EG_LOG( + LogDefault, + Error, + "Failed to add AudioTrack to PeerConnection of client %u. Msg=%s", + Session->ClientId, + ResultOrError.error().message()); + } + + ResultOrError = Session->PeerConnection->AddTrack(VideoTrack, {StreamId}); + if (!ResultOrError.ok()) + { + EG_LOG( + LogDefault, + Error, + "Failed to add VideoTrack to PeerConnection of client %u. Msg=%s", + Session->ClientId, + ResultOrError.error().message()); + } + } +} + +void FConductor::OnQualityOwnership(FClientId ClientId) +{ + // First disable ownership for all + for (auto&& Client : Clients) + { + if (Client.second->VideoEncoder && Client.second->DataChannel) + { + Client.second->VideoEncoder.load()->SetQualityControlOwnership(false); + rtc::CopyOnWriteBuffer Buf(2); + Buf[0] = static_cast(PixelStreamingProtocol::EToClientMsg::QualityControlOwnership); + Buf[1] = 0; // false + Client.second->DataChannel->Send(webrtc::DataBuffer(Buf, true)); + } + } + + FClientSession* Session = GetClientSession(ClientId); + if (Session->VideoEncoder && Session->DataChannel) + { + // Then enable this instance. 
This avoids any potential competition + Session->VideoEncoder.load()->SetQualityControlOwnership(true); + rtc::CopyOnWriteBuffer Buf(2); + Buf[0] = static_cast(PixelStreamingProtocol::EToClientMsg::QualityControlOwnership); + Buf[1] = 1; // true + Session->DataChannel->Send(webrtc::DataBuffer(Buf, true)); + } +} + +// IUE4ConnectionObserver implementation + +void FConductor::OnUE4Connected() +{ + CirrusConnection.Connect(PARAM_Cirrus.first, PARAM_Cirrus.second); +} + +void FConductor::OnUE4Disconnected() +{ + DeleteAllClients(); + CirrusConnection.Disconnect(); +} + +void FConductor::OnUE4Packet(PixelStreamingProtocol::EToProxyMsg PktType, const void* Pkt, uint32_t Size) +{ + // Forward to the audio component if it's audio + if (PktType == PixelStreamingProtocol::EToProxyMsg::AudioPCM) + { + check(AudioCapturer); + AudioCapturer->ProcessPacket(PktType, Pkt, Size); + } + else if (PktType == PixelStreamingProtocol::EToProxyMsg::Response) + { + // Currently broadcast the response to all clients. + for (auto&& Client : Clients) + { + if (Client.second->DataChannel) + { + rtc::CopyOnWriteBuffer Buffer(Size + 1); + Buffer[0] = static_cast(PixelStreamingProtocol::EToClientMsg::Response); + std::memcpy(&Buffer[1], reinterpret_cast(Pkt), Size); + Client.second->DataChannel->Send(webrtc::DataBuffer(Buffer, true)); + } + } + } + else + { + check(VideoCapturer); + VideoCapturer->ProcessPacket(PktType, Pkt, Size); + } +} + +// +// ICirrusConnectionObserver implementation. 
+// + +void FConductor::OnCirrusConfig(const std::string& Config) +// gets configuration from Cirrus so we have a single point to provide configuration shared by Proxy and clients +// parses from JSON and stores in `webrtc::RTCConfiguration` that will be used for all clients peer connections +{ + Json::Reader Reader; + Json::Value ConfigJson; + bool res = Reader.parse(Config, ConfigJson); + checkf(res, "Received invalid JSON config from Cirrus: %s", Config.c_str()); + + EG_LOG(LogDefault, Log, "Cirrus config : %s", ConfigJson.toStyledString().c_str()); + + checkf(!ConfigJson[kPeerConnectionConfigName].isNull(), "No \"%s\" key in Cirrus config: %s", kPeerConnectionConfigName, ConfigJson.toStyledString().c_str()); + + Json::Value PcCfgJson = ConfigJson[kPeerConnectionConfigName]; + Json::Value IceServersListJson = PcCfgJson[kIceServersName]; + if (!IceServersListJson) + return; + + for (auto IceServerJson : IceServersListJson) + { + PeerConnectionConfig.servers.emplace_back(); + auto& IceServer = PeerConnectionConfig.servers.back(); + + for (auto Url : IceServerJson[kUrlsName]) + { + IceServer.urls.push_back(Url.asString()); + } + + Json::Value UsernameJson = IceServerJson[kUsernameName]; + if (!UsernameJson.isNull()) + { + IceServer.username = UsernameJson.asString(); + } + + Json::Value CredentialJson = IceServerJson[kCredentialName]; + if (!CredentialJson.isNull()) + { + IceServer.password = CredentialJson.asString(); + } + } +} + +void FConductor::ResetPeerConnectionConfig() +{ + PeerConnectionConfig = webrtc::PeerConnectionInterface::RTCConfiguration{}; + PeerConnectionConfig.sdp_semantics = + PARAM_PlanB ? 
webrtc::SdpSemantics::kPlanB : webrtc::SdpSemantics::kUnifiedPlan; +} + +void FConductor::OnOffer(FClientId ClientId, const std::string& Offer) +{ + CreateClient(ClientId); + AddStreams(ClientId); + + FClientSession* Session = GetClientSession(ClientId); + checkf(Session, "Client %u not found", ClientId); + + Json::Reader Reader; + Json::Value Jmessage; + std::string Sdp; + if (!Reader.parse(Offer, Jmessage) || Jmessage.get(kSessionDescriptionTypeName, "") != "offer" || + (Sdp = Jmessage.get(kSessionDescriptionSdpName, "").asString()) == "") + { + EG_LOG(LogDefault, Warning, "Received invalid JSON for Offer from Client %u : %s", ClientId, Offer.c_str()); + Session->DisconnectClient(); + return; + } + + EG_LOG(LogDefault, Log, "Received offer from client %u : %s", ClientId, Sdp.c_str()); + + webrtc::SdpParseError Error; + std::unique_ptr SessionDesc = + webrtc::CreateSessionDescription(webrtc::SdpType::kOffer, Sdp, &Error); + if (!SessionDesc) + { + // offer comes from the client and can be malformed/unsupported + // don't crash here but tell Cirrus to disconnect the client + EG_LOG( + LogDefault, + Warning, + "Can't parse offer from client %u. SdpParseError was '%s'. 
Disconnecting client.", + ClientId, + Error.description.c_str()); + Session->DisconnectClient(); + return; + } + + // this can fail if client is incompatible, so proceed only on success + Session->PeerConnection->SetRemoteDescription( + FSetSessionDescriptionObserver::Create( + [Session]() { Session->PeerConnection->CreateAnswer(Session, nullptr); }, + [Session](const std::string& error) { + EG_LOG(LogDefault, Error, error.c_str()); + Session->DisconnectClient(); + }), + SessionDesc.release()); +} + +void FConductor::OnIceCandidate(FClientId ClientId, const std::string& IceCandidate) +{ + EG_LOG(LogDefault, Log, "Received ICE candidate from Client %u : %s", ClientId, IceCandidate.c_str()); + + FClientSession* Session = GetClientSession(ClientId); + checkf(Session, "Client %u not found", ClientId); + + Json::Reader Reader; + Json::Value Jmessage; + if (!Reader.parse(IceCandidate, Jmessage)) + { + EG_LOG( + LogDefault, + Warning, + "Received invalid JSON for ICE Candidate from Client %u : %s", + ClientId, + IceCandidate.c_str()); + Session->DisconnectClient(); + return; + } + + std::string Sdp_mid; + int Sdp_mlineindex = 0; + std::string Sdp; + if (!rtc::GetStringFromJsonObject(Jmessage, kCandidateSdpMidName, &Sdp_mid) || + !rtc::GetIntFromJsonObject(Jmessage, kCandidateSdpMlineIndexName, &Sdp_mlineindex) || + !rtc::GetStringFromJsonObject(Jmessage, kCandidateSdpName, &Sdp)) + { + EG_LOG( + LogDefault, + Warning, + "Cannot parse ICE Candidate fields from Client %u : %s", + ClientId, + IceCandidate.c_str()); + Session->DisconnectClient(); + return; + } + + webrtc::SdpParseError Error; + std::unique_ptr Candidate( + webrtc::CreateIceCandidate(Sdp_mid, Sdp_mlineindex, Sdp, &Error)); + if (!Candidate.get()) + { + EG_LOG(LogDefault, Warning, "Cannot parse ICE Candidate from Client %u : %s ", ClientId, IceCandidate.c_str()); + Session->DisconnectClient(); + return; + } + + if (!Session->PeerConnection->AddIceCandidate(Candidate.get())) + { + EG_LOG( + LogDefault, Warning, 
"Failed to apply ICE Candidate from Client %u : %s ", ClientId, IceCandidate.c_str()); + Session->DisconnectClient(); + return; + } +} + +void FConductor::OnClientDisconnected(FClientId ClientId) +{ + EG_LOG(LogDefault, Log, "Client %u disconnected", ClientId); + DeleteClient(ClientId); +} + +void FConductor::OnCirrusDisconnected() +{ + EG_LOG(LogDefault, Log, "Cirrus disconnected. Removing all clients"); + DeleteAllClients(); + ResetPeerConnectionConfig(); +} + +// IVideoEncoderObserver impl + +void FConductor::ForceKeyFrame() +{ + UE4Connection.ForceKeyFrame(); +} + +void FConductor::SetRate(uint32_t BitrateKbps, uint32_t Framerate) +{ + UE4Connection.SetRate(BitrateKbps, Framerate); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.h new file mode 100644 index 000000000000..4b0944bb42ad --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Conductor.h @@ -0,0 +1,75 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" +#include "UE4Connection.h" +#include "CirrusConnection.h" +#include "VideoEncoder.h" + +// Forward declarations +struct FClientSession; +class FNetworkAudioCapturer; +class FNetworkVideoCapturer; + +class FConductor : + public IUE4ConnectionObserver, + public ICirrusConnectionObserver, + public IVideoEncoderObserver +{ +public: + FConductor(); + ~FConductor(); + +private: + // IUE4ConnectionObserver implementation + void OnUE4Connected() override; + void OnUE4Disconnected() override; + void OnUE4Packet(PixelStreamingProtocol::EToProxyMsg PktType, const void* Data, uint32_t Size) override; + + // ICirrusConnectionObserver implementation + void OnCirrusConfig(const std::string& Config) override; + void OnOffer(FClientId ClientId, const std::string& Offer) override; + void OnIceCandidate(FClientId ClientId, const std::string& IceCandidate) override; + void OnClientDisconnected(FClientId ClientId) override; + void OnCirrusDisconnected() override; + + // IVideoEncoderObserver + void ForceKeyFrame() override; + void SetRate(uint32_t BitrateKbps, uint32_t Framerate) override; + + // own methods + void CreateClient(FClientId ClientId); + void DeleteClient(FClientId ClientId); + void DeleteAllClients(); + FClientSession* GetClientSession(FClientId ClientId); + + void AddStreams(FClientId ClientId); + + void OnQualityOwnership(FClientId ClientId); + + void ResetPeerConnectionConfig(); + + friend FClientSession; + +private: + FUE4Connection UE4Connection; + FCirrusConnection CirrusConnection; + + rtc::scoped_refptr AudioCapturer; + // #MULTICAST : Refactor this. We are keeping the raw pointer internally, + // since the outside code requires the ownership (std::unique_ptr). 
Dangerous cos it allows
+	// usage after destruction
+	FNetworkVideoCapturer* VideoCapturer = nullptr;
+	FVideoEncoderFactory* VideoEncoderFactory = nullptr;
+
+	std::unordered_map<FClientId, rtc::scoped_refptr<FClientSession>> Clients;
+	rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> PeerConnectionFactory;
+	webrtc::PeerConnectionInterface::RTCConfiguration PeerConnectionConfig;
+
+	// This is only used if using PlanB semantics
+	std::unordered_map<std::string, rtc::scoped_refptr<webrtc::MediaStreamInterface>> Streams;
+	// These are used only if using UnifiedPlan semantics
+	rtc::scoped_refptr<webrtc::AudioTrackInterface> AudioTrack;
+	rtc::scoped_refptr<webrtc::VideoTrackInterface> VideoTrack;
+};
diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.cpp
new file mode 100644
index 000000000000..43def9a09f53
--- /dev/null
+++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.cpp
@@ -0,0 +1,122 @@
+// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
+
+#include "WebRTCProxyPCH.h"
+#include "Console.h"
+#include "StringUtils.h"
+
+FConsole::FConsole()
+{
+}
+
+FConsole::~FConsole()
+{
+	if (bOwnsConsole && hConsoleHandle != INVALID_HANDLE_VALUE)
+	{
+		FreeConsole();
+	}
+}
+
+void FConsole::Init(short Width, short Height, short BufferWidth, short BufferHeight)
+{
+	check(hConsoleHandle == INVALID_HANDLE_VALUE);
+	if (hConsoleHandle != INVALID_HANDLE_VALUE)
+	{
+		return;
+	}
+
+	CONSOLE_SCREEN_BUFFER_INFO ConInfo;
+
+	// allocate a console for this app.
+	// NOTE: It fails if there is a console already
+	bOwnsConsole = AllocConsole() == TRUE ?
true : false; + + hConsoleHandle = GetStdHandle(STD_OUTPUT_HANDLE); + // set the screen buffer to be big enough to let us scroll text + GetConsoleScreenBufferInfo(hConsoleHandle, &ConInfo); + // Set the screen buffer size + ConInfo.dwSize.Y = BufferHeight; + ConInfo.dwSize.X = BufferWidth; + SetConsoleScreenBufferSize(hConsoleHandle, ConInfo.dwSize); + // Set the real window size (need to be smaller than the buffer + ConInfo.srWindow.Bottom = Height - 1; + ConInfo.srWindow.Right = Width - 1; + SetConsoleWindowInfo(hConsoleHandle, TRUE, &ConInfo.srWindow); + Center(); + EnableUTF8Support(); + SetTextColour(EColour::White); +} + +void FConsole::Center() +{ + // Reposition windows + RECT ScreenRect; + GetWindowRect(GetDesktopWindow(), &ScreenRect); + int ScreenWidth = ScreenRect.right - ScreenRect.left + 1; + int ScreenHeight = ScreenRect.bottom - ScreenRect.top + 1; + + HWND hConsoleWnd = GetConsoleWindow(); + + RECT ConsoleRect; + GetWindowRect(hConsoleWnd, &ConsoleRect); + int ConsoleWidth = ConsoleRect.right - ConsoleRect.left + 1; + int ConsoleHeight = ConsoleRect.bottom - ConsoleRect.top + 1; + + SetWindowPos( + hConsoleWnd, + 0, + (ScreenWidth - ConsoleWidth) / 2, + (ScreenHeight - ConsoleHeight) / 2, + 0, + 0, + SWP_NOSIZE | SWP_NOZORDER); + + SwitchToThisWindow(hConsoleWnd, TRUE); +} + +void FConsole::EnableUTF8Support() +{ + BOOL ret = SetConsoleOutputCP( + 65001); // utf codepage, as in http://msdn.microsoft.com/en-us/library/dd317756(v=vs.85).aspx + ret = SetConsoleCP(65001); +} + +void FConsole::Print(const char* Str) +{ + DWORD Written; + // WriteConsoleA, to force using Ansi/UTF8 + WriteConsoleA(hConsoleHandle, Str, static_cast(strlen(Str)), &Written, NULL); +} + +void FConsole::Printf(_Printf_format_string_ const char* Fmt, ...) 
+{
+	va_list Args;
+	va_start(Args, Fmt);
+	Print(FormatStringVA(Fmt, Args));
+	va_end(Args);
+}
+
+void FConsole::Log(
+    const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, const char* Msg)
+{
+	EColour Colour = CurrColour;
+	if (Verbosity == ELogVerbosity::Log)
+	{
+		SetTextColour(EColour::White);
+	}
+	else if (Verbosity == ELogVerbosity::Warning)
+	{
+		SetTextColour(EColour::Yellow);
+	}
+	else
+	{
+		SetTextColour(EColour::Red);
+	}
+	Print(Msg); // Msg is a fully formatted message, not a format string, so don't pass it through Printf
+	SetTextColour(Colour);
+}
+
+void FConsole::SetTextColour(EColour Colour)
+{
+	SetConsoleTextAttribute(hConsoleHandle, (WORD)Colour);
+	CurrColour = Colour;
+}
diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.h
new file mode 100644
index 000000000000..fe740cf0b597
--- /dev/null
+++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Console.h
@@ -0,0 +1,92 @@
+// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
+ +#pragma once + +#include "WebRTCProxyCommon.h" +#include "Logging.h" + +/** + * Creates or manages the existing Windows console + * If the application already has a console, it will use it, if not, it will create + * one + */ +class FConsole : public ILogOutput +{ +public: + + /** + * Colours allowed + */ + enum class EColour + { + Black = 0x0, + Blue = 0x01, + Green = 0x02, + Red = 0x04, + Cyan = Blue + Green, + Pink = Blue + Red, + Yellow = Green + Red, + White = Blue + Green + Red, + BrightBlue = Blue + 0x08, + BrightGreen = Green + 0x08, + BrightRed = Red + 0x08, + BrightCyan = Cyan + 0x08, + BrightPink = Pink + 0x08, + BrightYellow = Yellow + 0x08, + BrightWhite = White + 0x08 + }; + + FConsole(); + ~FConsole(); + + /** + * Initialize the console + * @param Width width of the console window + * @param Height height of the console window + * @param BufferWidth + * width of the buffer itself (it can be larger than the window to allow scrolling + * horizontally + * @param BufferHeight + * Number of lines of the console buffer. 
Allows keeping some history, so you + * can scroll vertically and take a look at older entries + */ + void Init(short Width, short Height, short BufferWidth, short BufferHeight); + + /** + * Prints a string using the current foreground/background colours + */ + void Print(const char* Str); + /** + * Prints a string using the current foreground/background colours + */ + void Printf(_Printf_format_string_ const char* Fmt, ...); + + // + // ILogOutput interface + // + void Log(const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, const char* Msg) override; + + /** + * Allows the console to print UTF8 content + * \note This only works if the console font is anything other than "Raster Font" + */ + void EnableUTF8Support(); + + /** + * Centers the console window on the screen + */ + void Center(); + +private: + void SetTextColour(EColour Colour); + HANDLE hConsoleHandle = INVALID_HANDLE_VALUE; + + // Each process can have only 1 console. + // This tells if the console was created by this class, and if so, it will + // be deleted when the object is destroyed + bool bOwnsConsole = false; + + EColour CurrColour = EColour::White; +}; + + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.cpp new file mode 100644 index 000000000000..c5f5671dc801 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.cpp @@ -0,0 +1,105 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "CrashDetection.h" +#include "Logging.h" + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + +namespace detail +{ + /** + * Detects a crash, logs the reason, and waits a bit, so the logs have time + * to flush + */ + LONG WINAPI WindowsExceptionHandler(EXCEPTION_POINTERS* ExceptionInfo) + { + switch (ExceptionInfo->ExceptionRecord->ExceptionCode) + { + case EXCEPTION_ACCESS_VIOLATION: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_ACCESS_VIOLATION"); + break; + case EXCEPTION_ARRAY_BOUNDS_EXCEEDED: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_ARRAY_BOUNDS_EXCEEDED"); + break; + case EXCEPTION_BREAKPOINT: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_BREAKPOINT"); + break; + case EXCEPTION_DATATYPE_MISALIGNMENT: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_DATATYPE_MISALIGNMENT"); + break; + case EXCEPTION_FLT_DENORMAL_OPERAND: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_DENORMAL_OPERAND"); + break; + case EXCEPTION_FLT_DIVIDE_BY_ZERO: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_DIVIDE_BY_ZERO"); + break; + case EXCEPTION_FLT_INEXACT_RESULT: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_INEXACT_RESULT"); + break; + case EXCEPTION_FLT_INVALID_OPERATION: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_INVALID_OPERATION"); + break; + case EXCEPTION_FLT_OVERFLOW: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_OVERFLOW"); + break; + case EXCEPTION_FLT_STACK_CHECK: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_STACK_CHECK"); + break; + case EXCEPTION_FLT_UNDERFLOW: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_FLT_UNDERFLOW"); + break; + case EXCEPTION_ILLEGAL_INSTRUCTION: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_ILLEGAL_INSTRUCTION"); + break; + case EXCEPTION_IN_PAGE_ERROR: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_IN_PAGE_ERROR"); + break; + case EXCEPTION_INT_DIVIDE_BY_ZERO: + EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_INT_DIVIDE_BY_ZERO"); + break; + case EXCEPTION_INT_OVERFLOW: + EG_LOG(LogDefault, 
Fatal, "Crash: EXCEPTION_INT_OVERFLOW");
+			break;
+		case EXCEPTION_INVALID_DISPOSITION:
+			EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_INVALID_DISPOSITION");
+			break;
+		case EXCEPTION_NONCONTINUABLE_EXCEPTION:
+			EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_NONCONTINUABLE_EXCEPTION");
+			break;
+		case EXCEPTION_PRIV_INSTRUCTION:
+			EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_PRIV_INSTRUCTION");
+			break;
+		case EXCEPTION_SINGLE_STEP:
+			EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_SINGLE_STEP");
+			break;
+		case EXCEPTION_STACK_OVERFLOW:
+			EG_LOG(LogDefault, Fatal, "Crash: EXCEPTION_STACK_OVERFLOW");
+			break;
+		default:
+			EG_LOG(LogDefault, Fatal, "Crash: Unrecognized Exception");
+			break;
+		}
+
+		// Give some time for logs to flush
+		std::this_thread::sleep_for(std::chrono::seconds(1));
+
+		exit(EXIT_FAILURE);
+		return EXCEPTION_EXECUTE_HANDLER;
+	}
+
+} // namespace detail
+
+void SetupCrashDetection()
+{
+	::SetUnhandledExceptionFilter(detail::WindowsExceptionHandler);
+}
+
+#elif EG_PLATFORM == EG_PLATFORM_LINUX
+	// #LINUX: See https://gist.github.com/jvranish/4441299 for some tips how to implement this in Linux
+	#error "Not implemented yet"
+#else
+	#error "Unknown Platform"
+#endif
+
+
diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.h
new file mode 100644
index 000000000000..4b96a85b806b
--- /dev/null
+++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/CrashDetection.h
@@ -0,0 +1,7 @@
+// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
+ +#pragma once + +#include "WebRTCProxyCommon.h" + +void SetupCrashDetection(); diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.cpp new file mode 100644 index 000000000000..2fd4ab4fd0d2 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.cpp @@ -0,0 +1,93 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" + +#include "FileLogOutput.h" +#include "StringUtils.h" +#include "TimeUtils.h" + +////////////////////////////////////////////////////////////////////////// +// +// FThreadedFileLogging +// +////////////////////////////////////////////////////////////////////////// + + FThreadedFileLogging::FThreadedFileLogging(const char* Filename, const char* PostFix) +{ + if (!Filename) + { + FDateTime DateTime = PARAM_LocalTime ? Now() : UtcNow(); + + std::string ExeName; + std::string Basename; + std::string ExePath = GetProcessPath(&ExeName); + + GetExtension(ExeName, &Basename); + Filename = FormatString( + "%s\\%s-%s", + ExePath.c_str(), + Basename.c_str(), + DateTime.ToString(false)); + } + + Filename = FormatString("%s%s", Filename, PostFix ? 
PostFix : ""); + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + // Visual Studio has an "ofstream::open" overload that takes a wchar_t string" + Out.open(Widen(Filename), std::ios::out | std::ios::trunc); +#else + Out.open(Filename, std::ios::out | std::ios::trunc); +#endif + + if (!Out.is_open()) + { + EG_LOG(LogDefault, Error, "Failed to open log file '%s'", Filename); + } + + WorkThread = std::thread([this]() + { + while (!bFinish) + { + std::function Work; + WorkQueue.Pop(Work); + Work(); + } + }); +} + +FThreadedFileLogging::~FThreadedFileLogging() +{ + WorkQueue.Push([this]() { bFinish = true; }); + WorkThread.join(); +} + +void FThreadedFileLogging::Write( + const char* Msg) +{ + WorkQueue.Push([ this, Msg = std::string(Msg) ]() + { + Out << Msg; + if (bAutoNewLine) + { + Out << std::endl; + } + Out.flush(); + }); +} + +////////////////////////////////////////////////////////////////////////// +// +// FFileLogOutput +// +////////////////////////////////////////////////////////////////////////// + +FFileLogOutput::FFileLogOutput(const char* Filename, const char* PostFix) + : FThreadedFileLogging(Filename, PostFix) +{ +} + +void FFileLogOutput::Log( + const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, const char* Msg) +{ + Write(Msg); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.h new file mode 100644 index 000000000000..a4218767d8e8 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/FileLogOutput.h @@ -0,0 +1,67 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" +#include "Logging.h" +#include "SharedQueue.h" + +/** + * Reusable file logging. + * This is split from the EG_LOG macro file logging, so it can be reused without + * accepting the EG_LOG macros. 
+ */ +class FThreadedFileLogging +{ +public: + /** + * @param Filename + * Full path to the log file name. If not specified, it will use + * "\_YYYY-MM-DD_HH-MM-SS + * @param PostFix Post-fix to the filename. + * This can be useful to for example split logs into categories. Eg: + * .log and _WebRTC.log + */ + FThreadedFileLogging(const char* Filename, const char* PostFix=".log"); + ~FThreadedFileLogging(); + + /** + * @param bAutoNewLine if true, ever Log call will automatically append a '\n' to the message + */ + void SetAutoNewLine(bool bAutoNewLine) + { + this->bAutoNewLine = bAutoNewLine; + } + + void Write(const char* Msg); + +private: + + std::ofstream Out; + FWorkQueue WorkQueue; + std::thread WorkThread; + bool bFinish = false; + + // If true, it will automatically append a '\n' to the logged messages + bool bAutoNewLine = false; +}; + +/** + * Logs EG_LOG macros calls to a file + */ +class FFileLogOutput + : public FThreadedFileLogging + , public ILogOutput +{ +public: + + /** + * See the FThreadedFileLogging constructor for what the parameters do + */ + FFileLogOutput(const char* Filename, const char* PostFix=".log"); + + // + // ILogOutput interface + // + void Log(const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, const char* Msg) override; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/H264FrameBuffer.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/H264FrameBuffer.h new file mode 100644 index 000000000000..be11622d8abd --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/H264FrameBuffer.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" + +class FH264FrameBuffer : public webrtc::VideoFrameBuffer +{ +public: + FH264FrameBuffer(int Width, int Height) + : Width(Width) + , Height(Height) + { + } + + // + // webrtc::VideoFrameBuffer interface + // + Type type() const override + { + return Type::kNative; + } + + virtual int width() const override + { + return Width; + } + + virtual int height() const override + { + return Height; + } + + rtc::scoped_refptr ToI420() override + { + check(false); + return nullptr; + } + + // + // Own methods + // + std::vector& GetBuffer() + { + return Buffer; + } + +private: + int Width; + int Height; + std::vector Buffer; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.cpp new file mode 100644 index 000000000000..3467145ff91e --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.cpp @@ -0,0 +1,106 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "Logging.h" +#include "StringUtils.h" +#include "TimeUtils.h" + +EG_DEFINE_LOG_CATEGORY(LogDefault) + +extern bool PARAM_DbgWindow_Proxy; + +const char* LogVerbosityToString(ELogVerbosity v) +{ + switch (v) + { + case ELogVerbosity::None: + return "NNN"; + case ELogVerbosity::Fatal: + return "FTL"; + case ELogVerbosity::Error: + return "ERR"; + case ELogVerbosity::Warning: + return "WRN"; + case ELogVerbosity::Log: + return "LOG"; + }; + return "Unknown"; +} + +FLogCategoryBase::FLogCategoryBase(const char* Name, ELogVerbosity Verbosity, ELogVerbosity CompileTimeVerbosity) + : Name(Name) + , Verbosity(Verbosity) + , CompileTimeVerbosity(CompileTimeVerbosity) +{ +} + +bool FLogCategoryBase::IsSuppressed(ELogVerbosity V) const +{ + return V > this->Verbosity; +} + +void FLogCategoryBase::SetVerbosity(ELogVerbosity V) +{ + Verbosity = ELogVerbosity(std::min((int)CompileTimeVerbosity, (int)V)); +} + +////////////////////////////////////////////////////////////////////////// +// ILogOutput +////////////////////////////////////////////////////////////////////////// + +ILogOutput::ILogOutput() +{ + auto Data = GetSharedData(); + auto Lk = std::unique_lock(Data->Mtx); + Data->Outputs.push_back(this); +} + +ILogOutput::~ILogOutput() +{ + auto Data = GetSharedData(); + auto Lk = std::unique_lock(Data->Mtx); + Data->Outputs.erase(std::find(Data->Outputs.begin(), Data->Outputs.end(), this)); +} + +ILogOutput::FSharedData* ILogOutput::GetSharedData() +{ + // This is thread safe (aka: Magic statics in C++11) + static FSharedData Data; + return &Data; +} + +void ILogOutput::LogToAll( + const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, + _Printf_format_string_ const char* Fmt, ...) +{ + va_list Args; + va_start(Args, Fmt); + + const char* Prefix = ""; + + { + FDateTime DateTime = PARAM_LocalTime ? 
Now() : UtcNow(); + Prefix = FormatString( + "[%s]: %s: %-15s: ", + DateTime.ToString(), + LogVerbosityToString(Verbosity), + Category->Name.c_str()); + } + + char* Msg = FormatStringVA(Fmt, Args); + constexpr int BufSize = 1024*10; + char Buf[BufSize]; + SNPrintf(Buf, BufSize, "%s%s\n", Prefix, Msg); + + if (PARAM_DbgWindow_Proxy) + { + OutputDebugStringA(Buf); + } + + auto Data = GetSharedData(); + auto Lk = std::unique_lock(Data->Mtx); + for (ILogOutput* Out : Data->Outputs) + { + Out->Log(File, Line, Category, Verbosity, Buf); + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.h new file mode 100644 index 000000000000..cb26df150e29 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Logging.h @@ -0,0 +1,115 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +/** + * + * Logging framework very similar to what UE4's own logging framework + * + */ +#pragma once + +#include "WebRTCProxyCommon.h" + +enum class ELogVerbosity : uint8_t +{ + None, + Fatal, + Error, + Warning, + Log +}; + +const char* LogVerbosityToString(ELogVerbosity v); + +class FLogCategoryBase +{ +public: + FLogCategoryBase(const char* Name, ELogVerbosity Verbosity, ELogVerbosity CompileTimeVerbosity); + + //! Tells if a log message of the specified verbosity should be suppressed or logged + bool IsSuppressed(ELogVerbosity V) const; + + //! Set runtime verbosity + void SetVerbosity(ELogVerbosity V); + + ELogVerbosity Verbosity; + ELogVerbosity CompileTimeVerbosity; + std::string Name; +}; + +template +class FLogCategory : public FLogCategoryBase +{ +public: + FLogCategory(const char* Name) + : FLogCategoryBase(Name, DEFAULT_VERBOSITY, COMPILETIME_VERBOSITY) + { + } + + enum + { + CompileTimeVerbosity = (int)COMPILETIME_VERBOSITY + }; +}; + +/** + * Interface for log outputs. 
+ * NOTE: Classes that implement this interface get automatically registered as + * a log output, and unregistered when destroyed + */ +class ILogOutput +{ +public: + ILogOutput(); + virtual ~ILogOutput(); + + static void LogToAll( + const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, + _Printf_format_string_ const char* Fmt, ...); + + virtual void + Log(const char* File, int Line, const FLogCategoryBase* Category, ELogVerbosity Verbosity, const char* Msg) = 0; + +private: + struct FSharedData + { + std::mutex Mtx; + std::vector Outputs; + }; + static FSharedData* GetSharedData(); +}; + +#define EG_LOG_MINIMUM_VERBOSITY Log + +#define EG_DECLARE_LOG_CATEGORY(NAME, DEFAULT_VERBOSITY, COMPILETIME_VERBOSITY) \ + extern class FLogCategory##NAME \ + : public ::FLogCategory<::ELogVerbosity::DEFAULT_VERBOSITY, ::ELogVerbosity::COMPILETIME_VERBOSITY> \ + { \ + public: \ + FLogCategory##NAME() \ + : FLogCategory(#NAME) \ + { \ + } \ + } NAME; + +#define EG_DEFINE_LOG_CATEGORY(NAME) FLogCategory##NAME NAME; + +#define EG_LOG_CHECK_COMPILETIME_VERBOSITY(NAME, VERBOSITY) \ + (((int)::ELogVerbosity::VERBOSITY <= FLogCategory##NAME::CompileTimeVerbosity) && \ + ((int)::ELogVerbosity::VERBOSITY <= (int)::ELogVerbosity::EG_LOG_MINIMUM_VERBOSITY)) + +#define EG_LOG(NAME, VERBOSITY, Fmt, ...) 
\ + { \ + if constexpr (EG_LOG_CHECK_COMPILETIME_VERBOSITY(NAME, VERBOSITY)) \ + { \ + if (!NAME.IsSuppressed(::ELogVerbosity::VERBOSITY)) \ + { \ + ::ILogOutput::LogToAll(__FILE__, __LINE__, &NAME, ::ELogVerbosity::VERBOSITY, Fmt, ##__VA_ARGS__); \ + if (::ELogVerbosity::VERBOSITY == ::ELogVerbosity::Fatal) \ + { \ + ::DoAssert(__FILE__, __LINE__, Fmt, ##__VA_ARGS__); \ + } \ + } \ + } \ + } + +EG_DECLARE_LOG_CATEGORY(LogDefault, Log, Log) diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.cpp new file mode 100644 index 000000000000..e7b87b546725 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.cpp @@ -0,0 +1,324 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "NetworkAudioCapturer.h" +#include "Logging.h" + +// These are copied from webrtc internals +#define CHECKinitialized_() \ + { \ + if (!bInitialized) \ + { \ + return -1; \ + }; \ + } +#define CHECKinitialized__BOOL() \ + { \ + if (!bInitialized) \ + { \ + return false; \ + }; \ + } + +#define LOGFUNC() EG_LOG(LogDefault, Log, "%s", __FUNCTION__) + +void FNetworkAudioCapturer::ProcessPacket(PixelStreamingProtocol::EToProxyMsg PkType, const void* Data, uint32_t Size) +{ + if (PkType != PixelStreamingProtocol::EToProxyMsg::AudioPCM) + { + return; + } + + if (!(bInitialized && bRecordingInitialized)) + { + return; + } + + auto PkData = static_cast(Data); + + RecordingBuffer.insert(RecordingBuffer.end(), PkData, PkData + Size); + int BytesPer10Ms = (SampleRate * Channels * static_cast(sizeof(uint16_t))) / 100; + + // Feed in 10ms chunks + while (RecordingBuffer.size() >= BytesPer10Ms) + { + // If this check fails, then it means we tried to use it after it was + // destroyed in "Terminate". If so, then we should use a mutex around it + // so we are either destroying it, or using it. 
+ // The way the objects and threads interact, and the way shutdown is done, + // it shouldn't happen, but nevertheless, having the check doesn't hurt. + check(DeviceBuffer); + if (DeviceBuffer) + { + DeviceBuffer->SetRecordedBuffer(RecordingBuffer.data(), BytesPer10Ms / (sizeof(uint16_t) * Channels)); + DeviceBuffer->DeliverRecordedData(); + } + + RecordingBuffer.erase(RecordingBuffer.begin(), RecordingBuffer.begin() + BytesPer10Ms); + } +} + +int32_t FNetworkAudioCapturer::ActiveAudioLayer(AudioLayer* audioLayer) const +{ + //LOGFUNC(); + *audioLayer = AudioDeviceModule::kDummyAudio; + return 0; +} + +int32_t FNetworkAudioCapturer::RegisterAudioCallback(webrtc::AudioTransport* audioCallback) +{ + //LOGFUNC(); + DeviceBuffer->RegisterAudioCallback(audioCallback); + return 0; +} + +int32_t FNetworkAudioCapturer::Init() +{ + //LOGFUNC(); + if (bInitialized) + return 0; + + DeviceBuffer = std::make_unique(); + + bInitialized = true; + return 0; +} + +int32_t FNetworkAudioCapturer::Terminate() +{ + //LOGFUNC(); + if (!bInitialized) + return 0; + + DeviceBuffer.reset(); + + bInitialized = false; + return 0; +} + +bool FNetworkAudioCapturer::Initialized() const +{ + //LOGFUNC(); + return bInitialized; +} + +int16_t FNetworkAudioCapturer::PlayoutDevices() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int16_t FNetworkAudioCapturer::RecordingDevices() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::PlayoutDeviceName( + uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::RecordingDeviceName( + uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::SetPlayoutDevice(uint16_t index) +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +int32_t 
FNetworkAudioCapturer::SetPlayoutDevice(WindowsDeviceType device) +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +int32_t FNetworkAudioCapturer::SetRecordingDevice(uint16_t index) +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +int32_t FNetworkAudioCapturer::SetRecordingDevice(WindowsDeviceType device) +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +int32_t FNetworkAudioCapturer::PlayoutIsAvailable(bool* available) +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::InitPlayout() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +bool FNetworkAudioCapturer::PlayoutIsInitialized() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return false; +} + +int32_t FNetworkAudioCapturer::RecordingIsAvailable(bool* available) +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::InitRecording() +{ + LOGFUNC(); + CHECKinitialized_(); + + // #Audio : Allow dynamic values for samplerate and/or channels , + // or receive those from UE4 ? 
+ DeviceBuffer->SetRecordingSampleRate(SampleRate); + DeviceBuffer->SetRecordingChannels(Channels); + + bRecordingInitialized = true; + return 0; +} + +bool FNetworkAudioCapturer::RecordingIsInitialized() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return bRecordingInitialized == true; +} + +int32_t FNetworkAudioCapturer::StartPlayout() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::StopPlayout() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +bool FNetworkAudioCapturer::Playing() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return false; +} + +int32_t FNetworkAudioCapturer::StartRecording() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::StopRecording() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +bool FNetworkAudioCapturer::Recording() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return bRecordingInitialized; +} + +int32_t FNetworkAudioCapturer::InitSpeaker() +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +bool FNetworkAudioCapturer::SpeakerIsInitialized() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return false; +} + +int32_t FNetworkAudioCapturer::InitMicrophone() +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +bool FNetworkAudioCapturer::MicrophoneIsInitialized() const +{ + //LOGFUNC(); + CHECKinitialized__BOOL(); + return true; +} + +int32_t FNetworkAudioCapturer::StereoPlayoutIsAvailable(bool* available) const +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::SetStereoPlayout(bool enable) +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::StereoPlayout(bool* enabled) const +{ + //LOGFUNC(); + CHECKinitialized_(); + return -1; +} + +int32_t FNetworkAudioCapturer::StereoRecordingIsAvailable(bool* available) const +{ + //LOGFUNC(); + CHECKinitialized_(); + *available = true; + return 0; +} + +int32_t 
FNetworkAudioCapturer::SetStereoRecording(bool enable) +{ + //LOGFUNC(); + CHECKinitialized_(); + return 0; +} + +int32_t FNetworkAudioCapturer::StereoRecording(bool* enabled) const +{ + //LOGFUNC(); + CHECKinitialized_(); + *enabled = true; + return 0; +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.h new file mode 100644 index 000000000000..15184a6fecbd --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkAudioCapturer.h @@ -0,0 +1,184 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +class FNetworkAudioCapturer : public webrtc::AudioDeviceModule +{ +public: + void ProcessPacket(PixelStreamingProtocol::EToProxyMsg PkType, const void* Data, uint32_t Size); + +private: + // + // webrtc::AudioDeviceModule interface + // + int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override; + int32_t RegisterAudioCallback(webrtc::AudioTransport* audioCallback) override; + + // Main initialization and termination + int32_t Init() override; + int32_t Terminate() override; + bool Initialized() const override; + + // Device enumeration + int16_t PlayoutDevices() override; + int16_t RecordingDevices() override; + int32_t PlayoutDeviceName( + uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override; + int32_t RecordingDeviceName( + uint16_t index, char name[webrtc::kAdmMaxDeviceNameSize], char guid[webrtc::kAdmMaxGuidSize]) override; + + // Device selection + int32_t SetPlayoutDevice(uint16_t index) override; + int32_t SetPlayoutDevice(WindowsDeviceType device) override; + int32_t SetRecordingDevice(uint16_t index) override; + int32_t SetRecordingDevice(WindowsDeviceType device) override; + + // Audio transport initialization + int32_t PlayoutIsAvailable(bool* available) override; + int32_t InitPlayout() override; + 
bool PlayoutIsInitialized() const override; + int32_t RecordingIsAvailable(bool* available) override; + int32_t InitRecording() override; + bool RecordingIsInitialized() const override; + + // Audio transport control + virtual int32_t StartPlayout() override; + virtual int32_t StopPlayout() override; + virtual bool Playing() const override; + virtual int32_t StartRecording() override; + virtual int32_t StopRecording() override; + virtual bool Recording() const override; + + // Audio mixer initialization + virtual int32_t InitSpeaker() override; + virtual bool SpeakerIsInitialized() const override; + virtual int32_t InitMicrophone() override; + virtual bool MicrophoneIsInitialized() const override; + + // Speaker volume controls + virtual int32_t SpeakerVolumeIsAvailable(bool* available) override + { + return -1; + } + virtual int32_t SetSpeakerVolume(uint32_t volume) override + { + return -1; + } + virtual int32_t SpeakerVolume(uint32_t* volume) const override + { + return -1; + } + virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override + { + return -1; + } + virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const override + { + return -1; + } + + // Microphone volume controls + virtual int32_t MicrophoneVolumeIsAvailable(bool* available) override + { + return -1; + } + virtual int32_t SetMicrophoneVolume(uint32_t volume) override + { + return -1; + } + virtual int32_t MicrophoneVolume(uint32_t* volume) const override + { + return -1; + } + virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override + { + return -1; + } + virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const override + { + return -1; + } + + // Speaker mute control + virtual int32_t SpeakerMuteIsAvailable(bool* available) override + { + return -1; + } + virtual int32_t SetSpeakerMute(bool enable) override + { + return -1; + } + virtual int32_t SpeakerMute(bool* enabled) const override + { + return -1; + } + + // Microphone mute control + virtual int32_t 
MicrophoneMuteIsAvailable(bool* available) override + { + return -1; + } + virtual int32_t SetMicrophoneMute(bool enable) override + { + return -1; + } + virtual int32_t MicrophoneMute(bool* enabled) const override + { + return -1; + } + + // Stereo support + virtual int32_t StereoPlayoutIsAvailable(bool* available) const override; + virtual int32_t SetStereoPlayout(bool enable) override; + virtual int32_t StereoPlayout(bool* enabled) const override; + virtual int32_t StereoRecordingIsAvailable(bool* available) const override; + virtual int32_t SetStereoRecording(bool enable) override; + virtual int32_t StereoRecording(bool* enabled) const override; + + // Playout delay + virtual int32_t PlayoutDelay(uint16_t* delayMS) const override + { + return -1; + } + + // Only supported on Android. + virtual bool BuiltInAECIsAvailable() const override + { + return false; + } + virtual bool BuiltInAGCIsAvailable() const override + { + return false; + } + virtual bool BuiltInNSIsAvailable() const override + { + return false; + } + + // Enables the built-in audio effects. Only supported on Android. + virtual int32_t EnableBuiltInAEC(bool enable) override + { + return -1; + } + virtual int32_t EnableBuiltInAGC(bool enable) override + { + return -1; + } + virtual int32_t EnableBuiltInNS(bool enable) override + { + return -1; + } + + std::atomic bInitialized = false; + std::unique_ptr DeviceBuffer; + + std::vector Tempbuf; + std::vector RecordingBuffer; + int RecordingBufferSize = 0; + + std::atomic bRecordingInitialized = false; + int SampleRate = 48000; + int Channels = 2; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.cpp new file mode 100644 index 000000000000..e23bc6b936d6 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.cpp @@ -0,0 +1,36 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "NetworkVideoCapturer.h" +#include "Logging.h" +#include "UE4Connection.h" + +FNetworkVideoCapturer::FNetworkVideoCapturer() +{ + set_enable_video_adapter(false); + + std::vector Formats; + Formats.push_back(cricket::VideoFormat(1920, 1080, cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_H264)); + SetSupportedFormats(Formats); +} + +void FNetworkVideoCapturer::ProcessPacket(PixelStreamingProtocol::EToProxyMsg PkType, const void* Data, uint32_t Size) +{ + rtc::scoped_refptr buffer = new rtc::RefCountedObject(Width, Height); + webrtc::VideoFrame Frame{buffer, webrtc::VideoRotation::kVideoRotation_0, 0}; + + // #Andriy: WebRTC doesn't like frames with the same timestamp and will drop one of them + // we don't like our frames to be dropped so let's cheat with setting a unique value but close to be true + int64_t NtpTimeMs = rtc::TimeMillis(); + if (NtpTimeMs <= LastNtpTimeMs) + NtpTimeMs = LastNtpTimeMs + 1; + LastNtpTimeMs = NtpTimeMs; + Frame.set_ntp_time_ms(NtpTimeMs); + + auto PkData = reinterpret_cast(Data); + + buffer->GetBuffer().assign(PkData, PkData + Size); + + OnFrame(Frame, Width, Height); + ++FrameNo; +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.h new file mode 100644 index 000000000000..72af27c98f83 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/NetworkVideoCapturer.h @@ -0,0 +1,42 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" +#include "H264FrameBuffer.h" + +class FNetworkVideoCapturer : public cricket::VideoCapturer +{ +public: + FNetworkVideoCapturer(); + + void ProcessPacket(PixelStreamingProtocol::EToProxyMsg PkType, const void* Data, uint32_t Size); + +private: + ////////////////////////////////////////////////////////////////////////// + // cricket::VideoCapturer interface + cricket::CaptureState Start(const cricket::VideoFormat& Format) override + { return cricket::CS_RUNNING; } + + void Stop() override + {} + + bool IsRunning() override + { return true; } + + bool IsScreencast() const override + { return false; } + + bool GetPreferredFourccs(std::vector* fourccs) override + { + fourccs->push_back(cricket::FOURCC_H264); + return true; + } + ////////////////////////////////////////////////////////////////////////// + + uint64_t FrameNo = 0; + int32_t Width = 1920; + int32_t Height = 1080; + int32_t Framerate = 60; + int64_t LastNtpTimeMs = 0; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ScopeGuard.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ScopeGuard.h new file mode 100644 index 000000000000..d85d60431935 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/ScopeGuard.h @@ -0,0 +1,91 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+/* +Based on ScopeGuard presented at: +http://channel9.msdn.com/Shows/Going+Deep/C-and-Beyond-2012-Andrei-Alexandrescu-Systematic-Error-Handling-in-C +*/ + +#pragma once + +#include "WebRTCProxyCommon.h" + +template +class TScopeGuard +{ +public: + TScopeGuard(Func F) + : F(std::move(F)) + , Active(true) + { + } + + ~TScopeGuard() + { + if (Active) + F(); + } + + void Dismiss() + { + Active = false; + } + + TScopeGuard() = delete; + TScopeGuard(const TScopeGuard&) = delete; + TScopeGuard& operator=(const TScopeGuard&) = delete; + TScopeGuard(TScopeGuard&& Other) + : F(std::move(Other.F)) + , Active(Other.Active) + { + Other.Dismiss(); + } + +private: + Func F; + bool Active; +}; + + +/** + Using a template function to create guards, to make for shorter code. + e.g: + auto g1 = ScopeGuard( [&] { SomeCleanupCode(); } ); +*/ +template< class Func> +TScopeGuard ScopeGuard(Func F) +{ + return TScopeGuard(std::move(F)); +} + +/** + Macros to be able to set anonymous scope guards. E.g: + + // some code ... + SCOPE_EXIT { some cleanup code }; + // more code ... + SCOPE_EXIT { more cleanup code }; + // more code ... + */ +namespace detail +{ + enum class EScopeGuardOnExit {}; + template + __forceinline TScopeGuard operator+(EScopeGuardOnExit, Func&& F) { + return TScopeGuard(std::forward(F)); + } +} + +#define CONCATENATE_IMPL(S1,S2) S1##S2 +#define CONCATENATE(S1,S2) CONCATENATE_IMPL(S1,S2) + +// Note: __COUNTER__ Expands to an integer starting with 0 and incrementing by 1 every time it is used in a source file or included headers of the source file. 
+#ifdef __COUNTER__ + #define ANONYMOUS_VARIABLE(Str) \ + CONCATENATE(Str,__COUNTER__) +#else + #define ANONYMOUS_VARIABLE(Str) \ + CONCATENATE(Str,__LINE__) +#endif + +#define SCOPE_EXIT \ + auto ANONYMOUS_VARIABLE(SCOPE_EXIT_STATE) \ + = ::detail::EScopeGuardOnExit() + [&]() diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Semaphore.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Semaphore.h new file mode 100644 index 000000000000..b94d251ba0d7 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/Semaphore.h @@ -0,0 +1,81 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +/** + * Portable semaphore, based on a mutex and condition variable + * Read https://en.cppreference.com/w/cpp/thread/condition_variable if you are + * unfamiliar with condition_variable + */ +class FSemaphore +{ +public: + FSemaphore(unsigned int Count = 0) + : Count(Count) + { + } + + /** + * Increases the counter + */ + void Notify() + { + std::unique_lock Lk(Mtx); + Count++; + Cv.notify_one(); + } + + /** + * Blocks until the counter is >0 + */ + void Wait() + { + std::unique_lock Lk(Mtx); + Cv.wait(Lk, [this]() { return Count > 0; }); + Count--; + } + + /** + * Similar to "Wait", but doesn't block. + * If the semaphore is not ready (aka: counter==0), it will just return false + * without blocking + */ + bool TryWait() + { + std::unique_lock Lk(Mtx); + if (Count) + { + Count--; + return true; + } + else + { + return false; + } + } + + /** + * Waits for the semaphore to be set, until the specific time is reached. + * @return + * Returns true if the semaphore was set before we reached the specified time point. + * Returns false if we reached the time point before the semaphore was set. 
+ */ + template + bool WaitUntil(const std::chrono::time_point& point) + { + std::unique_lock lock(Mtx); + if (!Cv.wait_until(lock, point, [this]() { return Count > 0; })) + { + return false; + } + Count--; + return true; + } + +private: + std::mutex Mtx; + std::condition_variable Cv; + unsigned int Count; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SetSessionDescriptionObserver.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SetSessionDescriptionObserver.h new file mode 100644 index 000000000000..1665b89d7219 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SetSessionDescriptionObserver.h @@ -0,0 +1,43 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +////////////////////////////////////////////////////////////////////////// +// FSetSessionDescriptionObserver +// WebRTC requires an implementation of `webrtc::SetSessionDescriptionObserver` interface as a callback +// for setting session description, either on receiving remote `offer` (`PeerConnection::SetRemoteDescription`) +// of on sending `answer` (`PeerConnection::SetLocalDescription`) +class FSetSessionDescriptionObserver : public webrtc::SetSessionDescriptionObserver +{ +public: + using FSuccessCallback = std::function; + using FFailureCallback = std::function; + + static FSetSessionDescriptionObserver* + Create(FSuccessCallback successCallback, FFailureCallback failureCallback) + { + return new rtc::RefCountedObject(std::move(successCallback), std::move(failureCallback)); + } + + FSetSessionDescriptionObserver(FSuccessCallback successCallback, FFailureCallback failureCallback) + : SuccessCallback(std::move(successCallback)) + , FailureCallback(std::move(failureCallback)) + {} + + // we don't need to do anything on success + void OnSuccess() override + { + SuccessCallback(); + } + + // errors usually mean incompatibility between our session configuration (often H.264, its profile and level) and + // client 
(browser), malformed SDP or if client doesn't support PlanB/UnifiedPlan (whatever was used by proxy) + void OnFailure(const std::string& Error) override + { + FailureCallback(Error); + } + +private: + FSuccessCallback SuccessCallback; + FFailureCallback FailureCallback; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SharedQueue.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SharedQueue.h new file mode 100644 index 000000000000..9d5dc714d18d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/SharedQueue.h @@ -0,0 +1,136 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +/** + * Multiple producer/multiple consumer thread safe queue + */ +template +class TSharedQueue +{ +public: + TSharedQueue() + { + } + + template + void Emplace(ARGS&&... Args) + { + std::lock_guard Lk(Mtx); + Queue.emplace(std::forward(Args)...); + DataCondVar.notify_one(); + } + + template + void Push(T&& Item) + { + std::lock_guard Lk(Mtx); + Queue.push(std::forward(Item)); + DataCondVar.notify_one(); + } + + /** + * Tries to pop an item from the queue. It does not block waiting for items + * to be available. + * + * @param OutItem popped item on exit (if an item was retrieved) + * @return true if an item as retrieved, false otherwise + */ + bool TryPop(T& OutItem) + { + std::lock_guard Lk(Mtx); + if (Queue.empty()) + { + return false; + } + OutItem = std::move(Queue.front()); + Queue.pop(); + return true; + } + + /** + * Retrieves all items into the supplied queue. + * This should be more efficient than retrieving one item at a time when a + * thread wants to process as many items as there are currently in the + * queue. Example: + * std::queue all; + * if (q.TryPopAll(all)) { + * ... process all items in the retrieved queue ... + * } + * + * @param OutQueue will contain the retrieved items on exit. Any pre-existing + * items will be lost. 
+ * @return true if any items were retrieved + */ + bool TryPopAll(std::queue& OutQueue) + { + std::lock_guard Lk(Mtx); + OutQueue = std::move(Queue); + return OutQueue.size() != 0; + } + + /** + * Pops an item, blocking if necessary to wait for one if the queue is currently + * empty. + * @param OutItem popped item on exit + */ + void Pop(T& OutItem) + { + std::unique_lock Lk(Mtx); + DataCondVar.wait(Lk, [this] { return !Queue.empty(); }); + OutItem = std::move(Queue.front()); + Queue.pop(); + } + + /** + * Retrieves an item, blocking if necessary for the specified duration + * until items are available arrive. + * + * @param OutItem popped item on exit (if an item was retrieved) + * @param TimeoutMs How long to wait for an item to be available + * @return true if an item as retrieved, false if it timed out before an item + * was available + */ + bool Pop(T& OutItem, int64_t TimeoutMs) + { + std::unique_lock Lk(Mtx); + if (!DataCondVar.wait_for(Lk, std::chrono::milliseconds(TimeoutMs), [this] { return !Queue.empty(); })) + { + return false; + } + + OutItem = std::move(Queue.front()); + Queue.pop(); + return true; + } + + /** + * Checks if the queue is empty + */ + bool IsEmpty() const + { + std::lock_guard Lk(Mtx); + return Queue.empty(); + } + + /** + * Tells how many items are available in the queue + */ + size_t Size() const + { + std::lock_guard Lk(Mtx); + return Queue.size(); + } + +private: + std::queue Queue; + mutable std::mutex Mtx; + std::condition_variable DataCondVar; + + TSharedQueue& operator=(const TSharedQueue&) = delete; + TSharedQueue(const TSharedQueue& other) = delete; +}; + +using FWorkQueue = TSharedQueue>; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.cpp new file mode 100644 index 000000000000..cba0d1b35d4f --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.cpp @@ -0,0 +1,152 @@ +// Copyright 1998-2018 
Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "StringUtils.h" + +void VSNPrintf(char* OutBuffer, int BufSize, const char* Fmt, va_list Args) +{ + int res; +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + res = _vsnprintf_s(OutBuffer, BufSize, _TRUNCATE, Fmt, Args); +#elif EG_PLATFORM == EG_PLATFORM_LINUX + res = vsnprintf(OutBuffer, BufSize, fmt, Args); +#else +#error Unknown platform +#endif + + if (res < 0) + { + // If this happens, it means we are probably using temporary strings, + // and we need to increase their sizes + // Leaving this assert here, so we can catch these situations in Debug builds. + // In Release builds, the string just stays truncated + assert(false); + } +} + +void SNPrintf(char* OutBuffer, int BufSize, _Printf_format_string_ const char* Fmt, ...) +{ + va_list Args; + va_start(Args, Fmt); + VSNPrintf(OutBuffer, BufSize, Fmt, Args); +} + +char* GetTemporaryString() +{ + // Per-thread scratchpad, with an array of several strings that keep + // cycling, to allow the caller to have some nesting before a string is reused. + thread_local static char Bufs[EG_TEMPORARY_STRING_MAX_NESTING][EG_TEMPORARY_STRING_MAX_SIZE]; + thread_local static int BufIndex = 0; + + char* Buf = Bufs[BufIndex]; + BufIndex++; + if (BufIndex == EG_TEMPORARY_STRING_MAX_NESTING) + { + BufIndex = 0; + } + + return Buf; +} + +const char* FormatString(_Printf_format_string_ const char* Fmt, ...) 
+{ + va_list Args; + va_start(Args, Fmt); + const char* Str = FormatStringVA(Fmt, Args); + va_end(Args); + return Str; +} + +char* FormatStringVA(const char* Fmt, va_list Argptr) +{ + char* Buf = GetTemporaryString(); + VSNPrintf(Buf, EG_TEMPORARY_STRING_MAX_SIZE, Fmt, Argptr); + return Buf; +} + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS +std::wstring Widen(const std::string& Utf8) +{ + if (Utf8.empty()) + { + return std::wstring(); + } + + // Get length (in wchar_t's), so we can reserve the size we need before the + // actual conversion + const int Length = ::MultiByteToWideChar( + CP_UTF8, // convert from UTF-8 + 0, // default flags + Utf8.data(), // source UTF-8 string + (int)Utf8.length(), // length (in chars) of source UTF-8 string + NULL, // unused - no conversion done in this step + 0 // request size of destination buffer, in wchar_t's + ); + if (Length == 0) + throw std::exception("Can't get length of UTF-16 string"); + + std::wstring Utf16; + Utf16.resize(Length); + + // Do the actual conversion + if (!::MultiByteToWideChar( + CP_UTF8, // convert from UTF-8 + 0, // default flags + Utf8.data(), // source UTF-8 string + (int)Utf8.length(), // length (in chars) of source UTF-8 string + &Utf16[0], // destination buffer + (int)Utf16.length() // size of destination buffer, in wchar_t's + )) + { + throw std::exception("Can't convert string from UTF-8 to UTF-16"); + } + + return Utf16; +} + +std::string Narrow(const std::wstring& Str) +{ + if (Str.empty()) + { + return std::string(); + } + + // Get length (in wchar_t's), so we can reserve the size we need before the + // actual conversion + const int Utf8_length = ::WideCharToMultiByte( + CP_UTF8, // convert to UTF-8 + 0, // default flags + Str.data(), // source UTF-16 string + (int)Str.length(), // source string length, in wchar_t's, + NULL, // unused - no conversion required in this step + 0, // request buffer size + NULL, + NULL // unused + ); + + if (Utf8_length == 0) + { + throw "Can't get length of UTF-8 
string"; + } + + std::string Utf8; + Utf8.resize(Utf8_length); + + // Do the actual conversion + if (!::WideCharToMultiByte( + CP_UTF8, // convert to UTF-8 + 0, // default flags + Str.data(), // source UTF-16 string + (int)Str.length(), // source string length, in wchar_t's, + &Utf8[0], // destination buffer + (int)Utf8.length(), // destination buffer size, in chars + NULL, + NULL // unused + )) + { + throw "Can't convert from UTF-16 to UTF-8"; + } + + return Utf8; +} +#endif diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.h new file mode 100644 index 000000000000..5a72af2f860f --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/StringUtils.h @@ -0,0 +1,104 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +// +// Temporary strings provide a temporary scratchpad for formatting/logging, +// without requiring memory allocation +// User code should not keep hold of these string pointers, since they are +// reused. Reuse period is specified by the "NESTING" macro +#define EG_TEMPORARY_STRING_MAX_SIZE (1024*8) +#define EG_TEMPORARY_STRING_MAX_NESTING 20 +/** +* Typical vsnprintf/snprintf. +* By using these we avoid the usual windows deprecation warnings +*/ +void VSNPrintf(char* OutBuffer, int BufSize, const char* Fmt, va_list Args); +void SNPrintf(char* OutBuffer, int BufSize, _Printf_format_string_ const char* Fmt, ...); + +/** + * @return + * A string buffer of EG_TEMPORARY_STRING_MAX_SIZE characters that can be used as a scratchpad. + */ +char* GetTemporaryString(); + +/** + * Akin to snprintf, but uses a temporary string. + * @return A temporary string. 
+ */ +const char* FormatString(_Printf_format_string_ const char* Fmt, ...); +char* FormatStringVA(const char* Fmt, va_list Argptr); + +/** + * Converts a utf8 string to wide string + */ +std::wstring Widen(const std::string& Utf8); + +/** + * Converts a wide string to utf8 + */ +std::string Narrow(const std::wstring& Str); + +// +// Case insensitive string search: +// Copied from http://stackoverflow.com/questions/3152241/case-insensitive-stdstring-find +// + +namespace detail +{ + // templated version of my_equal so it could work with both char and wchar_t + template + struct TCharEqual + { + TCharEqual(const std::locale& loc) + : loc_(loc) + { + } + bool operator()(charT ch1, charT ch2) { return std::toupper(ch1, loc_) == std::toupper(ch2, loc_); } + + private: + const std::locale& loc_; + }; +} + +/** + * Search for a substring (case insensitive) + * @param Where String to search in + * @param What string to search for in "Where" + * @return Position where the substring was found, or -1 if not found. 
+ */ +// find substring (case insensitive) +template +static int CiFindSubStr(const T& Where, const T& What, const std::locale& loc = std::locale()) +{ + typename T::const_iterator It = + std::search(Where.begin(), Where.end(), What.begin(), What.end(), detail::TCharEqual(loc)); + if (It != Where.end()) + return It - Where.begin(); + else + return -1; // not found +} + +/** + * Checks if two strings are equal (case insensitive) + */ +template +static bool CiEquals(const T& Str1, const T& Str2, const std::locale& loc = std::locale()) +{ + if (Str1.size() != Str2.size()) + return false; + typename T::const_iterator It1 = Str1.begin(); + typename T::const_iterator It2 = Str2.begin(); + detail::TCharEqual Eq(loc); + while (It1 != Str1.end()) + { + if (!Eq(*It1, *It2)) + return false; + ++It1; + ++It2; + } + return true; +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.cpp new file mode 100644 index 000000000000..1490baa4ba50 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.cpp @@ -0,0 +1,106 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "TimeUtils.h" +#include "StringUtils.h" + +const char* FDateTime::ToString(bool bIncludeMSec) +{ + if (bIncludeMSec) + { + return FormatString("%04d.%02d.%02d-%02d.%02d.%02d:%03d", + Year, Month, Day, Hour, Minute, Second, MSec); + } + else + { + return FormatString("%04d.%02d.%02d-%02d.%02d.%02d", + Year, Month, Day, Hour, Minute, Second); + } +} + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS +FDateTime Now() +{ + SYSTEMTIME st; + GetLocalTime( &st ); + int Year = st.wYear; + int Month = st.wMonth; + int Day = st.wDay; + int Hour = st.wHour; + int Min = st.wMinute; + int Sec = st.wSecond; + int MSec = st.wMilliseconds; + + return FDateTime(Year, Month, Day, Hour, Min, Sec, MSec); +} + +FDateTime UtcNow() +{ + SYSTEMTIME st; + GetSystemTime( &st ); + int Year = st.wYear; + int Month = st.wMonth; + int Day = st.wDay; + int Hour = st.wHour; + int Min = st.wMinute; + int Sec = st.wSecond; + int MSec = st.wMilliseconds; + + return FDateTime(Year, Month, Day, Hour, Min, Sec, MSec); +} + +#elif EG_PLATFORM == EG_PLATFORM_LINUX + +// #LINUX : These were copied from UE4, but untested in WebRTCProxy itself so far. 
+// Once porting to Linux is done, fix any problems this might have + +FDateTime Now() +{ + // query for calendar time + struct timeval Time; + gettimeofday(&Time, NULL); + + // convert it to local time + struct tm LocalTime; + localtime_r(&Time.tv_sec, &LocalTime); + + // pull out data/time + int Year = LocalTime.tm_year + 1900; + int Month = LocalTime.tm_mon + 1; + int Day = LocalTime.tm_mday; + int Hour = LocalTime.tm_hour; + int Min = LocalTime.tm_min; + int Sec = LocalTime.tm_sec; + int MSec = Time.tv_usec / 1000; + + return FDateTime(Year, Month, Day, Hour, Min, Sec, MSec); +} + +FDateTime UtcNow() +{ + // query for calendar time + struct timeval Time; + gettimeofday(&Time, NULL); + + // convert it to UTC + struct tm LocalTime; + gmtime_r(&Time.tv_sec, &LocalTime); + + // pull out data/time + int Year = LocalTime.tm_year + 1900; + int Month = LocalTime.tm_mon + 1; + int Day = LocalTime.tm_mday; + int Hour = LocalTime.tm_hour; + int Min = LocalTime.tm_min; + int Sec = LocalTime.tm_sec; + int MSec = Time.tv_usec / 1000; + + return FDateTime(Year, Month, Day, Hour, Min, Sec, MSec); +} + +#else + + #error "Unknown platform" + +#endif + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.h new file mode 100644 index 000000000000..8fd63e00f26b --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/TimeUtils.h @@ -0,0 +1,52 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" + + +struct FDateTime +{ + FDateTime(int Year, int Month, int Day, int Hour, int Minute, int Second, int Milliseconds) + : Year(Year) + , Month(Month) + , Day(Day) + , Hour(Hour) + , Minute(Minute) + , Second(Second) + , MSec(Milliseconds) + { + } + + // Full year (e.g: 2018) + int Year; + // 1..12 + int Month; + // Day of the month (e.g: 1..31) + int Day; + // (0..23) + int Hour; + // (0..59) + int Minute; + // (0..59) + int Second; + // Milliseconds (0..999) + int MSec; + + /** + Formats in a way ready for logging, matching UE4 format + YYYY.MM.DD-HH.MM.SS:MSEC + */ + const char* ToString(bool bIncludeMSec=true); +}; + + +/** + * Returns the local date/time + */ +FDateTime Now(); + +/** + * Returns curent UTC date/time + */ +FDateTime UtcNow(); diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.cpp new file mode 100644 index 000000000000..c85b238bce2c --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.cpp @@ -0,0 +1,107 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "UE4Connection.h" +#include "Logging.h" + +using PixelStreamingProtocol::EToUE4Msg; +using PixelStreamingProtocol::EToProxyMsg; + +FUE4Connection::FUE4Connection(IUE4ConnectionObserver& Observer): + Observer(Observer), + Connection("UE4", *this) +{} + +void FUE4Connection::Connect(const std::string& IP, uint16_t Port) +{ + Connection.Connect(IP, Port); +} + +void FUE4Connection::OnConnect() +{ + Observer.OnUE4Connected(); +} + +void FUE4Connection::OnDisconnect(int Err) +{ + Observer.OnUE4Disconnected(); +} + +void FUE4Connection::StartStreaming() +{ + bStreamingStarted = true; + const auto msg = EToUE4Msg::StartStreaming; + Connection.Send(&msg, sizeof(msg)); +} + +void FUE4Connection::StopStreaming() +{ + const auto msg = EToUE4Msg::StopStreaming; + Connection.Send(&msg, sizeof(msg)); + bStreamingStarted = false; +} + +void FUE4Connection::ForceKeyFrame() +{ + const auto msg = EToUE4Msg::IFrameRequest; + Connection.Send(&msg, sizeof(msg)); +} + +void FUE4Connection::SetRate(uint32_t BitrateKbps, uint32_t Framerate) +{ + { + uint8_t Buf[1 + sizeof(uint16_t)] = { + static_cast(EToUE4Msg::AverageBitrateRequest) }; + if (BitrateKbps > std::numeric_limits::max()) + { + EG_LOG(LogDefault, Log, "%s : BitrateKbps is %u . 
Clamping to 65535.", __FUNCTION__, BitrateKbps); + BitrateKbps = std::numeric_limits::max(); + } + + *reinterpret_cast(&Buf[1]) = static_cast(BitrateKbps); + Connection.Send(Buf, sizeof(Buf)); + } + + { + uint8_t Buf[1 + sizeof(uint16_t)] = { static_cast(EToUE4Msg::MaxFpsRequest) }; + *reinterpret_cast(&Buf[1]) = static_cast(Framerate); + Connection.Send(Buf, sizeof(Buf)); + } +} + +void FUE4Connection::Send(const void* Data, uint32_t Size) +{ + Connection.Send(Data, Size); +} + +uint32_t FUE4Connection::OnRead(const uint8_t* Data, uint32_t Size) +{ + if (!bStreamingStarted) + return Size; // drop data as there's no clients to receive it + + using FTimestamp = uint64_t; + using FPayloadSize = uint32_t; + + if (Size < sizeof(FTimestamp) + sizeof(EToProxyMsg) + sizeof(FPayloadSize)) + return 0; + + const uint8_t* Ptr = Data; // pointer to current read pos in the buffer + + auto CaptureTimeMs = *reinterpret_cast(Ptr); + Ptr += sizeof(CaptureTimeMs); + + auto PktType = *reinterpret_cast(Ptr); + Ptr += sizeof(PktType); + + auto PayloadSize = *reinterpret_cast(Ptr); + Ptr += sizeof(PayloadSize); + + if (Ptr + PayloadSize > Data + Size) + return 0; + + Observer.OnUE4Packet(PktType, Ptr, PayloadSize); + + Ptr += PayloadSize; + + return static_cast(Ptr - Data); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.h new file mode 100644 index 000000000000..af5ca3c11eab --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/UE4Connection.h @@ -0,0 +1,47 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "WebRTCProxyCommon.h" +#include "AsyncConnection.h" + +// callback interface for `FUE4Connection` +struct IUE4ConnectionObserver +{ + virtual ~IUE4ConnectionObserver() {} + + virtual void OnUE4Connected() = 0; + virtual void OnUE4Disconnected() = 0; + // reports incoming complete packet from UE4 preserving packet boundaries + virtual void OnUE4Packet(PixelStreamingProtocol::EToProxyMsg PktType, const void* Pkt, uint32_t Size) = 0; +}; + +// TCP client connection to UE4, manages UE4 <-> Proxy protocol +// automatically reconnects on disconnection +class FUE4Connection: public IAsyncConnectionObserver +{ +public: + explicit FUE4Connection(IUE4ConnectionObserver& Observer); + + // connects until succeeded + void Connect(const std::string& IP, uint16_t Port); + + // messages to UE4 + void StartStreaming(); + void StopStreaming(); + void ForceKeyFrame(); + void SetRate(uint32_t BitrateKbps, uint32_t Framerate); + // generic send for passing messages received from clients + void Send(const void* Data, uint32_t Size); + +private: + // IAsyncConnectionObserver impl + void OnConnect() override; + uint32_t OnRead(const uint8_t* Data, uint32_t Size) override; + void OnDisconnect(int Err) override; + +private: + IUE4ConnectionObserver& Observer; + FAsyncConnection Connection; + std::atomic bStreamingStarted = false; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.cpp new file mode 100644 index 000000000000..cb26a82621b1 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.cpp @@ -0,0 +1,301 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "WebRTCProxyPCH.h" +#include "VideoEncoder.h" +#include "Logging.h" +#include "H264FrameBuffer.h" +#include "ClientSession.h" + +namespace +{ + webrtc::SdpVideoFormat CreateH264Format(webrtc::H264::Profile profile, webrtc::H264::Level level) + { + const rtc::Optional profile_string = + webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(profile, level)); + check(profile_string); + return webrtc::SdpVideoFormat( + cricket::kH264CodecName, + {{cricket::kH264FmtpProfileLevelId, *profile_string}, + {cricket::kH264FmtpLevelAsymmetryAllowed, "1"}, + {cricket::kH264FmtpPacketizationMode, "1"}}); + } +} + +////////////////////////////////////////////////////////////////////////// +// +// FVideoEncoderFactory +// +////////////////////////////////////////////////////////////////////////// + +FVideoEncoderFactory::FVideoEncoderFactory(IVideoEncoderObserver& VideoSource) + : VideoSource(&VideoSource) +{ +} + +void FVideoEncoderFactory::AddSession(FClientSession& ClientSession) +{ + PendingClientSessions.Push(&ClientSession); +} + +std::vector FVideoEncoderFactory::GetSupportedFormats() const +{ + // return { CreateH264Format(webrtc::H264::kProfileBaseline, webrtc::H264::kLevel3_1), + // CreateH264Format(webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel3_1) }; + // return { CreateH264Format(webrtc::H264::kProfileMain, webrtc::H264::kLevel3_1) }; + return {CreateH264Format(webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel5_1)}; + // return { CreateH264Format(webrtc::H264::kProfileHigh, webrtc::H264::kLevel5_1) }; +} + +webrtc::VideoEncoderFactory::CodecInfo +FVideoEncoderFactory::QueryVideoEncoder(const webrtc::SdpVideoFormat& Format) const +{ + CodecInfo Info; + Info.is_hardware_accelerated = true; + Info.has_internal_source = false; + return Info; +} + +std::unique_ptr FVideoEncoderFactory::CreateVideoEncoder(const webrtc::SdpVideoFormat& Format) +{ + FClientSession* Session; + bool res = PendingClientSessions.Pop(Session, 
0); + checkf(res, "no client session associated with encoder instance"); + + auto VideoEncoder = std::make_unique(*VideoSource, *Session); + Session->VideoEncoder = VideoEncoder.get(); + return VideoEncoder; +} + +// +// FVideoEncoder +// + +FVideoEncoder::FVideoEncoder(IVideoEncoderObserver& Observer, FClientSession& OwnerSession) + : Observer(&Observer) + , OwnerSession(&OwnerSession) +{ + check(this->Observer); + check(this->OwnerSession); + + bOwnsQualityControl = OwnerSession.bOriginalQualityController; + + CodecSpecific.codecType = webrtc::kVideoCodecH264; + // #TODO: Probably smarter setting of `packetization_mode` is required, look at `H264EncoderImpl` ctor + // CodecSpecific.codecSpecific.H264.packetization_mode = webrtc::H264PacketizationMode::SingleNalUnit; + CodecSpecific.codecSpecific.H264.packetization_mode = webrtc::H264PacketizationMode::NonInterleaved; +} + +void FVideoEncoder::SetQualityControlOwnership(bool bOwnership) +{ + if (bOwnsQualityControl != bOwnership) + { + EG_LOG( + LogDefault, + Log, + "%s : ClientId=%d, Ownership=%s", + __FUNCTION__, + OwnerSession->ClientId, + bOwnership ? 
"true" : "false"); + bForceBitrateRequest = bOwnership; + bOwnsQualityControl = bOwnership; + } +} + +bool FVideoEncoder::HasQualityControlOwnership() +{ + return bOwnsQualityControl; +} + +int32_t FVideoEncoder::InitEncode(const webrtc::VideoCodec* CodecSettings, int32_t NumberOfCores, size_t MaxPayloadSize) +{ + EncodedImage._completeFrame = true; + return 0; +} + +int32_t FVideoEncoder::RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* Callback) +{ + this->Callback = Callback; + return 0; +} + +int32_t FVideoEncoder::Release() +{ + Callback = nullptr; + return 0; +} + +int32_t FVideoEncoder::Encode( + const webrtc::VideoFrame& Frame, const webrtc::CodecSpecificInfo* CodecSpecificInfo, + const std::vector* FrameTypes) +{ + // convert (copy) `frame` to `encodedFrame_`, check `webrtc::H264EncoderImpl::Encode` for reference + + FH264FrameBuffer* H264Frame = static_cast(Frame.video_frame_buffer().get()); + std::vector const& FrameBuffer = H264Frame->GetBuffer(); + + EncodedImage._encodedWidth = Frame.video_frame_buffer()->width(); + EncodedImage._encodedHeight = Frame.video_frame_buffer()->height(); + EncodedImage._timeStamp = Frame.timestamp(); + EncodedImage.ntp_time_ms_ = Frame.ntp_time_ms(); + EncodedImage.capture_time_ms_ = Frame.render_time_ms(); + EncodedImage.rotation_ = Frame.rotation(); + EncodedImage.content_type_ = webrtc::VideoContentType::UNSPECIFIED; + EncodedImage.timing_.flags = webrtc::TimingFrameFlags::kInvalid; + + //// set `encodedImage_._qp` and `encodedImage-._frameType` + //// a trick to use `H264BitstreamParser` for retrieving QP info and check for key-frames + //// the problem is that `H264BitstreamParser::ParseSlice()` is protected + // struct FBitstreamParser : public webrtc::H264BitstreamParser + //{ + // using webrtc::H264BitstreamParser::ParseSlice; + //}; + + EncodedImage._frameType = webrtc::kVideoFrameDelta; + std::vector NALUIndices = + webrtc::H264::FindNaluIndices(&FrameBuffer[0], FrameBuffer.size()); + bool 
bKeyFrameFound = false; + for (const webrtc::H264::NaluIndex& Index : NALUIndices) + { + // static_cast(&BitstreamParser) + // ->ParseSlice(&FrameBuffer[Index.payload_start_offset], Index.payload_size); + + webrtc::H264::NaluType NALUType = webrtc::H264::ParseNaluType(FrameBuffer[Index.payload_start_offset]); + + if (NALUType == webrtc::H264::kIdr /* || + NALUType == webrtc::H264::kSps || + NALUType == webrtc::H264::kPps*/ + && !bKeyFrameFound) + { + EncodedImage._frameType = webrtc::kVideoFrameKey; + // EG_LOG(LogDefault, Log, "key-frame"); + bKeyFrameFound = true; + // break; // we need to parse all NALUs so as H264BitstreamParser maintains internal state + break; + } + } + + // enforce key-frame if requested by webrtc and if we haven't received one + // seems it's always just one FrameType provided, as reference implementation of + // H264EncoderImpl checks only the first one + if (EncodedImage._frameType != webrtc::kVideoFrameKey && FrameTypes && (*FrameTypes)[0] == webrtc::kVideoFrameKey) + { + EG_LOG(LogDefault, Log, "key-frame requested, size=%zu", FrameTypes->size()); + + // #MULTICAST : Should we limit what video encoder instances ask for keyframes? + if (bOwnsQualityControl) + Observer->ForceKeyFrame(); + } + + // when we switch quality control to client with higher B/W WebRTC won't notify us that bitrate can + // be increased. 
So force set last recorded bitrate for this client though we also could set just sufficiently + // big number to force webRTC to report what actual B/W is + if (bOwnsQualityControl && bForceBitrateRequest && LastBitrate.get_sum_kbps() > 0) + { + SetRateAllocation(LastBitrate, LastFramerate); + } + + // BitstreamParser.GetLastSliceQp(&EncodedImage.qp_); + + // copy frame buffer + // EncodedImageBuffer.resize(FrameBuffer.size()); + EncodedImageBuffer.assign(begin(FrameBuffer), end(FrameBuffer)); + EncodedImage._buffer = &EncodedImageBuffer[0]; + EncodedImage._length = EncodedImage._size = EncodedImageBuffer.size(); + + // fill RTP fragmentation info + FragHeader.VerifyAndAllocateFragmentationHeader(NALUIndices.size()); + FragHeader.fragmentationVectorSize = static_cast(NALUIndices.size()); + for (int I = 0; I != NALUIndices.size(); ++I) + { + webrtc::H264::NaluIndex const& NALUIndex = NALUIndices[I]; + FragHeader.fragmentationOffset[I] = NALUIndex.payload_start_offset; + FragHeader.fragmentationLength[I] = NALUIndex.payload_size; + + webrtc::H264::NaluType NALUType = webrtc::H264::ParseNaluType(FrameBuffer[NALUIndex.payload_start_offset]); +#if 0 + EG_LOG( + LogDefault, + Log, + "NALU: %d, start=%z, payload=%z", + static_cast(NALUType), + NALUIndex.start_offset, + NALUIndex.payload_size); +#endif + } + + // Deliver encoded image. 
+ Callback->OnEncodedImage(EncodedImage, &CodecSpecific, &FragHeader); + + ++FrameNo; + + return 0; +} + +int32_t FVideoEncoder::SetChannelParameters(uint32_t PacketLoss, int64_t Rtt) +{ + // EG_LOG( + // LogDefault, + // Log, + // "%s : ClientId=%d, PacketLoss=%u, Rtt=%" PRId64 "", + // __FUNCTION__, + // OwnerSession->ClientId, + // PacketLoss, + // Rtt); + return 0; +} + +int32_t FVideoEncoder::SetRates(uint32_t Bitrate, uint32_t Framerate) +{ + // EG_LOG( + // LogDefault, + // Log, + // "%s: ClientId=%d, BitRate=%u, Framerate=%u", + // __FUNCTION__, + // OwnerSession->ClientId, + // Bitrate, + // Framerate); + return 0; +} + +int32_t FVideoEncoder::SetRateAllocation(const webrtc::BitrateAllocation& Allocation, uint32_t Framerate) +{ + LastBitrate = Allocation; + LastFramerate = Framerate; + + if (!bOwnsQualityControl) + { + return 0; + } + + // it seems webrtc just reports the current framerate w/o much effort to probe what's + // max framerate it can achieve + // let's lift it a bit every time so we can keep it as high as possible + uint32_t LiftedFramerate = Framerate + std::min(static_cast(Framerate * 0.9), 1u); + EG_LOG( + LogDefault, + Log, + "%s : ClientId=%d, Bitrate=%u kbps, framerate=%u, lifted framerate=%u", + __FUNCTION__, + OwnerSession->ClientId, + Allocation.get_sum_kbps(), + Framerate, + LiftedFramerate); + + Observer->SetRate(Allocation.get_sum_kbps(), LiftedFramerate); + + bForceBitrateRequest = false; + + return 0; +} + +webrtc::VideoEncoder::ScalingSettings FVideoEncoder::GetScalingSettings() const +{ + // verifySlow(false); + // return ScalingSettings{ ScalingSettings::kOff }; + return ScalingSettings{0, 1024 * 1024}; +} + +bool FVideoEncoder::SupportsNativeHandle() const +{ + return true; +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.h new file mode 100644 index 000000000000..b95e97973377 --- /dev/null +++ 
b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/VideoEncoder.h @@ -0,0 +1,83 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" +#include "SharedQueue.h" + +// Forward declarations +struct FClientSession; + +class IVideoEncoderObserver +{ +public: + virtual void ForceKeyFrame() = 0; + virtual void SetRate(uint32_t BitrateKbps, uint32_t Framerate) = 0; +}; + +class FVideoEncoder : public webrtc::VideoEncoder +{ +public: + explicit FVideoEncoder(IVideoEncoderObserver& Observer, FClientSession& OwnerSession); + + void SetQualityControlOwnership(bool bOwnership); + bool HasQualityControlOwnership(); + + // + // webrtc::VideoEncoder interface + // + int32_t InitEncode(const webrtc::VideoCodec* CodecSetings, int32_t NumberOfCores, size_t MaxPayloadSize) override; + int32_t RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* Callback) override; + int32_t Release() override; + int32_t Encode( + const webrtc::VideoFrame& Frame, const webrtc::CodecSpecificInfo* CodecSpecificInfo, + const std::vector* FrameTypes) override; + int32_t SetChannelParameters(uint32_t PacketLoss, int64_t Rtt) override; + int32_t SetRates(uint32_t Bitrate, uint32_t Framerate) override; + int32_t SetRateAllocation(const webrtc::BitrateAllocation& Allocation, uint32_t Framerate) override; + ScalingSettings GetScalingSettings() const override; + bool SupportsNativeHandle() const override; + +private: + IVideoEncoderObserver* Observer; + + // Client session that this encoder instance belongs to + FClientSession* OwnerSession = nullptr; + webrtc::EncodedImageCallback* Callback = nullptr; + webrtc::EncodedImage EncodedImage; + std::vector EncodedImageBuffer; + webrtc::H264BitstreamParser BitstreamParser; + webrtc::CodecSpecificInfo CodecSpecific; + webrtc::RTPFragmentationHeader FragHeader; + bool bStartedFromSPS = false; + size_t FrameNo = 0; + + std::atomic bOwnsQualityControl = false; + std::atomic bForceBitrateRequest = 
false; + webrtc::BitrateAllocation LastBitrate; + uint32_t LastFramerate = 0; +}; + +class FVideoEncoderFactory : public webrtc::VideoEncoderFactory +{ +public: + explicit FVideoEncoderFactory(IVideoEncoderObserver& videoSource); + + /** + * This is used from the FClientSession::OnSucess to let the factory know + * what session the next created encoder should belong to. + * It allows us to get the right FClientSession <-> FVideoEncoder relationship + */ + void AddSession(FClientSession& ClientSession); + + // + // webrtc::VideoEncoderFactory implementation + // + std::vector GetSupportedFormats() const override; + CodecInfo QueryVideoEncoder(const webrtc::SdpVideoFormat& Format) const override; + std::unique_ptr CreateVideoEncoder(const webrtc::SdpVideoFormat& Format) override; + +private: + IVideoEncoderObserver* VideoSource; + TSharedQueue PendingClientSessions; +}; diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.cpp new file mode 100644 index 000000000000..68d0ef6d4a90 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.cpp @@ -0,0 +1,68 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "WebRTCLogging.h" +#include "StringUtils.h" +#include "FileLogOutput.h" +#include "TimeUtils.h" + +extern bool PARAM_DbgWindow_WebRTC; + +/** + * Receives logging from WebRTC internals, and writes it to a log file + * and VS's Output window + */ +class FWebRTCLogger : public rtc::LogSink +{ + public: + FWebRTCLogger() + : FileLog(nullptr, "-WebRTC.log") + { + // Disable WebRTC's internal calls to VS's OutputDebugString, because we are calling here, + // so we can add timestamps. + rtc::LogMessage::LogToDebug(rtc::LS_NONE); + } + + ~FWebRTCLogger() + { + } + + private: + void OnLogMessage(const std::string& message) override + { + FDateTime DateTime = PARAM_LocalTime ? 
Now() : UtcNow(); + const char* Msg = FormatString( + "[%s]: WEBRTC: %s", + DateTime.ToString(), + message.c_str()); + + if (PARAM_DbgWindow_WebRTC) + { + OutputDebugStringA(Msg); + } + + FileLog.Write(Msg); + } + + FFileLogOutput FileLog; +}; + +namespace +{ + std::unique_ptr WebRTCLogger; +} + +void InitializeWebRTCLogging(rtc::LoggingSeverity Verbosity) +{ + WebRTCLogger = std::make_unique(); + rtc::LogMessage::AddLogToStream(WebRTCLogger.get(), Verbosity); + rtc::LogMessage::SetLogToStderr(false); +} + +void StopWebRTCLogging() +{ + EG_LOG(LogDefault, Log, "Stopping WebRTC logging"); + rtc::LogMessage::RemoveLogToStream(WebRTCLogger.get()); + WebRTCLogger.reset(); +} + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.h new file mode 100644 index 000000000000..d6377ebbbd26 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCLogging.h @@ -0,0 +1,8 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "WebRTCProxyCommon.h" + +void InitializeWebRTCLogging(rtc::LoggingSeverity Verbosity); +void StopWebRTCLogging(); diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxy.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxy.cpp new file mode 100644 index 000000000000..0ff0ede64aa8 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxy.cpp @@ -0,0 +1,294 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" + +#include "Console.h" +#include "SharedQueue.h" +#include "FileLogOutput.h" +#include "CmdLine.h" +#include "WebRTCLogging.h" +#include "Conductor.h" +#include "StringUtils.h" +#include "ScopeGuard.h" +#include "CrashDetection.h" + +const char* Help = +"\ +WebRTCProxy\n\ +Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved.\n\ +\n\ +Parameters:\n\ +\n\ +-help\n\ +Shows this help\n\ +\n\ +-Cirrus=\n\ +The Cirrus server to connect to. If not specified. it defaults to 127.0.0.1:8888\n\ +\n\ +-StunServer=\n\ +Stun server to use.\n\ +\n\ +-UE4Port=\n\ +The port UE4 is listening on\n\ +\n\ +-AutoSetBitrate\n\ +If specified, it forcibly sends a bitrate request to UE4 once a client gets\n\ +quality control ownership\n\ +\n\ +-PlanB\n\ +If specified, it will use PlanB sdp semantics. Default is UnifiedPlan.\n\ +\n\ +-dbgwindow=[Proxy|WebRTC|All|None]\n\ +If running under the debugger (e.g: Visual Studio), it specifies what logs to\n\ +send to the Output Window.\n\ + Proxy - Only logs from WebRTCProxy itself will be displayed.\n\ + WebRTC - Only logs from WebRTC internals will be displayed.\n\ + All - (Default) Both WebRTCProxy and WebRTC internal logs are displayed.\n\ + None - No logs sent to the Output Window\n\ +\n\ +-LocalTime\n\ +If specified, it will use local time in logging, instead of UTC.\n\ +\n\ +\n\ +"; + +std::pair PARAM_Cirrus{ "127.0.0.1", 8888 }; +uint16_t PARAM_UE4Port = 8124; +bool PARAM_PlanB = false; +bool PARAM_DbgWindow_Proxy = true; +bool PARAM_DbgWindow_WebRTC = true; +bool PARAM_LocalTime = false; // By default we use UTC time + +bool ParseParameters(int argc, char* argv[]) +{ + FCmdLine Params; + if (!Params.Parse(argc, argv)) + { + printf(Help); + return false; + } + + if (Params.Has("Help")) + { + printf(Help); + return false; + } + + // Splits a string in the form of "XXXX:NNN" into a pair + auto ProcessAddressParameter = [&Params](const char* Name, std::pair& OutAddr) -> bool + { + if (!Params.Has(Name)) + { + return true; + } + + const char* const Param = Params.Get(Name).c_str(); + const char* Ptr = Param; + // Find separator + while (!(*Ptr==0 || *Ptr == ':' || *Ptr == '|')) + { + Ptr++; + } + + OutAddr.first = std::string(Param, Ptr); + // If at the end of the string, then no separator was found (and no port specified) + if (*Ptr && 
OutAddr.first!="") + { + int Port = std::atoi(Ptr + 1); + if (Port < 1 || Port>65535) + { + EG_LOG(LogDefault, Error, "Invalid port number for parameter '%s'", Name); + return false; + } + OutAddr.second = static_cast(Port); + } + else + { + EG_LOG(LogDefault, Error, "Invalid format for parameter '%s'", Name); + OutAddr.second = 0; + return false; + } + + return true; + }; + + if (!ProcessAddressParameter("Cirrus", PARAM_Cirrus)) + { + return false; + } + + PARAM_UE4Port = Params.GetAsInt("UE4Port", 8124).second; + + PARAM_PlanB = Params.Has("PlanB"); + + if (Params.Has("DbgWindow")) + { + const std::string& Val = Params.Get("dbgwindow"); + if (CiEquals(Val, std::string("Proxy"))) + { + PARAM_DbgWindow_Proxy = true; + PARAM_DbgWindow_WebRTC = false; + } + else if (CiEquals(Val, std::string("WebRTC"))) + { + PARAM_DbgWindow_Proxy = false; + PARAM_DbgWindow_WebRTC = true; + } + else if (CiEquals(Val, std::string("All"))) + { + PARAM_DbgWindow_Proxy = true; + PARAM_DbgWindow_WebRTC = true; + } + else if (CiEquals(Val, std::string("None"))) + { + PARAM_DbgWindow_Proxy = false; + PARAM_DbgWindow_WebRTC = false; + } + else + { + EG_LOG(LogDefault, Error, "Invalid parameter format for parameter 'dbgwindow'"); + return false; + } + } + + PARAM_LocalTime = Params.Has("LocalTime"); + + return true; +} + +// This is used by the Control handler (set with ConsoleCtrlHandler function) +// to wait for the main thread to finish +std::atomic bFinished = false; +DWORD MainThreadId = 0; + +// Handler function will be called on separate thread! +static BOOL WINAPI ConsoleCtrlHandler(DWORD dwCtrlType) +{ + switch (dwCtrlType) + { + case CTRL_C_EVENT: // Ctrl+C + break; + case CTRL_BREAK_EVENT: // Ctrl+Break + break; + case CTRL_CLOSE_EVENT: // Closing the console window + break; + case CTRL_LOGOFF_EVENT: // User logs off. Passed only to services! + break; + case CTRL_SHUTDOWN_EVENT: // System is shutting down. Passed only to services! 
+ break; + } + + EG_LOG(LogDefault, Log, "Console Ctrl Handler: %lu", dwCtrlType); + EG_LOG(LogDefault, Log, "Waiting to finish UE4WebRTCProxy..."); + + if (!MainThreadId) + { + return FALSE; + } + + PostThreadMessage(MainThreadId, WM_QUIT, 0, 0); + // Wait for the main thread to finish + while (!bFinished) + { + Sleep(100); + } + + // Return TRUE if handled this message, further handler functions won't be called. + // Return FALSE to pass this message to further handlers until default handler calls ExitProcess(). + return FALSE; +} + +int mainImpl(int argc, char* argv[]) +{ + FConsole Console; + Console.Init(120, 40, 400, 2000); + + MainThreadId = GetCurrentThreadId(); + SetConsoleCtrlHandler(ConsoleCtrlHandler, TRUE); + + // NOTE: Parsing the parameters before creating the file logger, so the log + // filename takes into account the -LocalTime parameter (if specified) + if (!ParseParameters(argc, argv)) + { + return EXIT_FAILURE; + } + + // + // Create file loggers + // + FFileLogOutput FileLogger(nullptr); // Our own log file + // WebRTC logging + InitializeWebRTCLogging(rtc::LoggingSeverity::LS_VERBOSE); + // Make sure we stop the webrtc logging, otherwise it crashes on exit + SCOPE_EXIT{ StopWebRTCLogging(); }; + + // Log the command line parameters, so we know what parameters were used for this run + { + std::string Str; + for (int i = 0; i < argc; i++) + { + Str += std::string(argv[i]) + " "; + } + + EG_LOG(LogDefault, Log, "CmdLine: %s", Str.c_str()); + } + + SetupCrashDetection(); + // If you want to test crash detection when not running a debugger, enable the block below. + // It will cause an access violation after 1 second. 
+ // NOTE: If running under the debugger, it will not trigger the crash detection +#if 0 + std::thread([]() + { + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); + *reinterpret_cast(0) = 1; + }).detach(); +#endif + + // #REFACTOR : Make this cross platform +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + rtc::EnsureWinsockInit(); + rtc::Win32SocketServer w32_ss; + rtc::Win32Thread w32_thread(&w32_ss); + rtc::ThreadManager::Instance()->SetCurrentThread(&w32_thread); +#elif EG_PLATFORM_LINUX==EG_PLATFORM_LINUX +#error Not yet implemented +#else +#error Unknown platform +#endif + + rtc::InitializeSSL(); + auto Conductor = std::make_unique(); + + // Main loop. + MSG Msg; + BOOL Gm; + while ((Gm = ::GetMessage(&Msg, NULL, 0, 0)) != 0 && Gm != -1) + { + ::TranslateMessage(&Msg); + ::DispatchMessage(&Msg); + } + + rtc::CleanupSSL(); + + EG_LOG(LogDefault, Log, "Exiting UE4WebRTCProxy"); + + return EXIT_SUCCESS; +} + +int main(int argc, char* argv[]) +{ + int ExitCode; + try + { + ExitCode = mainImpl(argc, argv); + } + catch (std::exception&e) + { + printf("%s\n", e.what()); + ExitCode = EXIT_FAILURE; + } + + bFinished = true; + return ExitCode; +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.cpp new file mode 100644 index 000000000000..15949bdb5a13 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.cpp @@ -0,0 +1,111 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" +#include "WebRTCProxyCommon.h" +#include "StringUtils.h" +#include "Logging.h" + +namespace detail +{ + void BreakImpl() + { +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + __debugbreak(); +#elif EG_PLATFORM == EG_PLATFORM_LINUX + #error Not implemented yet +#else + #error Unknown platform +#endif + } +} + +void DoAssert(const char* File, int Line, _Printf_format_string_ const char* Fmt, ...) 
+{ + // The actual call to break + auto DoBreak = []() { + detail::BreakImpl(); + exit(EXIT_FAILURE); + }; + + // Detect reentrancy, since we call a couple of things from here that + // can end up asserting + static bool Executing; + if (Executing) + { + DoBreak(); + return; + } + Executing = true; + + char Msg[1024]; + va_list Args; + va_start(Args, Fmt); + VSNPrintf(Msg, 1024, Fmt, Args); + va_end(Args); + + EG_LOG(LogDefault, Error, "ASSERT: %s, %d: %s\n", File, Line, Msg); + + DoBreak(); +} + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS +std::string GetProcessPath(std::string* Filename) +{ + wchar_t Buf[MAX_PATH]; + GetModuleFileNameW(NULL, Buf, MAX_PATH); + + std::string Res = Narrow(Buf); + std::string::size_type Index = Res.rfind("\\"); + + if (Index != std::string::npos) + { + if (Filename) + { + *Filename = Res.substr(Index + 1); + } + + Res = Res.substr(0, Index + 1); + } + else + { + return ""; + } + + return Res; +} +#elif EG_PLATFORM == EG_PLATFORM_LINUX +#error Not implemented yet +#else +#error Unknown platform +#endif + +std::string GetExtension(const std::string& FullFilename, std::string* Basename) +{ + size_t SlashPos = FullFilename.find_last_of("/\\"); + size_t P = FullFilename.find_last_of("."); + + // Where the file name starts (we ignore directories) + size_t NameStart = SlashPos != std::string::npos ? SlashPos + 1 : 0; + + // Account for the fact there might not be an extension, but there is a dot character, + // as for example in relative paths. 
E.g: ..\SomeFile + if (P == std::string::npos || (SlashPos != std::string::npos && P < SlashPos)) + { + if (Basename) + { + *Basename = FullFilename.substr(NameStart); + } + + return ""; + } + else + { + std::string Res = FullFilename.substr(P + 1); + if (Basename) + { + *Basename = FullFilename.substr(NameStart, P - NameStart); + } + + return Res; + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.h new file mode 100644 index 000000000000..52124dd52098 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyCommon.h @@ -0,0 +1,142 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +// When using UE4 header files then ensure we redefine UE4 specific types. +using uint8 = uint8_t; + +// Directly use what is defined in UE4, to avoid duplication and bugs due +// to enums mismatches +#include "../../../../../Plugins/Experimental/PixelStreaming/Source//PixelStreaming/Private/ProtocolDefs.h" + +#define EG_PLATFORM_WINDOWS 1 +#define EG_PLATFORM_LINUX 2 + +#if defined(_WIN32) + #define EG_PLATFORM EG_PLATFORM_WINDOWS +#elif __linux__ + #define EG_PLATFORM EG_PLATFORM_LINUX +#endif + +// Set any configuration flags not defined. 
+// This allows just specifying e.g EG_BUILD_DEBUG=1 in the project, and have the other +// ones automatically set to 0 +#ifndef EG_BUILD_DEBUG + #define EG_BUILD_DEBUG 0 +#endif +#ifndef EG_BUILD_DEVELOPMENT + #define EG_BUILD_DEVELOPMENT 0 +#endif +#ifndef EG_BUILD_SHIPPING + #define EG_BUILD_SHIPPING 0 +#endif +#ifndef USE_CHECK_IN_SHIPPING + #define USE_CHECK_IN_SHIPPING 0 +#endif +#ifndef DO_GUARD_SLOW + #define DO_GUARD_SLOW 0 +#endif + +/** + * Forceful assert, even on Release builds + */ +void DoAssert(const char* File, int Line, _Printf_format_string_ const char* Fmt, ...); + +/** + * Gets the current process path + * @param Filename If specified, it will contain the name of the executable on return + * @return The executable's directory + */ +std::string GetProcessPath(std::string* Filename = nullptr); + +/** + * Gets the extension of a file name + * @param FullFilename File name to get the extension from + * @param Basename If specified, it will contain the filename without extension + */ +std::string GetExtension(const std::string& FullFilename, std::string* Basename); + + +////////////////////////////////////////////////////////////////////////// +// check and verify macros work in a similar way to Unreal Engine +// +// "check" expressions are runtime asserts that are compiled out in Shipping builds, +// unless USE_CHECK_IN_SHIPPING is 1 +// +// "verify" expressions are ALWAYS evaluated, but they don't halt execution in Shipping builds +// unless USE_CHECK_IN_SHIPPING is 1 +// +// "checkSlow/checkfSlow" macros do the same as the normal check/checkf, but +// are compiled out in Development and Shipping. It's meant to be used for checks that are +// quite pedantic and might affect performance in Development. 
+// If you want these to be be enabled in Development and even Shipping (provided USE_CHECK_IN_SHIPPING is 1), +// then set DO_GUARD_SLOW to 1 +// +////////////////////////////////////////////////////////////////////////// + + +// +// Check macros +// +#if EG_BUILD_DEBUG || EG_BUILD_DEVELOPMENT || (EG_BUILD_SHIPPING && USE_CHECK_IN_SHIPPING) + #define check(Exp) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, #Exp); } + #define checkf(Exp, Fmt, ...) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, Fmt, ##__VA_ARGS__); } // By using ##__VA_ARGS__ , it will remove the last comma, if __VA_ARGS__ is empty +#else + #define check(Exp) ((void)0) + #define checkf(Exp, Fmt, ...) ((void)0) +#endif + +// +// Check slow macros +#if EG_BUILD_DEBUG || (EG_BUILD_DEVELOPMENT && DO_GUARD_SLOW) || (EG_BUILD_SHIPPING && USE_CHECK_IN_SHIPPING && DO_GUARD_SLOW) + #define checkSlow(Exp) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, #Exp); } + #define checkfSlow(Exp, Fmt, ...) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, Fmt, ##__VA_ARGS__); } // By using ##__VA_ARGS__ , it will remove the last comma, if __VA_ARGS__ is empty +#else + #define checkSlow(Exp) ((void)0) + #define checkfSlow(Exp, Fmt, ...) ((void)0) +#endif + + +// +// verify macros +// +#if EG_BUILD_DEBUG || EG_BUILD_DEVELOPMENT || (EG_BUILD_SHIPPING && USE_CHECK_IN_SHIPPING) + #define verify(Exp) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, #Exp); } + #define verifyf(Exp, Fmt, ...) if (!(Exp)) { ::DoAssert(__FILE__, __LINE__, Fmt, ##__VA_ARGS__); } // By using ##__VA_ARGS__ , it will remove the last comma, if __VA_ARGS__ is empty +#else + #define verify(Exp) if (!(Exp)) {} + #define verifyf(Exp, Fmt, ...) if (!(Exp)) {} +#endif + +// +// Available parameters +// +extern std::pair PARAM_Cirrus; +extern uint16_t PARAM_UE4Port; +extern bool PARAM_PlanB; +extern bool PARAM_LocalTime; + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS + #pragma warning(disable: 26439) // This kind of function may not throw. Declare it 'noexcept' (f.6). 
+ #pragma warning(disable: 26444) // warning C26444: Avoid unnamed objects with custom construction and destruction (es.84). + #pragma warning(disable: 6319) // Use of the comma-operator in a tested expression causes the left argument to be ignored when it has no side-effects. +#endif + +using FClientId = uint32_t; + +// Names used for a IceCandidate JSON object. +const char kCandidateSdpMidName[] = "sdpMid"; +const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex"; +const char kCandidateSdpName[] = "candidate"; + +// Names used for a SessionDescription JSON object. +const char kSessionDescriptionTypeName[] = "type"; +const char kSessionDescriptionSdpName[] = "sdp"; + +// Names used for a CirrusConfig JSON object +const char kPeerConnectionConfigName[] = "peerConnectionConfig"; +const char kIceServersName[] = "iceServers"; +const char kUrlsName[] = "urls"; +const char kUsernameName[] = "username"; +const char kCredentialName[] = "credential"; + diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.cpp b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.cpp new file mode 100644 index 000000000000..24aa77f095fe --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.cpp @@ -0,0 +1,3 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "WebRTCProxyPCH.h" diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.h new file mode 100644 index 000000000000..49723694c434 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/WebRTCProxyPCH.h @@ -0,0 +1,94 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+#pragma once + +#include "targetver.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +//#include "crazygaze/spas/spas.h" + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS +#pragma warning(push) +//#pragma warning(disable : ALL_CODE_ANALYSIS_WARNINGS) +#pragma warning(disable: 26439 ) // warning C26439: This kind of function may not throw. Declare it 'noexcept' (f.6). +#pragma warning(disable: 6255) // warning C6255: _alloca indicates failure by raising a stack overflow exception. Consider using _malloca instead. +#pragma warning(disable: 26451) // warning C26451: Arithmetic overflow: Using operator '*' on a 4 byte value and then casting the result to a 8 byte value. Cast the value to the wider type before calling operator '*' to avoid overflow (io.2). +#pragma warning(disable: 26495) // warning C26495: Variable 'webrtc::StringRtpHeaderExtension::value_' is uninitialized. Always initialize a member variable (type.6). +#pragma warning(disable: 26434) // warning C26434: Function 'cricket::VideoCodec::operator!=' hides a non-virtual function 'cricket::Codec::operator!=' (c.128). +#pragma warning(disable: 26444) // warning C26444: Avoid unnamed objects with custom construction and destruction (es.84). 
+#pragma warning(disable: 4244) // warning C4244: 'argument': conversion from 'const int' to 'float', possible loss of data +#endif +// +// WebRTC headers +// +#include "api/mediastreaminterface.h" +#include "api/peerconnectioninterface.h" +#include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/test/fakeconstraints.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "api\video_codecs\video_encoder.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/i420_buffer.h" + +#include "rtc_base/thread.h" +#include "rtc_base/refcountedobject.h" +#include "rtc_base/json.h" +#include "rtc_base/logging.h" +#include "rtc_base/flags.h" +#include "rtc_base/checks.h" +#include "rtc_base/ssladapter.h" +#include "rtc_base/win32socketinit.h" +#include "rtc_base/win32socketserver.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/nethelpers.h" +#include "rtc_base/stringutils.h" +#include "rtc_base/physicalsocketserver.h" +#include "rtc_base/signalthread.h" +#include "rtc_base/sigslot.h" +#include "rtc_base/atomicops.h" + +// #REFACTOR : Possibly remove this one once we make use of cross-platform sockets +#include "rtc_base/win32.h" +#include "rtc_base/win32socketserver.h" + +#include "rtc_base/asynctcpsocket.h" + +#include "media/base/videocapturer.h" +#include "media/engine/webrtcvideocapturerfactory.h" +#include "media/engine/internaldecoderfactory.h" +#include "media/base/h264_profile_level_id.h" +#include "media/engine/webrtcvideoencoderfactory.h" +#include "media/base/adaptedvideotracksource.h" +#include "media/base/mediachannel.h" +#include "media/base/videocommon.h" + +#include 
"modules/video_capture/video_capture_factory.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "modules/video_coding/codecs/h264/include/h264.h" + +#include "common_video/h264/h264_bitstream_parser.h" +#include "common_video/h264/h264_common.h" + +#include "media/base/videobroadcaster.h" + +#if EG_PLATFORM == EG_PLATFORM_WINDOWS +#pragma warning(pop) +#endif diff --git a/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/targetver.h b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/targetver.h new file mode 100644 index 000000000000..92f6e2571915 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebRTCProxy/src/targetver.h @@ -0,0 +1,10 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +// Including SDKDDKVer.h defines the highest available Windows platform. + +// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and +// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h. + +#include diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/matchmaker.js b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/matchmaker.js new file mode 100644 index 000000000000..28109ba08423 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/matchmaker.js @@ -0,0 +1,122 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +const httpPort = 90; +const matchmakerPort = 9999; + +const argv = require('yargs').argv; + +const express = require('express'); +const app = express(); +const http = require('http').Server(app); + +// A list of all the Cirrus server which are connected to the Matchmaker. +var cirrusServers = new Map(); + +// +// Parse command line. 
+// + +if (typeof argv.httpPort != 'undefined') { + httpPort = argv.httpPort; +} +if (typeof argv.matchmakerPort != 'undefined') { + matchmakerPort = argv.matchmakerPort; +} + +// +// Connect to browser. +// + +http.listen(httpPort, () => { + console.log('HTTP listening on *:' + httpPort); +}); + +// Get a Cirrus server if there is one available which has no clients connected. +function getAvailableCirrusServer() { + for (cirrusServer of cirrusServers.values()) { + if (cirrusServer.numConnectedClients === 0) { + return cirrusServer; + } + } + + console.log('WARNING: No empty Cirrus servers are available'); + return undefined; +} + +// Handle standard URL. +app.get('/', (req, res) => { + cirrusServer = getAvailableCirrusServer(); + if (cirrusServer != undefined) { + res.redirect(`http://${cirrusServer.address}:${cirrusServer.port}/`); + console.log(`Redirect to ${cirrusServer.address}:${cirrusServer.port}`); + } else { + res.send('No Cirrus servers are available'); + } +}); + +// Handle URL with custom HTML. +app.get('/custom_html/:htmlFilename', (req, res) => { + cirrusServer = getAvailableCirrusServer(); + if (cirrusServer != undefined) { + res.redirect(`http://${cirrusServer.address}:${cirrusServer.port}/custom_html/${req.params.htmlFilename}`); + console.log(`Redirect to ${cirrusServer.address}:${cirrusServer.port}`); + } else { + res.send('No Cirrus servers are available'); + } +}); + +// +// Connection to Cirrus. 
+// + +const net = require('net'); + +function disconnect(connection) { + console.log(`Ending connection to remote address ${connection.remoteAddress}`); + connection.end(); +} + +const matchmaker = net.createServer((connection) => { + connection.on('data', (data) => { + try { + message = JSON.parse(data); + } catch(e) { + console.log(`ERROR (${e.toString()}): Failed to parse Cirrus information from data: ${data.toString()}`); + disconnect(connection); + return; + } + if (message.type === 'connect') { + // A Cirrus server connects to this Matchmaker server. + cirrusServer = { + address: message.address, + port: message.port, + numConnectedClients: 0 + }; + cirrusServers.set(connection, cirrusServer); + console.log(`Cirrus server ${cirrusServer.address}:${cirrusServer.port} connected to Matchmaker`); + } else if (message.type === 'clientConnected') { + // A client connects to a Cirrus server. + cirrusServer = cirrusServers.get(connection); + cirrusServer.numConnectedClients++; + console.log(`Client connected to Cirrus server ${cirrusServer.address}:${cirrusServer.port}`); + } else if (message.type === 'clientDisconnected') { + // A client disconnects from a Cirrus server. + cirrusServer = cirrusServers.get(connection); + cirrusServer.numConnectedClients--; + console.log(`Client disconnected from Cirrus server ${cirrusServer.address}:${cirrusServer.port}`); + } else { + console.log('ERROR: Unknown data: ' + JSON.stringify(message)); + disconnect(connection); + } + }); + + // A Cirrus server disconnects from this Matchmaker server. 
+ connection.on('error', () => { + cirrusServers.delete(connection); + console.log(`Cirrus server ${cirrusServer.address}:${cirrusServer.port} disconnected from Matchmaker`); + }); +}); + +matchmaker.listen(matchmakerPort, () => { + console.log('Matchmaker listening on *:' + matchmakerPort); +}); diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package-lock.json b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package-lock.json new file mode 100644 index 000000000000..e9b1cd8fc761 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package-lock.json @@ -0,0 +1,994 @@ +{ + "name": "cirrus-matchmaker", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "accepts": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", + "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", + "requires": { + "mime-types": "2.1.18", + "negotiator": "0.6.1" + } + }, + "after": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz", + "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8=" + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "arraybuffer.slice": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", + "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==" + }, + "async-limiter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", + "integrity": 
"sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==" + }, + "backo2": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", + "integrity": "sha1-MasayLEpNjRj41s+u2n038+6eUc=" + }, + "base64-arraybuffer": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz", + "integrity": "sha1-c5JncZI7Whl0etZmqlzUv5xunOg=" + }, + "base64id": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/base64id/-/base64id-1.0.0.tgz", + "integrity": "sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY=" + }, + "better-assert": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "integrity": "sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI=", + "requires": { + "callsite": "1.0.0" + } + }, + "blob": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", + "integrity": "sha1-vPEwUspURj8w+fx+lbmkdjCpSSE=" + }, + "body-parser": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", + "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "1.0.4", + "debug": "2.6.9", + "depd": "1.1.2", + "http-errors": "1.6.3", + "iconv-lite": "0.4.19", + "on-finished": "2.3.0", + "qs": "6.5.1", + "raw-body": "2.3.2", + "type-is": "1.6.16" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + } + } + }, + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "callsite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + 
"integrity": "sha1-KAOY5dZkvXQDi28JBRU+borxvCA=" + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" + }, + "cliui": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "requires": { + "string-width": "2.1.1", + "strip-ansi": "4.0.0", + "wrap-ansi": "2.1.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, + "component-bind": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", + "integrity": "sha1-AMYIq33Nk4l8AAllGx06jh5zu9E=" + }, + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "component-inherit": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", + "integrity": "sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM=" + }, + "content-disposition": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, + "cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "4.1.3", + "shebang-command": "1.2.0", + "which": "1.3.1" + } + }, + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + }, + "engine.io": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-3.2.0.tgz", + "integrity": "sha512-mRbgmAtQ4GAlKwuPnnAvXXwdPhEx+jkc0OBCLrXuD/CRvwNK3AxRSnqK4FSqmAMRRHryVJP8TopOvmEaA64fKw==", + "requires": { + "accepts": "1.3.5", + "base64id": "1.0.0", + "cookie": "0.3.1", + "debug": "3.1.0", + "engine.io-parser": "2.1.2", + "ws": "3.3.3" + } + }, + "engine.io-client": { + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/engine.io-client/-/engine.io-client-3.2.1.tgz", + "integrity": "sha512-y5AbkytWeM4jQr7m/koQLc5AxpRKC1hEVUb/s1FUAWEJq5AzJJ4NLvzuKPuxtDi5Mq755WuDvZ6Iv2rXj4PTzw==", + "requires": { + "component-emitter": "1.2.1", + "component-inherit": "0.0.3", + "debug": "3.1.0", + "engine.io-parser": "2.1.2", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "parseqs": "0.0.5", + "parseuri": "0.0.5", + "ws": "3.3.3", + "xmlhttprequest-ssl": "1.5.5", + "yeast": "0.1.2" + } + }, + "engine.io-parser": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-2.1.2.tgz", + "integrity": "sha512-dInLFzr80RijZ1rGpx1+56/uFoH7/7InhH3kZt+Ms6hT8tNx3NGW/WNSA/f8As1WkOfkuyb3tnRyuXGxusclMw==", + "requires": { + "after": "0.8.2", + "arraybuffer.slice": "0.0.7", + "base64-arraybuffer": "0.1.5", + "blob": "0.0.4", + "has-binary2": "1.0.3" + } + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "5.1.0", + "get-stream": "3.0.0", + "is-stream": "1.1.0", + "npm-run-path": "2.0.2", + "p-finally": "1.0.0", + "signal-exit": "3.0.2", + "strip-eof": "1.0.0" + } + }, + "express": { + "version": "4.16.3", + "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", + "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", + "requires": { + "accepts": "1.3.5", + "array-flatten": "1.1.1", + "body-parser": "1.18.2", + "content-disposition": "0.5.2", + "content-type": "1.0.4", + "cookie": "0.3.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "1.1.2", + 
"encodeurl": "1.0.2", + "escape-html": "1.0.3", + "etag": "1.8.1", + "finalhandler": "1.1.1", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "1.1.2", + "on-finished": "2.3.0", + "parseurl": "1.3.2", + "path-to-regexp": "0.1.7", + "proxy-addr": "2.0.3", + "qs": "6.5.1", + "range-parser": "1.2.0", + "safe-buffer": "5.1.1", + "send": "0.16.2", + "serve-static": "1.13.2", + "setprototypeof": "1.1.0", + "statuses": "1.4.0", + "type-is": "1.6.16", + "utils-merge": "1.0.1", + "vary": "1.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + } + } + }, + "finalhandler": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", + "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", + "requires": { + "debug": "2.6.9", + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "on-finished": "2.3.0", + "parseurl": "1.3.2", + "statuses": "1.4.0", + "unpipe": "1.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + } + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "requires": { + "locate-path": "2.0.0" + } + }, + "forwarded": { + "version": "0.1.2", + 
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "get-caller-file": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.2.tgz", + "integrity": "sha1-9wLmMSfn4jHBYKgMFVSstw1QR+U=" + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "has-binary2": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-binary2/-/has-binary2-1.0.3.tgz", + "integrity": "sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw==", + "requires": { + "isarray": "2.0.1" + } + }, + "has-cors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz", + "integrity": "sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk=" + }, + "http-errors": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "requires": { + "depd": "1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": "1.4.0" + } + }, + "iconv-lite": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", + "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" + }, + "indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "invert-kv": { + "version": 
"1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + }, + "ipaddr.js": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.6.0.tgz", + "integrity": "sha1-4/o1e3c9phnybpXwSdBVxyeW+Gs=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "requires": { + "invert-kv": "1.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "requires": { + "p-locate": "2.0.0", + "path-exists": "3.0.0" + } + }, + "lru-cache": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.3.tgz", + "integrity": "sha512-fFEhvcgzuIoJVUF8fYr5KR0YqxD238zgObTps31YdADwPPAp82a4M8TrckkWyx7ekNlf9aBcVn81cFwwXngrJA==", + "requires": { + "pseudomap": "1.0.2", + "yallist": "2.1.2" + } + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, + "mem": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", + "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "requires": { + "mimic-fn": "1.2.0" + } + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + }, + "mime": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", + "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" + }, + "mime-db": { + "version": "1.33.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", + "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" + }, + "mime-types": { + "version": "2.1.18", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", + "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", + "requires": { + "mime-db": "1.33.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "negotiator": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "requires": { + "path-key": 
"2.0.1" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, + "object-component": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz", + "integrity": "sha1-8MaapQ78lbhmwYb0AKM3acsvEpE=" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } + }, + "os-locale": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", + "integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "requires": { + "execa": "0.7.0", + "lcid": "1.0.0", + "mem": "1.1.0" + } + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "requires": { + "p-try": "1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "requires": { + "p-limit": "1.3.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + }, + "parseqs": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.5.tgz", + "integrity": "sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0=", + "requires": { + "better-assert": "1.0.2" + } + }, + "parseuri": { + "version": "0.0.5", + "resolved": 
"https://registry.npmjs.org/parseuri/-/parseuri-0.0.5.tgz", + "integrity": "sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo=", + "requires": { + "better-assert": "1.0.2" + } + }, + "parseurl": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", + "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "proxy-addr": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.3.tgz", + "integrity": "sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ==", + "requires": { + "forwarded": "0.1.2", + "ipaddr.js": "1.6.0" + } + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "range-parser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + }, + "raw-body": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", + "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.2", + "iconv-lite": "0.4.19", + "unpipe": 
"1.0.0" + }, + "dependencies": { + "depd": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", + "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" + }, + "http-errors": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", + "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", + "requires": { + "depd": "1.1.1", + "inherits": "2.0.3", + "setprototypeof": "1.0.3", + "statuses": "1.4.0" + } + }, + "setprototypeof": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", + "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" + } + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "send": { + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", + "integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==", + "requires": { + "debug": "2.6.9", + "depd": "1.1.2", + "destroy": "1.0.4", + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "etag": "1.8.1", + "fresh": "0.5.2", + "http-errors": "1.6.3", + "mime": "1.4.1", + "ms": "2.0.0", + "on-finished": "2.3.0", + "range-parser": "1.2.0", + "statuses": "1.4.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + } + } + }, + "serve-static": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", + "integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==", + "requires": { + "encodeurl": "1.0.2", + "escape-html": "1.0.3", + "parseurl": "1.3.2", + "send": "0.16.2" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "requires": { + "shebang-regex": "1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, + "socket.io": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-2.1.1.tgz", + "integrity": "sha512-rORqq9c+7W0DAK3cleWNSyfv/qKXV99hV4tZe+gGLfBECw3XEhBy7x85F3wypA9688LKjtwO9pX9L33/xQI8yA==", + "requires": { + "debug": "3.1.0", + "engine.io": "3.2.0", + "has-binary2": "1.0.3", + "socket.io-adapter": "1.1.1", + "socket.io-client": "2.1.1", + "socket.io-parser": "3.2.0" + } + }, + "socket.io-adapter": { + "version": "1.1.1", + 
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz", + "integrity": "sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs=" + }, + "socket.io-client": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-2.1.1.tgz", + "integrity": "sha512-jxnFyhAuFxYfjqIgduQlhzqTcOEQSn+OHKVfAxWaNWa7ecP7xSNk2Dx/3UEsDcY7NcFafxvNvKPmmO7HTwTxGQ==", + "requires": { + "backo2": "1.0.2", + "base64-arraybuffer": "0.1.5", + "component-bind": "1.0.0", + "component-emitter": "1.2.1", + "debug": "3.1.0", + "engine.io-client": "3.2.1", + "has-binary2": "1.0.3", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "object-component": "0.0.3", + "parseqs": "0.0.5", + "parseuri": "0.0.5", + "socket.io-parser": "3.2.0", + "to-array": "0.1.4" + } + }, + "socket.io-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.2.0.tgz", + "integrity": "sha512-FYiBx7rc/KORMJlgsXysflWx/RIvtqZbyGLlHZvjfmPTPeuD/I8MaW7cfFrj5tRltICJdgwflhfZ3NVVbVLFQA==", + "requires": { + "component-emitter": "1.2.1", + "debug": "3.1.0", + "isarray": "2.0.1" + } + }, + "statuses": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "3.0.0" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + }, + "to-array": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz", + "integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA=" + }, + "type-is": { + "version": "1.6.16", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", + "integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", + "requires": { + "media-typer": "0.3.0", + "mime-types": "2.1.18" + } + }, + "ultron": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", + "integrity": "sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==" + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, + "utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "requires": { + "string-width": "1.0.2", + "strip-ansi": "3.0.1" + }, + 
"dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "1.0.1" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "2.1.1" + } + } + } + }, + "ws": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-3.3.3.tgz", + "integrity": "sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA==", + "requires": { + "async-limiter": "1.0.0", + "safe-buffer": "5.1.2", + "ultron": "1.1.1" + } + }, + "xmlhttprequest-ssl": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz", + "integrity": "sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4=" + }, + "y18n": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz", + "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=" + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "yargs": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-10.1.2.tgz", + "integrity": 
"sha512-ivSoxqBGYOqQVruxD35+EyCFDYNEFL/Uo6FcOnz+9xZdZzK0Zzw4r4KhbrME1Oo2gOggwJod2MnsdamSG7H9ig==", + "requires": { + "cliui": "4.1.0", + "decamelize": "1.2.0", + "find-up": "2.1.0", + "get-caller-file": "1.0.2", + "os-locale": "2.1.0", + "require-directory": "2.1.1", + "require-main-filename": "1.0.1", + "set-blocking": "2.0.0", + "string-width": "2.1.1", + "which-module": "2.0.0", + "y18n": "3.2.1", + "yargs-parser": "8.1.0" + } + }, + "yargs-parser": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-8.1.0.tgz", + "integrity": "sha512-yP+6QqN8BmrgW2ggLtTbdrOyBNSI7zBa4IykmiV5R1wl1JWNxQvWhMfMdmzIYtKU7oP3OOInY/tl2ov3BDjnJQ==", + "requires": { + "camelcase": "4.1.0" + } + }, + "yeast": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz", + "integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=" + }, + "yui-lint": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/yui-lint/-/yui-lint-0.2.0.tgz", + "integrity": "sha1-6RPadPqgZPtTwDdxstMSk4ZfAYU=" + } + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package.json b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package.json new file mode 100644 index 000000000000..34d068be16ea --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/package.json @@ -0,0 +1,10 @@ +{ + "name": "cirrus-matchmaker", + "version": "0.0.1", + "description": "Cirrus servers connect to the Matchmaker which redirects a browser to the next available Cirrus server", + "dependencies": { + "express": "^4.16.2", + "socket.io": "^2.0.4", + "yargs": "^10.1.1" + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/run.bat b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/run.bat new file mode 100644 index 000000000000..1077573f40c4 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/run.bat @@ -0,0 +1,14 @@ +:: Copyright 1998-2018 Epic 
Games, Inc. All Rights Reserved. +@echo off + +pushd %~dp0 + +call setup.bat + +title Matchmaker + +::Run node server +node matchmaker %* + +popd +pause \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/setup.bat b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/setup.bat new file mode 100644 index 000000000000..be1fa7507c53 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/Matchmaker/setup.bat @@ -0,0 +1,6 @@ +:: Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +pushd %~dp0 + +npm install + +popd diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/cirrus.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/cirrus.js new file mode 100644 index 000000000000..6b4d45dd3a88 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/cirrus.js @@ -0,0 +1,790 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +//-- Server side logic. Serves pixel streaming WebRTC-based page, proxies data back to WebRTC proxy --// + +var express = require('express'); +var app = express(); + +const fs = require('fs'); +const path = require('path'); +const querystring = require('querystring'); +const bodyParser = require('body-parser'); +const logging = require('./modules/logging.js'); +logging.RegisterConsoleLogger(); + +// Command line argument --configFile needs to be checked before loading the config, all other command line arguments are dealt with through the config object + +const defaultConfig = { + UseFrontend: false, + UseMatchmaker: false, + UseHTTPS: false, + UseAuthentication: false, + LogToFile: true, + HomepageFile: 'player.htm', + AdditionalRoutes: new Map() +}; + +const argv = require('yargs').argv; +var configFile = (typeof argv.configFile != 'undefined') ? 
argv.configFile.toString() : '.\\config.json'; +const config = require('./modules/config.js').init(configFile, defaultConfig) + +if (config.LogToFile) { + logging.RegisterFileLogger('./logs'); +} + +console.log("Config: " + JSON.stringify(config, null, '\t')) + +var http = require('http').Server(app); + +if(config.UseHTTPS){ + //HTTPS certificate details + const options = { + key: fs.readFileSync(path.join(__dirname, './certificates/client-key.pem')), + cert: fs.readFileSync(path.join(__dirname, './certificates/client-cert.pem')) + }; + + var https = require('https').Server(options, app); + var io = require('socket.io')(https); +} else { + var io = require('socket.io')(http); +} + +//If not using authetication then just move on to the next function/middleware +var isAuthenticated = redirectUrl => function(req, res, next){ return next(); } + +if(config.UseAuthentication && config.UseHTTPS){ + var passport = require('passport'); + require('./modules/authentication').init(app); + // Replace the isAuthenticated with the one setup on passport module + isAuthenticated = passport.authenticationMiddleware ? 
passport.authenticationMiddleware : isAuthenticated +} else if(config.UseAuthentication && !config.UseHTTPS) { + console.log('ERROR: Trying to use authentication without using HTTPS, this is not allowed and so authentication will NOT be turned on, please turn on HTTPS to turn on authentication'); +} + +const helmet = require('helmet'); +var hsts = require('hsts'); +var net = require('net'); + +var FRONTEND_WEBSERVER = 'https://localhost'; +if(config.UseFrontend){ + var httpPort = 3000; + var httpsPort = 8000; + + //Required for self signed certs otherwise just get an error back when sending request to frontend see https://stackoverflow.com/a/35633993 + process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0" + + const httpsClient = require('./modules/httpsClient.js'); + var webRequest = new httpsClient(); +} else { + var httpPort = 80; + var httpsPort = 443; +} + +var proxyPort = 8888; // port to listen to WebRTC proxy connections +var proxyBuffer = new Buffer(0); + +var matchmakerAddress = '127.0.0.1'; +var matchmakerPort = 9999; + +var gameSessionId; +var userSessionId; +var serverPublicIp; + +//Example of STUN server setting +//let clientConfig = {peerConnectionOptions: { 'iceServers': [{'urls': ['stun:34.250.222.95:19302']}] }}; +var clientConfig = {peerConnectionOptions: {}}; + +// Parse public server address from command line +// --publicIp +try { + if(typeof config.publicIp != 'undefined'){ + serverPublicIp = config.publicIp.toString(); + } + + if(typeof config.httpPort != 'undefined'){ + httpPort = config.httpPort; + } + + if(typeof config.httpsPort != 'undefined'){ + httpsPort = config.httpsPort; + } + + if(typeof config.proxyPort != 'undefined'){ + proxyPort = config.proxyPort; + } + + if(typeof config.frontendUrl != 'undefined'){ + FRONTEND_WEBSERVER = config.frontendUrl; + } + + if(typeof config.peerConnectionOptions != 'undefined'){ + clientConfig.peerConnectionOptions = JSON.parse(config.peerConnectionOptions); + console.log(`peerConnectionOptions = 
${JSON.stringify(clientConfig.peerConnectionOptions)}`); + } + + if (typeof config.matchmakerAddress != 'undefined') { + matchmakerAddress = config.matchmakerAddress; + } + + if (typeof config.matchmakerPort != 'undefined') { + matchmakerPort = config.matchmakerPort; + } +} catch (e) { + console.error(e); + process.exit(2); +} + +if(config.UseHTTPS){ + app.use(helmet()); + + app.use(hsts({ + maxAge: 15552000 // 180 days in seconds + })); + + //Setup http -> https redirect + console.log('Redirecting http->https'); + app.use(function (req, res, next) { + if (!req.secure) { + if(req.get('Host')){ + var hostAddressParts = req.get('Host').split(':'); + var hostAddress = hostAddressParts[0]; + if(httpsPort != 443) { + hostAddress = `${hostAddress}:${httpsPort}`; + } + return res.redirect(['https://', hostAddress, req.originalUrl].join('')); + } else { + console.log(`ERROR unable to get host name from header. Requestor ${req.ip}, url path: '${req.originalUrl}', available headers ${JSON.stringify(req.headers)}`); + return res.status(400).send('Bad Request'); + } + } + next(); + }); +} + +sendGameSessionData(); + +//Setup folders +app.use(express.static(path.join(__dirname, '/public'))) +app.use('/images', express.static(path.join(__dirname, './images'))) +app.use('/scripts', [isAuthenticated('/login'), express.static(path.join(__dirname, '/scripts'))]); +app.use('/', [isAuthenticated('/login'), express.static(path.join(__dirname, '/custom_html'))]) + +try{ + for (var property in config.AdditionalRoutes) { + if (config.AdditionalRoutes.hasOwnProperty(property)) { + console.log(`Adding additional routes "${property}" -> "${config.AdditionalRoutes[property]}"`) + app.use(property, [isAuthenticated('/login'), express.static(path.join(__dirname, config.AdditionalRoutes[property]))]); + } + } +} catch(err) { + console.log(`Error reading config.AdditionalRoutes: ${err}`) +} + + +app.get('/', isAuthenticated('/login'), function(req, res){ + homepageFile = (typeof 
config.HomepageFile != 'undefined' && config.HomepageFile != '') ? config.HomepageFile.toString() : defaultConfig.HomepageFile; + homepageFilePath = path.join(__dirname, homepageFile) + + fs.access(homepageFilePath, (err) => { + if (err) { + console.log('Unable to locate file ' + homepageFilePath) + res.status(404).send('Unable to locate file ' + homepageFile); + } + else { + res.sendFile(homepageFilePath); + } + }); +}); + +//Setup the login page if we are using authentication +if(config.UseAuthentication){ + app.get('/login', function(req, res){ + res.sendFile(__dirname + '/login.htm'); + }); + + // create application/x-www-form-urlencoded parser + var urlencodedParser = bodyParser.urlencoded({ extended: false }) + + //login page form data is posted here + app.post('/login', + urlencodedParser, + passport.authenticate('local', { failureRedirect: '/login' }), + function(req, res){ + //On success try to redirect to the page that they originally tired to get to, default to '/' if no redirect was found + var redirectTo = req.session.redirectTo ? 
req.session.redirectTo : '/'; + delete req.session.redirectTo; + console.log(`Redirecting to: '${redirectTo}'`); + res.redirect(redirectTo); + } + ); +} + +/* +app.get('/:sessionId', isAuthenticated('/login'), function(req, res){ + let sessionId = req.params.sessionId; + console.log(sessionId); + + //For now don't verify session id is valid, just send player.htm if they get the right server + res.sendFile(__dirname + '/player.htm'); +}); +*/ + +/* +app.get('/custom_html/:htmlFilename', isAuthenticated('/login'), function(req, res){ + let htmlFilename = req.params.htmlFilename; + + let htmlPathname = __dirname + '/custom_html/' + htmlFilename; + + console.log(htmlPathname); + fs.access(htmlPathname, (err) => { + if (err) { + res.status(404).send('Unable to locate file ' + htmlPathname); + } + else { + res.sendFile(htmlPathname); + } + }); +}); +*/ + +let clients = []; // either web-browsers or native webrtc receivers +let nextClientId = 100; + +let proxySocket; + +function cleanUpProxyConnection() { + if(proxySocket){ + proxySocket.end(); + proxySocket = undefined; + proxyBuffer = new Buffer(0); + // make a copy of `clients` array as it will be modified in the loop + let clientsCopy = clients.slice(); + clientsCopy.forEach(function (c) { + c.ws.disconnect(); + }); + } +} + +let proxyListener = net.createServer(function(socket) { + // 'connection' listener + console.log('proxy connected'); + + socket.setNoDelay(); + + socket.on('data', function (data) { + proxyBuffer = Buffer.concat([proxyBuffer, data]); + + // WebRTC proxy uses json messages instead of binary blob so need to read messages differently + while (handleProxyMessage(socket)) { } + }); + + socket.on('end', function () { + console.log('proxy connection end'); + cleanUpProxyConnection(); + }); + + socket.on('disconnect', function () { + console.log('proxy disconnected'); + cleanUpProxyConnection(); + }); + + socket.on('close', function() { + sendServerDisconnect(); + console.log('proxy connection closed'); 
+ proxySocket = undefined; + }); + + socket.on('error', function (error) { + console.log(`proxy connection error ${JSON.stringify(error)}`); + cleanUpProxyConnection(); + }); + + proxySocket = socket; + + sendConfigToProxy(); +}); + +proxyListener.maxConnections = 1; +proxyListener.listen(proxyPort, () => { + console.log('Listening to proxy connections on: ' + proxyPort); +}); + +// Must be kept in sync with PixelStreamingProtocol::EProxyToCirrusMsg C++ enum. +const EProxyToCirrusMsg = { + answer: 0, // [msgId:1][clientId:4][size:4][string:size] + iceCandidate: 1, // [msgId:1][clientId:4][size:4][string:size] + disconnectClient: 2 // [msgId:1][clientId:4] +} + +// Must be kept in sync with PixelStreamingProtocol::ECirrusToProxyMsg C++ enum. +const ECirrusToProxyMsg = { + offer: 0, // [msgId: 1][clientId:4][size:4][string:size] + iceCandidate: 1, // [msgId:1][clientId:4][size:4][string:size] + clientDisconnected: 2, // [msgId:1][clientId:4] + config: 3 // [msgId:1][size:4][config:size] +} + +function readJsonMsg(consumed) { + // format: [size:4][string:size] + if (proxyBuffer.length < consumed + 4) + return [0, ""]; + let msgSize = proxyBuffer.readUInt32LE(consumed); + consumed += 4; + if (proxyBuffer.length < consumed + msgSize) + return [0, ""]; + let msg = proxyBuffer.toString('ascii', consumed, consumed + msgSize); + consumed += msgSize; + return [consumed, JSON.parse(msg)]; +} + +function handleProxyMessage(socket) { + // msgId + if(proxyBuffer.length == 0) + return false; + let msgId = proxyBuffer.readUInt8(0); + let consumed = 1; + + // clientId + if (proxyBuffer.length < consumed + 4) + return false; + let clientId = proxyBuffer.readUInt32LE(consumed); + consumed += 4; + + let client = clients.find(function(c) { return c.id == clientId; }); + if (!client) { + // Client is likely no longer connected, but this can also occur if bad data is recieved, this can not be validated as yet so assume former + console.error(`proxy message ${msgId}: client ${clientId} 
not found. Check proxy->cirrus protocol consistency`); + } + + switch (msgId) { + case EProxyToCirrusMsg.answer: // fall through + case EProxyToCirrusMsg.iceCandidate: + let [localConsumed, msg] = readJsonMsg(consumed); + if (localConsumed == 0) + return false; + consumed = localConsumed; + + if(client){ + switch (msgId) + { + case EProxyToCirrusMsg.answer: + console.log(`answer -> client ${clientId}`); + client.ws.emit('webrtc-answer', msg); + break; + case EProxyToCirrusMsg.iceCandidate: + console.log(`ICE candidate -> client ${clientId}`); + client.ws.emit('webrtc-ice', msg); + break; + default: + throw "unhandled case, check all \"fall through\" cases from above"; + } + } + + break; + case EProxyToCirrusMsg.disconnectClient: + console.warn(`Proxy instructed to disconnect client ${clientId}`); + if(client){ + client.ws.onclose = function() {}; + client.ws.disconnect(true); + let idx = clients.map(function(p) { return p.id; }).indexOf(clientId); + clients.splice(idx, 1); // remove it + sendClientDisconnectedToProxy(clientId); + } + break; + default: + console.error(`Invalid message id ${msgId} from proxy`); + cleanUpProxyConnection(); + return false; + } + + proxyBuffer = proxyBuffer.slice(consumed); + return true; +} + +function sendConfigToProxy() { + // [msgId:1][size:4][string:size] + if (!proxySocket) + return false; + + let cfg = {}; + cfg.peerConnectionConfig = clientConfig.peerConnectionOptions; + let msg = JSON.stringify(cfg); + console.log(`config to Proxy: ${msg}`); + + let data = new DataView(new ArrayBuffer(1 + 4 + msg.length)); + data.setUint8(0, ECirrusToProxyMsg.config); + data.setUint32(1, msg.length, true); + for (let i = 0; i != msg.length; ++i) + data.setUint8(1 + 4 + i, msg.charCodeAt(i)); + proxySocket.write(Buffer.from(data.buffer)); + return true; +} + +function sendClientDisconnectedToProxy(clientId) { + // [msgId:1][clientId:4] + if (!proxySocket) + return; + let data = new DataView(new ArrayBuffer(1 + 4)); + data.setUint8(0, 
ECirrusToProxyMsg.clientDisconnected); + data.setUint32(1, clientId, true); + proxySocket.write(Buffer.from(data.buffer)); +} + +function sendStringMsgToProxy(msgId, clientId, msg) { + // [msgId:1][clientId:4][size:4][string:size] + if (!proxySocket) + return false; + let data = new DataView(new ArrayBuffer(1 + 4 + 4 + msg.length)); + data.setUint8(0, msgId); + data.setUint32(1, clientId, true); + data.setUint32(1 + 4, msg.length, true); + for (let i = 0; i != msg.length; ++i) + data.setUint8(1 + 4 + 4 + i, msg.charCodeAt(i)); + proxySocket.write(Buffer.from(data.buffer)); + return true; +} + +function sendOfferToProxy(clientId, offer) { + sendStringMsgToProxy(ECirrusToProxyMsg.offer, clientId, offer); +} + +function sendIceCandidateToProxy(clientId, iceCandidate) { + sendStringMsgToProxy(ECirrusToProxyMsg.iceCandidate, clientId, iceCandidate); +} + +/** + * Function that handles the connection to the matchmaker. + */ + +if (config.UseMatchmaker) { + var matchmaker = net.connect(matchmakerPort, matchmakerAddress, () => { + console.log(`Cirrus connected to Matchmaker ${matchmakerAddress}:${matchmakerPort}`); + message = { + type: 'connect', + address: typeof serverPublicIp === 'undefined' ? '127.0.0.1' : serverPublicIp, + port: httpPort + }; + matchmaker.write(JSON.stringify(message)); + }); + + matchmaker.on('error', () => { + console.log('Cirrus disconnected from matchmaker'); + }); +} + +/** + * Function that handles an incoming client connection. 
+ */ +function handleNewClient(ws) { + // NOTE: This needs to be the first thing to be sent + ws.emit('clientConfig', clientConfig); + + var clientId = ++nextClientId; + console.log(`client ${clientId} (${ws.request.connection.remoteAddress}) connected`); + clients.push({ws: ws, id: clientId}); + + // Send client counts to all connected clients + ws.emit('clientCount', {count: clients.length - 1}); + + clients.forEach(function(c){ + if(c.id == clientId) + return; + c.ws.emit('clientCount', {count: clients.length - 1}); + }); + + ws.on('userConfig', function(userConfig) { + receiveUserConfig(clientId, userConfig, ws); + }); + + /** + * This is where events received from client are translated + * and sent on to the proxy socket + */ + + ws.on('message', function (msg) { + console.error(`client #${clientId}: unexpected msg "${msg}"`); + }); + + ws.on('kick', function(msg){ + // make a copy of `clients` cos the array will be modified in the loop + let clientsCopy = clients.slice(); + clientsCopy.forEach(function(c){ + if(c.id == clientId) + return; + console.log('Kicking client ' + c.id); + c.ws.disconnect(); + }) + ws.emit('clientCount', {count: 0}); + }) + + var removeClient = function() { + let idx = clients.map(function(c) { return c.ws; }).indexOf(ws); + let clientId = clients[idx].id; + clients.splice(idx, 1); // remove it + sendClientDisconnectedToProxy(clientId); + sendClientDisconnectedToFrontend(); + sendClientDisconnectedToMatchmaker(); + } + + ws.on('disconnect', function () { + console.log(`client ${clientId} disconnected`); + removeClient(); + }); + + ws.on('close', function (code, reason) { + console.log(`client ${clientId} connection closed: ${code} - ${reason}`); + removeClient(); + }); + + ws.on('error', function (err) { + console.log(`client ${clientId} connection error: ${err}`); + removeClient(); + }); +}; + +/** + * Config data received from the web browser or device native client. 
+ */ +function receiveUserConfig(clientId, userConfigString, ws) { + console.log(`client ${clientId}: userConfig = ${userConfigString}`); + userConfig = JSON.parse(userConfigString) + + // Check the sort of data the web browser or device native client will send. + switch (userConfig.emitData) + { + case "ArrayBuffer": + { + ws.on('webrtc-offer', function(offer) { + console.log(`offer <- client ${clientId}`); + sendOfferToProxy(clientId, offer); + }); + + ws.on('webrtc-ice', function(candidate) { + console.log(`ICE candidate <- client ${clientId}`); + sendIceCandidateToProxy(clientId, candidate); + }); + + ws.on('webrtc-stats', function(stats){ + console.log(`Received webRTC stats from player ID: ${clientId} \r\n${JSON.stringify(stats)}`); + }); + + break; + } + case "Array": + { + //TODO: this is untested as requires iOS WebRTC integration + ws.on('webrtc-offer', function(offer) { + console.log(`offer <- client ${clientId}`); + sendOfferToProxy(clientId, offer); + }); + + ws.on('webrtc-ice', function(candidate) { + console.log(`ICE candidate <- client ${clientId}`); + sendIceCandidateToProxy(clientId, candidate); + }); + + ws.on('webrtc-stats', function(stats){ + console.log(`Received webRTC stats from player ID: ${clientId} \r\n${JSON.stringify(stats)}`); + }); + + break; + } + default: + { + console.log(`Unknown user config emit data type ${userConfig.emitData}`); + break; + } + } +} + + +//IO events +io.on('connection', function (ws) { + // Reject connection if proxy is not connected + if (!proxySocket) { + ws.disconnect(); + return; + } + + handleNewClient(ws); + sendClientConnectedToFrontend(); + sendClientConnectedToMatchmaker(); +}); + +//Setup http and https servers +http.listen(httpPort, function () { + console.logColor(logging.Green, 'Http listening on *: ' + httpPort); + }); + +if(config.UseHTTPS){ + https.listen(httpsPort, function () { + console.logColor(logging.Green, 'Https listening on *: ' + httpsPort); + }); +} + +//Keep trying to send 
// gameSessionId in case the server isn't ready yet
// Request a game session id from the frontend web server; retries on timeout.
function sendGameSessionData() {
    //If we are not using the frontend web server don't try and make requests to it
    if (!config.UseFrontend)
        return;

    webRequest.get(`${FRONTEND_WEBSERVER}/server/requestSessionId`,
        function (response, body) {
            if (response.statusCode === 200) {
                gameSessionId = body;
                console.log('SessionId: ' + gameSessionId);
            }
            else {
                console.log('Status code: ' + response.statusCode);
                console.log(body);
            }
        },
        function (err) {
            //Repeatedly try in cases where the connection timed out or never connected
            if (err.code === "ECONNRESET") {
                //timeout
                sendGameSessionData();
            } else if (err.code === 'ECONNREFUSED') {
                console.log('Frontend server not running, unable to setup game session');
            } else {
                console.log(err);
            }
        });
}

// Request a user session id for this game session; 410 means the frontend is
// not ready yet, so the request is re-issued.
function sendUserSessionData(serverPort) {
    //If we are not using the frontend web server don't try and make requests to it
    if (!config.UseFrontend)
        return;

    webRequest.get(`${FRONTEND_WEBSERVER}/server/requestUserSessionId?gameSessionId=${gameSessionId}&serverPort=${serverPort}&appName=${querystring.escape(clientConfig.AppName)}&appDescription=${querystring.escape(clientConfig.AppDescription)}${(typeof serverPublicIp === 'undefined' ? '' : '&serverHost=' + serverPublicIp)}`,
        function (response, body) {
            if (response.statusCode === 410) {
                sendUserSessionData(serverPort);
            } else if (response.statusCode === 200) {
                userSessionId = body;
                console.log('UserSessionId: ' + userSessionId);
            } else {
                console.log('Status code: ' + response.statusCode);
                console.log(body);
            }
        },
        function (err) {
            //Repeatedly try in cases where the connection timed out or never connected
            if (err.code === "ECONNRESET") {
                //timeout
                sendUserSessionData(serverPort);
            } else if (err.code === 'ECONNREFUSED') {
                console.log('Frontend server not running, unable to setup user session');
            } else {
                console.log(err);
            }
        });
}

// Tell the frontend the UE4 server (proxy) dropped its connection.
function sendServerDisconnect() {
    //If we are not using the frontend web server don't try and make requests to it
    if (!config.UseFrontend)
        return;

    webRequest.get(`${FRONTEND_WEBSERVER}/server/serverDisconnected?gameSessionId=${gameSessionId}&appName=${querystring.escape(clientConfig.AppName)}`,
        function (response, body) {
            if (response.statusCode === 200) {
                console.log('serverDisconnected acknowledged by Frontend');
            } else {
                console.log('Status code: ' + response.statusCode);
                console.log(body);
            }
        },
        function (err) {
            //Repeatedly try in cases where the connection timed out or never connected
            if (err.code === "ECONNRESET") {
                //timeout
                sendServerDisconnect();
            } else if (err.code === 'ECONNREFUSED') {
                // NOTE(review): message says 'user session' — looks copy-pasted
                // from sendUserSessionData; kept verbatim to avoid changing log
                // output, confirm before rewording.
                console.log('Frontend server not running, unable to setup user session');
            } else {
                console.log(err);
            }
        });
}

// Notify the frontend that a client connected to this Cirrus server.
function sendClientConnectedToFrontend() {
    //If we are not using the frontend web server don't try and make requests to it
    if (!config.UseFrontend)
        return;

    webRequest.get(`${FRONTEND_WEBSERVER}/server/clientConnected?gameSessionId=${gameSessionId}&appName=${querystring.escape(clientConfig.AppName)}`,
        function (response, body) {
            if (response.statusCode === 200) {
                console.log('clientConnected acknowledged by Frontend');
            }
            else {
                console.log('Status code: ' + response.statusCode);
                console.log(body);
            }
        },
        function (err) {
            //Repeatedly try in cases where the connection timed out or never connected
            if (err.code === "ECONNRESET") {
                //timeout
                sendClientConnectedToFrontend();
            } else if (err.code === 'ECONNREFUSED') {
                console.log('Frontend server not running, unable to setup game session');
            } else {
                console.log(err);
            }
        });
}

// Notify the frontend that a client disconnected from this Cirrus server.
function sendClientDisconnectedToFrontend() {
    //If we are not using the frontend web server don't try and make requests to it
    if (!config.UseFrontend)
        return;

    webRequest.get(`${FRONTEND_WEBSERVER}/server/clientDisconnected?gameSessionId=${gameSessionId}&appName=${querystring.escape(clientConfig.AppName)}`,
        function (response, body) {
            if (response.statusCode === 200) {
                console.log('clientDisconnected acknowledged by Frontend');
            }
            else {
                console.log('Status code: ' + response.statusCode);
                console.log(body);
            }
        },
        function (err) {
            //Repeatedly try in cases where the connection timed out or never connected
            if (err.code === "ECONNRESET") {
                //timeout
                // FIX: was `sendClientDisconnectedEvent()`, which is not
                // defined anywhere and threw a ReferenceError on the retry
                // path; the retry must re-invoke this function.
                sendClientDisconnectedToFrontend();
            } else if (err.code === 'ECONNREFUSED') {
                console.log('Frontend server not running, unable to setup game session');
            } else {
                console.log(err);
            }
        });
}

// The Matchmaker will not re-direct clients to this Cirrus server if any client
// is connected.
function sendClientConnectedToMatchmaker() {
    if (!config.UseMatchmaker)
        return;

    // FIX: declare `message` — it was previously an implicit global.
    const message = {
        type: 'clientConnected'
    };
    matchmaker.write(JSON.stringify(message));
}

// The Matchmaker is interested when nobody is connected to a Cirrus server
// because then it can re-direct clients to this re-cycled Cirrus server.
function sendClientDisconnectedToMatchmaker() {
    if (!config.UseMatchmaker)
        return;

    // FIX: declare `message` — it was previously an implicit global.
    const message = {
        type: 'clientDisconnected'
    };
    matchmaker.write(JSON.stringify(message));
}
// \ No newline at end of file
// diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/index.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/index.js
// new file mode 100644 index 000000000000..5e949b6231cf --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/index.js @@ -0,0 +1,2 @@

// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
exports.users = require('./users');

// diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/store_password.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/store_password.js
// new file mode 100644 index 000000000000..58fc1bfef35e --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/store_password.js @@ -0,0 +1,78 @@

// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.
//
// Usage: node store_password --username --password
//
// There is an optional parameter '--usersFile ' that can be used to specify a
// different location for the file to save the users to. The default location is './users.json'

const argv = require('yargs').argv;
const fs = require('fs');
const bcrypt = require('bcrypt');

var username, password;
var usersFile = './users.json'

// When true the plaintext password is stored alongside the hash (debug only).
const STORE_PLAINTEXT_PASSWORD = false;

try {
    if (typeof argv.username != 'undefined') {
        username = argv.username.toString();
    }

    if (typeof argv.password != 'undefined') {
        password = argv.password;
    }

    if (typeof argv.usersFile != 'undefined') {
        usersFile = argv.usersFile;
    }
} catch (e) {
    console.error(e);
    process.exit(2);
}

if (username && password) {
    // Load any existing accounts so we update rather than clobber the file.
    let existingAccounts = [];
    if (fs.existsSync(usersFile)) {
        console.log(`File '${usersFile}' exists, reading file`)
        var content = fs.readFileSync(usersFile, 'utf8');
        try {
            existingAccounts = JSON.parse(content);
        }
        catch (e) {
            console.error(`Existing file '${usersFile}', has invalid JSON: ${e}`);
        }
    }

    var existingUser = existingAccounts.find(u => u.username == username)
    if (existingUser) {
        console.log(`User '${username}', already exists, updating password`)
        existingUser.passwordHash = generatePasswordHash(password)
        if (STORE_PLAINTEXT_PASSWORD)
            existingUser.password = password;
        else if (existingUser.password)
            delete existingUser.password; // scrub any previously stored plaintext

    } else {
        console.log(`Adding new user '${username}'`)
        let newUser = {
            id: existingAccounts.length + 1,
            username: username,
            passwordHash: generatePasswordHash(password)
        }
        if (STORE_PLAINTEXT_PASSWORD)
            newUser.password = password;

        existingAccounts.push(newUser);
    }

    console.log(`Writing updated users to '${usersFile}'`);
    var newContent = JSON.stringify(existingAccounts);
    fs.writeFileSync(usersFile, newContent);
} else {
    // FIX (security): the old message echoed the supplied password back to the
    // terminal/log, leaking the credential; report what is missing instead.
    console.log('Please pass in both --username and --password');
}

// bcrypt with a work factor of 12; the produced hash embeds its own salt.
function generatePasswordHash(pass) {
    return bcrypt.hashSync(pass, 12)
}
// \ No newline at end of file
// diff --git
a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/users.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/users.js new file mode 100644 index 000000000000..9a8cd1a6f2c4 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/db/users.js @@ -0,0 +1,35 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +const fs = require('fs'); +const path = require('path'); + +// Read in users from file +let records = []; +let usersFile = path.join(__dirname, './users.json'); +if (fs.existsSync(usersFile)) { + console.log(`Reading users from '${usersFile}'`) + var content = fs.readFileSync(usersFile, 'utf8'); + try { + records = JSON.parse(content); + } catch(e) { + console.log(`ERROR: Failed to parse users from file '${usersFile}'`) + } +} + +exports.findById = function(id, cb) { + var idx = id - 1; + if (records[idx]) { + cb(null, records[idx]); + } else { + cb(new Error('User ' + id + ' does not exist')); + } +} + +exports.findByUsername = function(username, cb) { + for (var i = 0, len = records.length; i < len; i++) { + var record = records[i]; + if (record.username === username) { + return cb(null, record); + } + } + return cb(null, null); +} diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/index.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/index.js new file mode 100644 index 000000000000..6a5db980b451 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/index.js @@ -0,0 +1,4 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+module.exports = { + init: require('./init') +} \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/init.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/init.js new file mode 100644 index 000000000000..9d7db4533c2a --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/authentication/init.js @@ -0,0 +1,109 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +// Adapted from +// * https://blog.risingstack.com/node-hero-node-js-authentication-passport-js/ +// * https://github.com/RisingStack/nodehero-authentication/tree/master/app +// * https://github.com/passport/express-4.x-local-example + + +const passport = require('passport'); +const session = require('express-session'); +const bcrypt = require('bcrypt'); +const LocalStrategy = require('passport-local').Strategy +const path = require('path'); +const fs = require('fs'); +var db = require('./db'); + +function initPassport (app) { + + // Generate session secret if it doesn't already exist and save it to file for use next time + let config = {}; + let configPath = path.join(__dirname, './config.json'); + if (fs.existsSync(configPath)) { + let content = fs.readFileSync(configPath, 'utf8'); + try { + config = JSON.parse(content); + } catch (e) { + console.log(`Error with config file '${configPath}': ${e}`); + } + } + + if(!config.sessionSecret){ + config.sessionSecret = bcrypt.genSaltSync(12); + let content = JSON.stringify(config); + fs.writeFileSync(configPath, content); + } + + // Setup session id settings + app.use(session({ + secret: config.sessionSecret, + resave: false, + saveUninitialized: false, + cookie: { + secure: true, + maxAge: 24 * 60 * 60 * 1000 /* 1 day */ + //maxAge: 5 * 1000 /* 5 seconds */ + } + })) + + app.use(passport.initialize()); + app.use(passport.session()); + + passport.serializeUser(function(user, cb) { + 
cb(null, user.id); + }); + + passport.deserializeUser(function(id, cb) { + db.users.findById(id, function (err, user) { + if (err) { return cb(err); } + cb(null, user); + }); + }); + + console.log('Setting up auth'); + passport.use(new LocalStrategy( + (username, password, callback) => { + db.users.findByUsername(username, (err, user) => { + if (err) { + console.log(`Unable to login '${username}', error ${err}`); + return callback(err); + } + + // User not found + if (!user) { + console.log(`User '${username}' not found`); + return callback(null, false); + } + + // Always use hashed passwords and fixed time comparison + bcrypt.compare(password, user.passwordHash, (err, isValid) => { + if (err) { + console.log(`Error comparing password for user '${username}': ${err}`); + return callback(err); + } + if (!isValid) { + console.log(`Password incorrect for user '${username}'`) + return callback(null, false); + } + + console.log(`User '${username}' logged in`); + return callback(null, user); + }); + }) + } + )); + + passport.authenticationMiddleware = function authenticationMiddleware (redirectUrl) { + return function (req, res, next) { + if (req.isAuthenticated()) { + return next(); + } + + // Set redirectTo property so that user can be redirected back there after logging in + //console.log(`Original request path '${req.originalUrl}'`); + req.session.redirectTo = req.originalUrl; + res.redirect(redirectUrl); + } + } +} + +module.exports = initPassport \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/config.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/config.js new file mode 100644 index 000000000000..83505048f78a --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/config.js @@ -0,0 +1,49 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +//-- Provides configuration information from file and combines it with default values and command line arguments --// +//-- Hierachy of values: Default Values < Config File < Command Line arguments --// + +const fs = require('fs'); +const path = require('path'); +const argv = require('yargs').argv; + +function initConfig(configFile, defaultConfig){ + defaultConfig = defaultConfig || {}; + + // Using object spread syntax: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_syntax#Spread_in_object_literals + let config = {...defaultConfig}; + try{ + let configData = fs.readFileSync(configFile, 'UTF8'); + fileConfig = JSON.parse(configData); + config = {...config, ...fileConfig} + // Update config file with any additional defaults (does not override existing values if default has changed) + fs.writeFileSync(configFile, JSON.stringify(config, null, '\t'), 'UTF8'); + } catch(err) { + if (err.code === 'ENOENT') { + console.log("No config file found, writing defaults to log file " + configFile); + fs.writeFileSync(configFile, JSON.stringify(config, null, '\t'), 'UTF8'); + } else if (err instanceof SyntaxError) { + console.log(`ERROR: Invalid JSON in ${configFile}, ignoring file config, ${err}`) + } else { + console.log(`ERROR: ${err}`); + } + } + + try{ + //Make a copy of the command line args and remove the unneccessary ones + //The _ value is an array of any elements without a key + let commandLineConfig = {...argv} + delete commandLineConfig._; + delete commandLineConfig.help; + delete commandLineConfig.version; + delete commandLineConfig['$0']; + config = {...config, ...commandLineConfig} + } catch(err) { + console.log(`ERROR: ${err}`); + } + return config; +} + +module.exports = { + init: initConfig +} \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/httpsClient.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/httpsClient.js new file 
mode 100644 index 000000000000..e48287c65d4b --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/httpsClient.js @@ -0,0 +1,95 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +var querystring = require('querystring') +const https = require('https'); +const assert = require('assert'); + +function cleanUrl(aUrl){ + let url = aUrl; + if(url.startsWith("https://")) + url = url.substring("https://".length); + + return url +} + +function createOptions(requestType, url){ + let index = url.indexOf('/'); + + let urlParts = url.split('/', 2) + + return { + hostname: (index === -1) ? url.substring(0) : url.substring(0, index), + port: 443, + path: (index === -1) ? '' : url.substring(index), + method: requestType, + timeout: 30000, + }; +} + +function makeHttpsCall(options, aCallback, aError){ + //console.log(JSON.stringify(options)); + const req = https.request(options, function(response){ + let data = ''; + + //console.log('statusCode:', response.statusCode); + //console.log('headers:', response.headers); + + // A chunk of data has been received. + response.on('data', (chunk) => { + data += chunk; + }); + + // The whole response has been received. Print out the result. + response.on('end', () => { + if(typeof aCallback != "undefined") + aCallback(response, data); + }); + }); + + req.on('timeout', function () { + console.log("Request timed out. 
" + (options.timeout / 1000) + " seconds expired"); + + // Source: https://github.com/nodejs/node/blob/master/test/parallel/test-http-client-timeout-option.js#L27 + req.destroy(); + }); + + req.on("error", (err) => { + if(typeof aError != "undefined") { + aError(err); + } else { + console.log("Error: " + err.message); + } + }); + + return req; +} + +module.exports = class HttpClient { + get(aUrl, aCallback, aError) { + let url = cleanUrl(aUrl); + + let options = createOptions('GET', url); + + const req = makeHttpsCall(options, aCallback, aError); + + req.end(); + } + + post(aUrl, body, aCallback, aError) { + let url = cleanUrl(aUrl); + + let options = createOptions('POST', url); + + let postBody = querystring.stringify(body); + + //Add extra options for POST request type + options.headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + 'Content-Length': postBody.length + }; + + const req = makeHttpsCall(options, aCallback, aError); + + req.write(postBody); + req.end(); + } +} \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/logging.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/logging.js new file mode 100644 index 000000000000..dd4e175a1f60 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/modules/logging.js @@ -0,0 +1,108 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +const fs = require('fs'); +const { Console } = require('console'); + +var loggers=[]; +var logFunctions=[]; +var logColorFunctions=[]; + +console.log = function(msg) { + logFunctions.forEach((logFunction) => { + logFunction(msg); + }); +} + +console.logColor = function(color, msg) { + logColorFunctions.forEach((logColorFunction) => { + logColorFunction(color, msg); + }); +} + +const AllAttributesOff = '\x1b[0m'; +const BoldOn = '\x1b[1m'; +const Black = '\x1b[30m'; +const Red = '\x1b[31m'; +const Green = '\x1b[32m'; +const Yellow = '\x1b[33m'; +const Blue = '\x1b[34m'; +const Magenta = '\x1b[35m'; +const Cyan = '\x1b[36m'; +const White = '\x1b[37m'; + +/** + * Pad the start of the given number with zeros so it takes up the number of digits. + * e.g. zeroPad(5, 3) = '005' and zeroPad(23, 2) = '23'. + */ +function zeroPad(number, digits) { + let string = number.toString(); + while (string.length < digits) { + string = '0' + string; + } + return string; +} + +/** + * Create a string of the form 'YEAR.MONTH.DATE.HOURS.MINUTES.SECONDS'. + */ +function dateTimeToString() { + let date = new Date(); + return `${date.getFullYear()}.${zeroPad(date.getMonth(), 2)}.${zeroPad(date.getDate(), 2)}.${zeroPad(date.getHours(), 2)}.${zeroPad(date.getMinutes(), 2)}.${zeroPad(date.getSeconds(), 2)}`; +} + +/** + * Create a string of the form 'HOURS.MINUTES.SECONDS.MILLISECONDS'. 
+ */ +function timeToString() { + let date = new Date(); + return `${zeroPad(date.getHours(), 2)}:${zeroPad(date.getMinutes(), 2)}:${zeroPad(date.getSeconds(), 2)}.${zeroPad(date.getMilliseconds(), 3)}`; +} + +function RegisterFileLogger(path) { + if(path == null) + path = './'; + + if (!fs.existsSync(path)) + fs.mkdirSync(path); + + var output = fs.createWriteStream(`./logs/${dateTimeToString()}.log`); + var fileLogger = new Console(output); + logFunctions.push(function(msg) { + fileLogger.log(`${timeToString()} ${msg}`); + }); + + logColorFunctions.push(function(color, msg) { + fileLogger.log(`${timeToString()} ${msg}`); + }); + loggers.push(fileLogger); +} + +function RegisterConsoleLogger() { + var consoleLogger = new Console(process.stdout, process.stderr) + logFunctions.push(function(msg) { + consoleLogger.log(`${timeToString()} ${msg}`); + }); + + logColorFunctions.push(function(color, msg) { + consoleLogger.log(`${BoldOn}${color}${timeToString()} ${msg}${AllAttributesOff}`); + }); + loggers.push(consoleLogger); +} + +module.exports = { + //Functions + RegisterFileLogger, + RegisterConsoleLogger, + + //Variables + AllAttributesOff, + BoldOn, + Black, + Red, + Green, + Yellow, + Blue, + Magenta, + Cyan, + White +} \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package-lock.json b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package-lock.json new file mode 100644 index 000000000000..98f8d1ebbb38 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package-lock.json @@ -0,0 +1,1655 @@ +{ + "name": "cirrus-webserver", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "accepts": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.4.tgz", + "integrity": "sha1-hiRnWMfdbSGmR0/whKR0DsBesh8=", + "requires": { + "mime-types": "2.1.17", + "negotiator": "0.6.1" + } + }, + 
"after": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz", + "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8=" + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "arraybuffer.slice": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", + "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==" + }, + "async-limiter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", + "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==" + }, + "backo2": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", + "integrity": "sha1-MasayLEpNjRj41s+u2n038+6eUc=" + }, + "base64-arraybuffer": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz", + "integrity": "sha1-c5JncZI7Whl0etZmqlzUv5xunOg=" + }, + "base64id": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/base64id/-/base64id-1.0.0.tgz", + "integrity": "sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY=" + }, + "bcrypt": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-2.0.1.tgz", + "integrity": "sha512-DwB7WgJPdskbR+9Y3OTJtwRq09Lmm7Na6b+4ewvXjkD0nfNRi1OozxljHm5ETlDCBq9DTy04lQz+rj+T2ztIJg==", + "requires": { + "nan": "2.10.0", + "node-pre-gyp": "0.9.1" + }, + "dependencies": { + "abbrev": { + "version": "1.1.1", + "bundled": true + }, + "ansi-regex": { + "version": "2.1.1", + "bundled": true + }, + "aproba": { + "version": 
"1.2.0", + "bundled": true + }, + "are-we-there-yet": { + "version": "1.1.4", + "bundled": true, + "requires": { + "delegates": "1.0.0", + "readable-stream": "2.3.5" + } + }, + "balanced-match": { + "version": "1.0.0", + "bundled": true + }, + "brace-expansion": { + "version": "1.1.11", + "bundled": true, + "requires": { + "balanced-match": "1.0.0", + "concat-map": "0.0.1" + } + }, + "chownr": { + "version": "1.0.1", + "bundled": true + }, + "code-point-at": { + "version": "1.1.0", + "bundled": true + }, + "concat-map": { + "version": "0.0.1", + "bundled": true + }, + "console-control-strings": { + "version": "1.1.0", + "bundled": true + }, + "core-util-is": { + "version": "1.0.2", + "bundled": true + }, + "debug": { + "version": "2.6.9", + "bundled": true, + "requires": { + "ms": "2.0.0" + } + }, + "deep-extend": { + "version": "0.4.2", + "bundled": true + }, + "delegates": { + "version": "1.0.0", + "bundled": true + }, + "detect-libc": { + "version": "1.0.3", + "bundled": true + }, + "fs-minipass": { + "version": "1.2.5", + "bundled": true, + "requires": { + "minipass": "2.2.4" + } + }, + "fs.realpath": { + "version": "1.0.0", + "bundled": true + }, + "gauge": { + "version": "2.7.4", + "bundled": true, + "requires": { + "aproba": "1.2.0", + "console-control-strings": "1.1.0", + "has-unicode": "2.0.1", + "object-assign": "4.1.1", + "signal-exit": "3.0.2", + "string-width": "1.0.2", + "strip-ansi": "3.0.1", + "wide-align": "1.1.2" + } + }, + "glob": { + "version": "7.1.2", + "bundled": true, + "requires": { + "fs.realpath": "1.0.0", + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "has-unicode": { + "version": "2.0.1", + "bundled": true + }, + "iconv-lite": { + "version": "0.4.21", + "bundled": true, + "requires": { + "safer-buffer": "2.1.2" + } + }, + "ignore-walk": { + "version": "3.0.1", + "bundled": true, + "requires": { + "minimatch": "3.0.4" + } + }, + "inflight": { + "version": 
"1.0.6", + "bundled": true, + "requires": { + "once": "1.4.0", + "wrappy": "1.0.2" + } + }, + "inherits": { + "version": "2.0.3", + "bundled": true + }, + "ini": { + "version": "1.3.5", + "bundled": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "bundled": true, + "requires": { + "number-is-nan": "1.0.1" + } + }, + "isarray": { + "version": "1.0.0", + "bundled": true + }, + "minimatch": { + "version": "3.0.4", + "bundled": true, + "requires": { + "brace-expansion": "1.1.11" + } + }, + "minimist": { + "version": "0.0.8", + "bundled": true + }, + "minipass": { + "version": "2.2.4", + "bundled": true, + "requires": { + "safe-buffer": "5.1.1", + "yallist": "3.0.2" + }, + "dependencies": { + "yallist": { + "version": "3.0.2", + "bundled": true + } + } + }, + "minizlib": { + "version": "1.1.0", + "bundled": true, + "requires": { + "minipass": "2.2.4" + } + }, + "mkdirp": { + "version": "0.5.1", + "bundled": true, + "requires": { + "minimist": "0.0.8" + } + }, + "ms": { + "version": "2.0.0", + "bundled": true + }, + "needle": { + "version": "2.2.0", + "bundled": true, + "requires": { + "debug": "2.6.9", + "iconv-lite": "0.4.21", + "sax": "1.2.4" + } + }, + "node-pre-gyp": { + "version": "0.9.1", + "bundled": true, + "requires": { + "detect-libc": "1.0.3", + "mkdirp": "0.5.1", + "needle": "2.2.0", + "nopt": "4.0.1", + "npm-packlist": "1.1.10", + "npmlog": "4.1.2", + "rc": "1.2.6", + "rimraf": "2.6.2", + "semver": "5.5.0", + "tar": "4.4.1" + } + }, + "nopt": { + "version": "4.0.1", + "bundled": true, + "requires": { + "abbrev": "1.1.1", + "osenv": "0.1.5" + } + }, + "npm-bundled": { + "version": "1.0.3", + "bundled": true + }, + "npm-packlist": { + "version": "1.1.10", + "bundled": true, + "requires": { + "ignore-walk": "3.0.1", + "npm-bundled": "1.0.3" + } + }, + "npmlog": { + "version": "4.1.2", + "bundled": true, + "requires": { + "are-we-there-yet": "1.1.4", + "console-control-strings": "1.1.0", + "gauge": "2.7.4", + "set-blocking": "2.0.0" + } + }, + 
"number-is-nan": { + "version": "1.0.1", + "bundled": true + }, + "object-assign": { + "version": "4.1.1", + "bundled": true + }, + "once": { + "version": "1.4.0", + "bundled": true, + "requires": { + "wrappy": "1.0.2" + } + }, + "os-homedir": { + "version": "1.0.2", + "bundled": true + }, + "os-tmpdir": { + "version": "1.0.2", + "bundled": true + }, + "osenv": { + "version": "0.1.5", + "bundled": true, + "requires": { + "os-homedir": "1.0.2", + "os-tmpdir": "1.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "bundled": true + }, + "process-nextick-args": { + "version": "2.0.0", + "bundled": true + }, + "rc": { + "version": "1.2.6", + "bundled": true, + "requires": { + "deep-extend": "0.4.2", + "ini": "1.3.5", + "minimist": "1.2.0", + "strip-json-comments": "2.0.1" + }, + "dependencies": { + "minimist": { + "version": "1.2.0", + "bundled": true + } + } + }, + "readable-stream": { + "version": "2.3.5", + "bundled": true, + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "1.0.0", + "process-nextick-args": "2.0.0", + "safe-buffer": "5.1.1", + "string_decoder": "1.0.3", + "util-deprecate": "1.0.2" + } + }, + "rimraf": { + "version": "2.6.2", + "bundled": true, + "requires": { + "glob": "7.1.2" + } + }, + "safe-buffer": { + "version": "5.1.1", + "bundled": true + }, + "safer-buffer": { + "version": "2.1.2", + "bundled": true + }, + "sax": { + "version": "1.2.4", + "bundled": true + }, + "semver": { + "version": "5.5.0", + "bundled": true + }, + "set-blocking": { + "version": "2.0.0", + "bundled": true + }, + "signal-exit": { + "version": "3.0.2", + "bundled": true + }, + "string-width": { + "version": "1.0.2", + "bundled": true, + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } + }, + "string_decoder": { + "version": "1.0.3", + "bundled": true, + "requires": { + "safe-buffer": "5.1.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "requires": { + 
"ansi-regex": "2.1.1" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "bundled": true + }, + "tar": { + "version": "4.4.1", + "bundled": true, + "requires": { + "chownr": "1.0.1", + "fs-minipass": "1.2.5", + "minipass": "2.2.4", + "minizlib": "1.1.0", + "mkdirp": "0.5.1", + "safe-buffer": "5.1.1", + "yallist": "3.0.2" + }, + "dependencies": { + "yallist": { + "version": "3.0.2", + "bundled": true + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "bundled": true + }, + "wide-align": { + "version": "1.1.2", + "bundled": true, + "requires": { + "string-width": "1.0.2" + } + }, + "wrappy": { + "version": "1.0.2", + "bundled": true + } + } + }, + "better-assert": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz", + "integrity": "sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI=", + "requires": { + "callsite": "1.0.0" + } + }, + "blob": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", + "integrity": "sha1-vPEwUspURj8w+fx+lbmkdjCpSSE=" + }, + "body-parser": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", + "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "1.0.4", + "debug": "2.6.9", + "depd": "1.1.1", + "http-errors": "1.6.2", + "iconv-lite": "0.4.19", + "on-finished": "2.3.0", + "qs": "6.5.1", + "raw-body": "2.3.2", + "type-is": "1.6.15" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + 
"integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + } + } + }, + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "callsite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "integrity": "sha1-KAOY5dZkvXQDi28JBRU+borxvCA=" + }, + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" + }, + "camelize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.0.tgz", + "integrity": "sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs=" + }, + "cliui": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.0.0.tgz", + "integrity": "sha512-nY3W5Gu2racvdDk//ELReY+dHjb9PlIcVDFXP72nVIhq2Gy3LuVXYwJoPVudwQnv1shtohpgkdCKT2YaKY0CKw==", + "requires": { + "string-width": "2.1.1", + "strip-ansi": "4.0.0", + "wrap-ansi": "2.1.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + }, + "component-bind": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz", + "integrity": "sha1-AMYIq33Nk4l8AAllGx06jh5zu9E=" + }, + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "component-inherit": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz", + "integrity": "sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM=" + }, + "content-disposition": { + "version": "0.5.2", + "resolved": 
"https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + }, + "content-security-policy-builder": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/content-security-policy-builder/-/content-security-policy-builder-1.1.0.tgz", + "integrity": "sha1-2R8bB2I2wRmFDH3umSS/VeBXcrM=", + "requires": { + "dashify": "0.2.2" + } + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, + "cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "crc": { + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/crc/-/crc-3.4.4.tgz", + "integrity": "sha1-naHpgOO9RPxck79as9ozeNheRms=" + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "4.1.1", + "shebang-command": "1.2.0", + "which": "1.3.0" + } + }, + "dasherize": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dasherize/-/dasherize-2.0.0.tgz", + "integrity": "sha1-bYCcnNDPe7iVLYD8hPoT1H3bEwg=" + }, + "dashify": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/dashify/-/dashify-0.2.2.tgz", + "integrity": "sha1-agdBWgHJH69KMuONnfunH2HLIP4=" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + 
"requires": { + "ms": "2.0.0" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + }, + "depd": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", + "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "dns-prefetch-control": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/dns-prefetch-control/-/dns-prefetch-control-0.1.0.tgz", + "integrity": "sha1-YN20V3dOF48flBXwyrsOhbCzALI=" + }, + "dont-sniff-mimetype": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dont-sniff-mimetype/-/dont-sniff-mimetype-1.0.0.tgz", + "integrity": "sha1-WTKJDcn04vGeXrAqIAJuXl78j1g=" + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "encodeurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz", + "integrity": "sha1-eePVhlU0aQn+bw9Fpd5oEDspTSA=" + }, + "engine.io": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-3.1.4.tgz", + "integrity": "sha1-PQIRtwpVLOhB/8fahiezAamkFi4=", + "requires": { + "accepts": "1.3.3", + "base64id": "1.0.0", + "cookie": "0.3.1", + "debug": "2.6.9", + "engine.io-parser": "2.1.2", + "uws": "0.14.5", + "ws": "3.3.3" + }, + "dependencies": { + "accepts": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz", + "integrity": "sha1-w8p0NJOGSMPg2cHjKN1otiLChMo=", + "requires": { + "mime-types": "2.1.17", + "negotiator": "0.6.1" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "engine.io-client": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-3.1.4.tgz", + "integrity": "sha1-T88TcLRxY70s6b4nM5ckMDUNTqE=", + "requires": { + "component-emitter": "1.2.1", + "component-inherit": "0.0.3", + "debug": "2.6.9", + "engine.io-parser": "2.1.2", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "parseqs": "0.0.5", + "parseuri": "0.0.5", + "ws": "3.3.3", + "xmlhttprequest-ssl": "1.5.4", + "yeast": "0.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "engine.io-parser": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-2.1.2.tgz", + "integrity": "sha512-dInLFzr80RijZ1rGpx1+56/uFoH7/7InhH3kZt+Ms6hT8tNx3NGW/WNSA/f8As1WkOfkuyb3tnRyuXGxusclMw==", + "requires": { + "after": "0.8.2", + "arraybuffer.slice": "0.0.7", + "base64-arraybuffer": "0.1.5", + "blob": "0.0.4", + "has-binary2": "1.0.2" + } + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, + "execa": { + "version": "0.7.0", + 
"resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "5.1.0", + "get-stream": "3.0.0", + "is-stream": "1.1.0", + "npm-run-path": "2.0.2", + "p-finally": "1.0.0", + "signal-exit": "3.0.2", + "strip-eof": "1.0.0" + } + }, + "expect-ct": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/expect-ct/-/expect-ct-0.1.0.tgz", + "integrity": "sha1-UnNWeN4YUwiQ2Ne5XwrGNkCVgJQ=" + }, + "express": { + "version": "4.16.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.16.2.tgz", + "integrity": "sha1-41xt/i1kt9ygpc1PIXgb4ymeB2w=", + "requires": { + "accepts": "1.3.4", + "array-flatten": "1.1.1", + "body-parser": "1.18.2", + "content-disposition": "0.5.2", + "content-type": "1.0.4", + "cookie": "0.3.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "1.1.1", + "encodeurl": "1.0.1", + "escape-html": "1.0.3", + "etag": "1.8.1", + "finalhandler": "1.1.0", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "1.1.2", + "on-finished": "2.3.0", + "parseurl": "1.3.2", + "path-to-regexp": "0.1.7", + "proxy-addr": "2.0.2", + "qs": "6.5.1", + "range-parser": "1.2.0", + "safe-buffer": "5.1.1", + "send": "0.16.1", + "serve-static": "1.13.1", + "setprototypeof": "1.1.0", + "statuses": "1.3.1", + "type-is": "1.6.15", + "utils-merge": "1.0.1", + "vary": "1.1.2" + }, + "dependencies": { + "setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + } + } + }, + "express-session": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.15.6.tgz", + "integrity": "sha512-r0nrHTCYtAMrFwZ0kBzZEXa1vtPVrw0dKvGSrKP4dahwBQ1BJpF2/y1Pp4sCD/0kvxV4zZeclyvfmw0B4RMJQA==", + "requires": { + "cookie": "0.3.1", + "cookie-signature": 
"1.0.6", + "crc": "3.4.4", + "debug": "2.6.9", + "depd": "1.1.1", + "on-headers": "1.0.1", + "parseurl": "1.3.2", + "uid-safe": "2.1.5", + "utils-merge": "1.0.1" + } + }, + "finalhandler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.0.tgz", + "integrity": "sha1-zgtoVbRYU+eRsvzGgARtiCU91/U=", + "requires": { + "debug": "2.6.9", + "encodeurl": "1.0.1", + "escape-html": "1.0.3", + "on-finished": "2.3.0", + "parseurl": "1.3.2", + "statuses": "1.3.1", + "unpipe": "1.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "requires": { + "locate-path": "2.0.0" + } + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, + "frameguard": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/frameguard/-/frameguard-3.0.0.tgz", + "integrity": "sha1-e8rUae57lukdEs6zlZx4I1qScuk=" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "get-caller-file": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.2.tgz", + "integrity": "sha1-9wLmMSfn4jHBYKgMFVSstw1QR+U=" + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "has-binary2": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-binary2/-/has-binary2-1.0.2.tgz", + "integrity": "sha1-6D26SfC5vk0CbSc2U1DZ8D9Uvpg=", + "requires": { + "isarray": "2.0.1" + } + }, + "has-cors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz", + "integrity": "sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk=" + }, 
+ "helmet": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-3.9.0.tgz", + "integrity": "sha512-czCyS77TyanWlfVSoGlb9GBJV2Q2zJayKxU5uBw0N1TzDTs/qVNh1SL8Q688KU0i0Sb7lQ/oLtnaEqXzl2yWvA==", + "requires": { + "dns-prefetch-control": "0.1.0", + "dont-sniff-mimetype": "1.0.0", + "expect-ct": "0.1.0", + "frameguard": "3.0.0", + "helmet-csp": "2.6.0", + "hide-powered-by": "1.0.0", + "hpkp": "2.0.0", + "hsts": "2.1.0", + "ienoopen": "1.0.0", + "nocache": "2.0.0", + "referrer-policy": "1.1.0", + "x-xss-protection": "1.0.0" + } + }, + "helmet-csp": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/helmet-csp/-/helmet-csp-2.6.0.tgz", + "integrity": "sha512-n/oW9l6RtO4f9YvphsNzdvk1zITrSN7iRT8ojgrJu/N3mVdHl9zE4OjbiHWcR64JK32kbqx90/yshWGXcjUEhw==", + "requires": { + "camelize": "1.0.0", + "content-security-policy-builder": "1.1.0", + "dasherize": "2.0.0", + "lodash.reduce": "4.6.0", + "platform": "1.3.4" + } + }, + "hide-powered-by": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hide-powered-by/-/hide-powered-by-1.0.0.tgz", + "integrity": "sha1-SoWtZYgfYoV/xwr3F0oRhNzM4ys=" + }, + "hpkp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hpkp/-/hpkp-2.0.0.tgz", + "integrity": "sha1-EOFCJk52IVpdMMROxD3mTe5tFnI=" + }, + "hsts": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hsts/-/hsts-2.1.0.tgz", + "integrity": "sha512-zXhh/DqgrTXJ7erTN6Fh5k/xjMhDGXCqdYN3wvxUvGUQvnxcFfUd8E+6vLg/nk3ss1TYMb+DhRl25fYABioTvA==" + }, + "http-errors": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", + "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", + "requires": { + "depd": "1.1.1", + "inherits": "2.0.3", + "setprototypeof": "1.0.3", + "statuses": "1.3.1" + } + }, + "iconv-lite": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", + "integrity": 
"sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" + }, + "ienoopen": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ienoopen/-/ienoopen-1.0.0.tgz", + "integrity": "sha1-NGpCj0dKrI9QzzeE6i0PFvYr2ms=" + }, + "indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + }, + "ipaddr.js": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.5.2.tgz", + "integrity": "sha1-1LUFvemUaYfM8PxY2QEP+WB+P6A=" + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "isarray": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz", + "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "requires": { + "invert-kv": "1.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": 
"sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "requires": { + "p-locate": "2.0.0", + "path-exists": "3.0.0" + } + }, + "lodash.reduce": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.reduce/-/lodash.reduce-4.6.0.tgz", + "integrity": "sha1-8atrg5KZrUj3hKu/R2WW8DuRTTs=" + }, + "lru-cache": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.1.tgz", + "integrity": "sha512-q4spe4KTfsAS1SUHLO0wz8Qiyf1+vMIAgpRYioFYDMNqKfHQbg+AVDH3i4fvpl71/P1L0dBl+fQi+P37UYf0ew==", + "requires": { + "pseudomap": "1.0.2", + "yallist": "2.1.2" + } + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, + "mem": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", + "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "requires": { + "mimic-fn": "1.1.0" + } + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + }, + "mime": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", + "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" + }, + "mime-db": { + "version": "1.30.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.30.0.tgz", + "integrity": "sha1-dMZD2i3Z1qRTmZY0ZbJtXKfXHwE=" + }, + "mime-types": { + "version": "2.1.17", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.17.tgz", + "integrity": "sha1-Cdejk/A+mVp5+K+Fe3Cp4KsWVXo=", + "requires": { + "mime-db": "1.30.0" + } + }, + "mimic-fn": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.1.0.tgz", + "integrity": "sha1-5md4PZLonb00KBi1IwudYqZyrRg=" + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "nan": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.10.0.tgz", + "integrity": "sha512-bAdJv7fBLhWC+/Bls0Oza+mvTaNQtP+1RyhhhvD95pgUJz6XM5IzgmxOkItJ9tkoCiplvAnXI1tNmmUD/eScyA==" + }, + "negotiator": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + }, + "nocache": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nocache/-/nocache-2.0.0.tgz", + "integrity": "sha1-ICtIAhoMTL3i34DeFaF0Q8i0OYA=" + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "requires": { + "path-key": "2.0.1" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, + "object-component": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz", + "integrity": "sha1-8MaapQ78lbhmwYb0AKM3acsvEpE=" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz", + "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c=" + }, + "os-locale": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", + "integrity": 
"sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "requires": { + "execa": "0.7.0", + "lcid": "1.0.0", + "mem": "1.1.0" + } + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + }, + "p-limit": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.2.0.tgz", + "integrity": "sha512-Y/OtIaXtUPr4/YpMv1pCL5L5ed0rumAaAeBSj12F+bSlMdys7i8oQF/GUJmfpTS/QoaRrS/k6pma29haJpsMng==", + "requires": { + "p-try": "1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "requires": { + "p-limit": "1.2.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + }, + "parseqs": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.5.tgz", + "integrity": "sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0=", + "requires": { + "better-assert": "1.0.2" + } + }, + "parseuri": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.5.tgz", + "integrity": "sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo=", + "requires": { + "better-assert": "1.0.2" + } + }, + "parseurl": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", + "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + }, + "passport": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/passport/-/passport-0.4.0.tgz", + "integrity": "sha1-xQlWkTR71a07XhgCOMORTRbwWBE=", + "requires": { + "passport-strategy": "1.0.0", + "pause": "0.0.1" + } + }, + "passport-local": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/passport-local/-/passport-local-1.0.0.tgz", + "integrity": 
"sha1-H+YyaMkudWBmJkN+O5BmYsFbpu4=", + "requires": { + "passport-strategy": "1.0.0" + } + }, + "passport-strategy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/passport-strategy/-/passport-strategy-1.0.0.tgz", + "integrity": "sha1-tVOaqPwiWj0a0XlHbd8ja0QPUuQ=" + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "pause": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" + }, + "platform": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.4.tgz", + "integrity": "sha1-bw+xftqqSPIUQrOpdcBjEw8cPr0=" + }, + "proxy-addr": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.2.tgz", + "integrity": "sha1-ZXFQT0e7mI7IGAJT+F3X4UlSvew=", + "requires": { + "forwarded": "0.1.2", + "ipaddr.js": "1.5.2" + } + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=" + }, + "range-parser": { + "version": "1.2.0", + 
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + }, + "raw-body": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", + "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.2", + "iconv-lite": "0.4.19", + "unpipe": "1.0.0" + } + }, + "referrer-policy": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/referrer-policy/-/referrer-policy-1.1.0.tgz", + "integrity": "sha1-NXdOtzW/UPtsB46DM0tHI1AgfXk=" + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + }, + "safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + }, + "send": { + "version": "0.16.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.16.1.tgz", + "integrity": "sha512-ElCLJdJIKPk6ux/Hocwhk7NFHpI3pVm/IZOYWqUmoxcgeyM+MpxHHKhb8QmlJDX1pU6WrgaHBkVNm73Sv7uc2A==", + "requires": { + "debug": "2.6.9", + "depd": "1.1.1", + "destroy": "1.0.4", + "encodeurl": "1.0.1", + "escape-html": "1.0.3", + "etag": "1.8.1", + "fresh": "0.5.2", + "http-errors": "1.6.2", + "mime": "1.4.1", + "ms": "2.0.0", + "on-finished": "2.3.0", + "range-parser": "1.2.0", + "statuses": "1.3.1" + } + }, + "serve-static": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.1.tgz", + "integrity": 
"sha512-hSMUZrsPa/I09VYFJwa627JJkNs0NrfL1Uzuup+GqHfToR2KcsXFymXSV90hoyw3M+msjFuQly+YzIH/q0MGlQ==", + "requires": { + "encodeurl": "1.0.1", + "escape-html": "1.0.3", + "parseurl": "1.3.2", + "send": "0.16.1" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "setprototypeof": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", + "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "requires": { + "shebang-regex": "1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, + "socket.io": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-2.0.4.tgz", + "integrity": "sha1-waRZDO/4fs8TxyZS8Eb3FrKeYBQ=", + "requires": { + "debug": "2.6.9", + "engine.io": "3.1.4", + "socket.io-adapter": "1.1.1", + "socket.io-client": "2.0.4", + "socket.io-parser": "3.1.2" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "socket.io-adapter": { + "version": "1.1.1", + 
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-1.1.1.tgz", + "integrity": "sha1-KoBeihTWNyEk3ZFZrUUC+MsH8Gs=" + }, + "socket.io-client": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-2.0.4.tgz", + "integrity": "sha1-CRilUkBtxeVAs4Dc2Xr8SmQzL44=", + "requires": { + "backo2": "1.0.2", + "base64-arraybuffer": "0.1.5", + "component-bind": "1.0.0", + "component-emitter": "1.2.1", + "debug": "2.6.9", + "engine.io-client": "3.1.4", + "has-cors": "1.1.0", + "indexof": "0.0.1", + "object-component": "0.0.3", + "parseqs": "0.0.5", + "parseuri": "0.0.5", + "socket.io-parser": "3.1.2", + "to-array": "0.1.4" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "socket.io-parser": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.1.2.tgz", + "integrity": "sha1-28IoIVH8T6675Aru3Ady66YZ9/I=", + "requires": { + "component-emitter": "1.2.1", + "debug": "2.6.9", + "has-binary2": "1.0.2", + "isarray": "2.0.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + } + } + }, + "statuses": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + 
"integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "2.0.0", + "strip-ansi": "4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "3.0.0" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + }, + "to-array": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz", + "integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA=" + }, + "type-is": { + "version": "1.6.15", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.15.tgz", + "integrity": "sha1-yrEPtJCeRByChC6v4a1kbIGARBA=", + "requires": { + "media-typer": "0.3.0", + "mime-types": "2.1.17" + } + }, + "uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "requires": { + "random-bytes": "1.0.0" + } + }, + "ultron": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", + "integrity": "sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==" + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, + "utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": 
"sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "uws": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/uws/-/uws-0.14.5.tgz", + "integrity": "sha1-Z6rzPEaypYel9mZtAPdpEyjxSdw=", + "optional": true + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + }, + "which": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz", + "integrity": "sha512-xcJpopdamTuY5duC/KnTTNBraPK54YwpenP4lzxU8H91GudWpFv38u0CKjclE1Wi2EH2EDz5LRcHcKbCIzqGyg==", + "requires": { + "isexe": "2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "requires": { + "string-width": "1.0.2", + "strip-ansi": "3.0.1" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "1.0.1" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "1.1.0", + "is-fullwidth-code-point": "1.0.0", + "strip-ansi": "3.0.1" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": 
"2.1.1" + } + } + } + }, + "ws": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-3.3.3.tgz", + "integrity": "sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA==", + "requires": { + "async-limiter": "1.0.0", + "safe-buffer": "5.1.1", + "ultron": "1.1.1" + } + }, + "x-xss-protection": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/x-xss-protection/-/x-xss-protection-1.0.0.tgz", + "integrity": "sha1-iYr7k4abJGYc+cUvnujbjtB2Tdk=" + }, + "xmlhttprequest-ssl": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.4.tgz", + "integrity": "sha1-BPVgkVcks4kIhxXMDteBPpZ3v1c=" + }, + "y18n": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz", + "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=" + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "yargs": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-10.1.1.tgz", + "integrity": "sha512-7uRL1HZdCbc1QTP+X8mehOPuCYKC/XTaqAPj7gABLfTt6pgLyVRn3QVte4qhtilZouWCvqd1kipgMKl5tKsFiw==", + "requires": { + "cliui": "4.0.0", + "decamelize": "1.2.0", + "find-up": "2.1.0", + "get-caller-file": "1.0.2", + "os-locale": "2.1.0", + "require-directory": "2.1.1", + "require-main-filename": "1.0.1", + "set-blocking": "2.0.0", + "string-width": "2.1.1", + "which-module": "2.0.0", + "y18n": "3.2.1", + "yargs-parser": "8.1.0" + } + }, + "yargs-parser": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-8.1.0.tgz", + "integrity": "sha512-yP+6QqN8BmrgW2ggLtTbdrOyBNSI7zBa4IykmiV5R1wl1JWNxQvWhMfMdmzIYtKU7oP3OOInY/tl2ov3BDjnJQ==", + "requires": { + "camelcase": "4.1.0" + } + }, + "yeast": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz", + 
"integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=" + }, + "yui-lint": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/yui-lint/-/yui-lint-0.2.0.tgz", + "integrity": "sha1-6RPadPqgZPtTwDdxstMSk4ZfAYU=" + } + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package.json b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package.json new file mode 100644 index 000000000000..6abc19792e0d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/package.json @@ -0,0 +1,15 @@ +{ + "name": "cirrus-webserver", + "version": "0.0.1", + "description": "cirrus web server", + "dependencies": { + "bcrypt": "^2.0.1", + "express": "^4.16.2", + "express-session": "^1.15.6", + "helmet": "^3.9.0", + "passport": "^0.4.0", + "passport-local": "^1.0.0", + "socket.io": "^2.0.4", + "yargs": "^10.1.1" + } +} diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/login.css b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/login.css new file mode 100644 index 000000000000..2faaee96c99d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/login.css @@ -0,0 +1,49 @@ +/*Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved.*/ + +:root { + /*Using colour scheme https://color.adobe.com/TD-Colors---Option-3-color-theme-10394433/*/ + --colour1:#2B3A42; + --colour2:#3F5765; + --colour3:#BDD4DE; + --colour4:#EFEFEF; + --colour5:#FF5035; +} + +form{ + margin: 0px auto; + padding: 1em; + width: 350px; + border-radius: 10px; + border: 1px solid #CCC; + background-color: var(--colour4) +} + +.entry{ + padding: 5px; +} + +label { + display: inline-block; + width: 25%; + text-align: right; +} + +input { + text-indent: 5px; + font-family: verdana,sans-serif; + font-size: 1em; + + width: 65%; + box-sizing: border-box; + border: 1px solid #999; +} + +.button { + margin: 0px auto; + width: 70%; +} + +button { + width: 100%; + font-family: verdana,sans-serif; +} \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/player.css b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/player.css new file mode 100644 index 000000000000..062f789a57d7 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/public/player.css @@ -0,0 +1,289 @@ +/*Copyright 1998-2018 Epic Games, Inc. 
All Rights Reserved.*/ + +:root { + /*Using colour scheme https://color.adobe.com/TD-Colors---Option-3-color-theme-10394433/*/ + --colour1:#2B3A42; + --colour2:#3F5765; + --colour3:#BDD4DE; + --colour4:#EFEFEF; + --colour5:#FF5035; + + --buttonFont:Helvetica; + --inputFont:Helvetica; +} + +body{ + margin: 0px; + background-color: black; +} + +#playerUI { + width: 100%; + position: absolute; + /*top: 0; + left: 0;*/ + z-index: 10; +} + +#statsContainer{ + background-color: rgba(0,0,0,0.8); + text-align: left; + display: none; +} + +#stats{ + font-size: 14px; + font-weight: bold; + padding: 6px; + color: lime; +} + +canvas{ + image-rendering: crisp-edges; + position: absolute; +} + +video{ + position: absolute; + width: 100%; + height: 100%; +} + +#player{ + width: 1280px; + height: 720px; + position: relative; + background-color: #000; +} + +#overlay{ + -moz-border-radius-bottomright: 5px; + -moz-border-radius-bottomleft: 5px; + -webkit-border-bottom-right-radius: 5px; + -webkit-border-bottom-left-radius: 5px; + border-bottom-right-radius: 5px; /* future proofing */ + border-bottom-left-radius: 5px; /* future proofing */ + -khtml-border-bottom-right-radius: 5px; /* for old Konqueror browsers */ + -khtml-border-bottom-left-radius: 5px; /* for old Konqueror browsers */ + + -webkit-touch-callout: none; /* iOS Safari */ + -webkit-user-select: none; /* Safari */ + -khtml-user-select: none; /* Konqueror HTML */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* Internet Explorer/Edge */ + user-select: none; /* Non-prefixed version, currently + supported by Chrome and Opera */ + + position: absolute; + padding: 4px; + top: 0; + right: 2%; + z-index: 100; + border: 2px solid var(--colour4); + border-top-width: 0px; +} + +.overlay { + background-color: var(--colour2); + font-family: var(--buttonFont); + font-weight: lighter; + color: var(--colour4); +} + +#overlayButton:hover{ + cursor:pointer; +} + +#overlayButton{ + text-align: right; + font-size: 40px; +} 
+ +#overlaySettings{ + width: 300px; + padding: 4px; + display: none; +} + +#videoMessageOverlay{ + z-index: 20; + color: var(--colour4);; + font-size: 1.8em; + position: absolute; + margin: auto; + font-family: var(--inputFont);; + width: 100%; +} + +#videoPlayOverlay{ + z-index: 30; + position: absolute; + color: var(--colour4); + font-size: 1.8em; + font-family: var(--inputFont); + width: 100%; + height: 100%; + background-color: rgba(255, 255, 255, 0.3); +} + +/* State for element to be clickable */ +.clickableState{ + align-items: center; + justify-content: center; + display: flex; + cursor: pointer; +} + +/* State for element to show text, this is for informational use*/ +.textDisplayState{ + display: flex; +} + +/* State to hide overlay, WebRTC communication is in progress and or is playing */ +.hiddenState{ + display: none; +} + +#playButton{ + display: inline-block; + height: auto; +} + +img#playButton{ + max-width: 241px; + width: 10%; +} + +#UIInteraction{ + position: fixed; +} + +#UIInteractionButtonBoundary{ + padding: 2px; +} + +#UIInteractionButton{ + cursor: pointer; +} + +.settings-text{ + color: var(--colour4); + vertical-align: middle; + font-size: 18px; + font-weight: normal; + display: inline-block; +} + +.overlay-button{ + line-height: 1.1; + padding: 1px 6px; +} + +.btn-overlay{ + float: right; + vertical-align: middle; + display: inline-block; +} + +.btn-flat{ + background: var(--colour4); + border: 2px solid var(--colour5); + font-weight: bold; + cursor: pointer; + font-family: var(--buttonFont); + font-size: 10px; + color: var(--colour5); + border-radius: 5px; + height: 17px; +} + +.btn-flat:disabled{ + background: var(--colour4); + border-color: var(--colour3); + color: var(--colour3); + cursor: default; +} + +.btn-flat:active{ + border-color: var(--colour2); + color: var(--colour2); +} + +.btn-flat:focus{ + outline: none; +} +/*** Toggle Switch styles ***/ +.tgl-switch { + float: right; + vertical-align: middle; + display: inline-block; 
+} + +.tgl-switch .tgl { + display:none; +} + +.tgl, .tgl:after, .tgl:before, .tgl *, .tgl *:after, .tgl *:before, .tgl + .tgl-slider { + -webkit-box-sizing: border-box; + box-sizing: border-box; +} +.tgl::-moz-selection, .tgl:after::-moz-selection, .tgl:before::-moz-selection, .tgl *::-moz-selection, .tgl *:after::-moz-selection, .tgl *:before::-moz-selection, .tgl + .tgl-slider::-moz-selection { + background: none; +} +.tgl::selection, .tgl:after::selection, .tgl:before::selection, .tgl *::selection, .tgl *:after::selection, .tgl *:before::selection, .tgl + .tgl-slider::selection { + background: none; +} + +.tgl + .tgl-slider { + outline: 0; + display: block; + width: 40px; + height: 18px; + position: relative; + cursor: pointer; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +.tgl + .tgl-slider:after, .tgl + .tgl-slider:before { + position: relative; + display: block; + content: ""; + width: 50%; + height: 100%; +} +.tgl + .tgl-slider:after { + left: 0; +} +.tgl + .tgl-slider:before { + display: none; +} + +.tgl-flat + .tgl-slider { + padding: 2px; + -webkit-transition: all .2s ease; + transition: all .2s ease; + background: #fff; + border: 3px solid var(--colour4); + border-radius: 2em; +} + +.tgl-flat + .tgl-slider:after { + -webkit-transition: all .2s ease; + transition: all .2s ease; + background: var(--colour4); + content: ""; + border-radius: 1em; +} + +.tgl-flat:checked + .tgl-slider { + border: 3px solid var(--colour5); +} + +.tgl-flat:checked + .tgl-slider:after { + left: 50%; + background: var(--colour5); +} +/*** Toggle Switch styles ***/ diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/run.bat b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/run.bat new file mode 100644 index 000000000000..ead60ae9094a --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/run.bat @@ -0,0 +1,15 @@ +:: Copyright 1998-2018 
Epic Games, Inc. All Rights Reserved. +@echo off + +pushd %~dp0 + +call setup.bat + +title Cirrus + +::Run node server +::If running with frontend web server and accessing outside of localhost pass in --publicIp= +node cirrus %* + +popd +pause diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runAWS.bat b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runAWS.bat new file mode 100644 index 000000000000..b22aaa6946d3 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runAWS.bat @@ -0,0 +1,16 @@ +:: Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +@echo off + +pushd %~dp0 + +call setup.bat + +title Cirrus + +::Run node server +::If running with matchmaker web server and accessing outside of localhost pass in --publicIp= + +Powershell.exe -executionpolicy unrestricted -File Start_AWS_SignallingServer.ps1 + +popd +pause diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runNoSetup.bat b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runNoSetup.bat new file mode 100644 index 000000000000..659d6349cf5d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/runNoSetup.bat @@ -0,0 +1,13 @@ +:: Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +@echo off + +pushd %~dp0 + +title Cirrus + +::Run node server +::If running with frontend web server and accessing outside of localhost pass in --publicIp= +node cirrus %* + +popd +pause diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/app.js b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/app.js new file mode 100644 index 000000000000..619d7e0f2815 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/app.js @@ -0,0 +1,1238 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +var webRtcPlayerObj = null; +var print_stats = false; +var print_inputs = false; +var connect_on_load = false; + +var is_reconnection = false; +var socket; + +var qualityControlOwnershipCheckBox; +var matchViewportResolution; +// TODO: Remove this - workaround because of bug causing UE to crash when switching resolutions too quickly +var lastTimeResized = new Date().getTime(); +var resizeTimeout; + +var responseEventListeners = new Map(); + +var t0 = Date.now(); +function log(str) { + console.log(`${Math.floor(Date.now() - t0)}: ` + str); +} + +function setupHtmlEvents(){ + //Window events + window.addEventListener('resize', resizePlayerStyle, true); + window.addEventListener('orientationchange', onOrientationChange); + + //HTML elements controls + let resizeCheckBox = document.getElementById('enlarge-display-to-fill-window-tgl'); + if (resizeCheckBox != null) { + resizeCheckBox.onchange = function(event){ + resizePlayerStyle(); + } + } + + qualityControlOwnershipCheckBox = document.getElementById('quality-control-ownership-tgl'); + if (qualityControlOwnershipCheckBox != null) { + qualityControlOwnershipCheckBox.onchange = function (event) { + requestQualityControl(); + } + } + + let prioritiseQualityCheckbox = document.getElementById('prioritise-quality-tgl'); + let qualityParamsSubmit = document.getElementById('quality-params-submit'); + + if (prioritiseQualityCheckbox != null) { + prioritiseQualityCheckbox.onchange = function (event) { + if (prioritiseQualityCheckbox.checked) { + // TODO: This state should be read from the UE Application rather than from the initial values in the HTML + let lowBitrate = document.getElementById('low-bitrate-text').value; + let highBitrate = document.getElementById('high-bitrate-text').value; + let minFPS = document.getElementById('min-fps-text').value; + + let initialDescriptor = { + PrioritiseQuality: 1, + LowBitrate: lowBitrate, + HighBitrate: highBitrate, + MinFPS: minFPS + } + // TODO: The descriptor should be sent as is 
to a generic handler on the UE side + // but for now we're just sending it as separate console commands + //emitUIInteraction(initialDescriptor); + sendQualityConsoleCommands(initialDescriptor); + console.log(initialDescriptor); + + qualityParamsSubmit.onclick = function (event) { + let lowBitrate = document.getElementById('low-bitrate-text').value; + let highBitrate = document.getElementById('high-bitrate-text').value; + let minFPS = document.getElementById('min-fps-text').value; + let descriptor = { + PrioritiseQuality: 1, + LowBitrate: lowBitrate, + HighBitrate: highBitrate, + MinFPS: minFPS + } + //emitUIInteraction(descriptor); + sendQualityConsoleCommands(descriptor); + console.log(descriptor); + } + } else { // Prioritise Quality unchecked + let initialDescriptor = { + PrioritiseQuality: 0 + } + //emitUIInteraction(initialDescriptor); + sendQualityConsoleCommands(initialDescriptor); + console.log(initialDescriptor); + + qualityParamsSubmit.onclick = null; + } + } + } + + let showFPSCheckBox = document.getElementById('show-fps-tgl'); + if (showFPSCheckBox != null) { + showFPSCheckBox.onchange = function (event) { + let consoleDescriptor = { + Console: 'stat fps' + } + emitUIInteraction(consoleDescriptor); + } + } + + let matchViewportResolutionCheckBox = document.getElementById('match-viewport-res-tgl'); + if (matchViewportResolutionCheckBox != null) { + matchViewportResolutionCheckBox.onchange = function (event) { + matchViewportResolution = matchViewportResolutionCheckBox.checked; + } + } + + let statsCheckBox = document.getElementById('show-stats-tgl'); + if (statsCheckBox != null) { + statsCheckBox.onchange = function(event){ + let stats = document.getElementById('statsContainer'); + stats.style.display = (event.target.checked) ? 
"block" : "none"; + } + } + + var kickButton = document.getElementById('kick-other-players-button'); + if(kickButton) { + kickButton.onclick = function(event){ + socket.emit('kick', {}); + } + } +} + +function sendQualityConsoleCommands(descriptor) { + if (descriptor.PrioritiseQuality != null) { + var command = 'Streamer.PrioritiseQuality ' + descriptor.PrioritiseQuality; + let consoleDescriptor = { + Console: command + } + emitUIInteraction(consoleDescriptor); + } + + if (descriptor.LowBitrate != null) { + var command = 'Streamer.LowBitrate ' + descriptor.LowBitrate; + let consoleDescriptor = { + Console: command + } + emitUIInteraction(consoleDescriptor); + } + + if (descriptor.HighBitrate != null) { + var command = 'Streamer.HighBitrate ' + descriptor.HighBitrate; + let consoleDescriptor = { + Console: command + } + emitUIInteraction(consoleDescriptor); + } + + if (descriptor.MinFPS != null) { + var command = 'Streamer.MinFPS ' + descriptor.MinFPS; + let consoleDescriptor = { + Console: command + } + emitUIInteraction(consoleDescriptor); + } +} + +function setOverlay(htmlClass, htmlElement, onClickFunction){ + var videoPlayOverlay = document.getElementById('videoPlayOverlay'); + if(!videoPlayOverlay){ + var playerDiv = document.getElementById('player'); + videoPlayOverlay = document.createElement('div'); + videoPlayOverlay.id = 'videoPlayOverlay'; + playerDiv.appendChild(videoPlayOverlay); + } + + // Remove existing html child elements so we can add the new one + while (videoPlayOverlay.lastChild) { + videoPlayOverlay.removeChild(videoPlayOverlay.lastChild); + } + + if(htmlElement) + videoPlayOverlay.appendChild(htmlElement); + + if(onClickFunction){ + videoPlayOverlay.addEventListener('click', function onOverlayClick(event){ + onClickFunction(event); + videoPlayOverlay.removeEventListener('click', onOverlayClick); + }); + } + + // Remove existing html classes so we can set the new one + var cl = videoPlayOverlay.classList; + for( var i = cl.length-1; i >= 0; 
i-- ) { + cl.remove( cl[i] ); + } + + videoPlayOverlay.classList.add(htmlClass); +} + +function showConnectOverlay(){ + var startText = document.createElement('div'); + startText.id = 'playButton'; + startText.innerHTML = 'Click to start'; + + setOverlay('clickableState', startText, event => { + connect(); + }); +} + +function showTextOverlay(text){ + var textOverlay = document.createElement('div'); + textOverlay.id = 'messageOverlay'; + textOverlay.innerHTML = text ? text : ''; + setOverlay('textDisplayState', textOverlay); +} + +function showPlayOverlay(){ + var img = document.createElement('img'); + img.id = 'playButton'; + img.src = '/images/Play.png'; + img.alt = 'Start Streaming'; + setOverlay('clickableState', img, event => { + if(webRtcPlayerObj) + webRtcPlayerObj.video.play(); + + requestQualityControl(); + + hideOverlay(); + }); +} + +function hideOverlay(){ + setOverlay('hiddenState'); +} + +function createWebRtcOffer(){ + if(webRtcPlayerObj){ + console.log('Creating offer'); + showTextOverlay('Starting connection to server, please wait'); + webRtcPlayerObj.createOffer(); + } else { + console.log('WebRTC player not setup, cannot create offer'); + showTextOverlay('Unable to setup video'); + } +} + +function sendInputData(data){ + if(webRtcPlayerObj) + webRtcPlayerObj.send(data); + } + +function addResponseEventListener(name, listener) { + responseEventListeners.set(name, listener); +} + +function removeResponseEventListener(name) { + responseEventListeners.remove(name); +} + +// Must be kept in sync with PixelStreamingProtocol::EToClientMsg C++ enum. 
+const ToClientMessageType = { + QualityControlOwnership: 0, + Response: 1 +}; + +function setupWebRtcPlayer(htmlElement, clientConfig){ + webRtcPlayerObj = new webRtcPlayer({peerConnectionOptions: clientConfig.peerConnectionOptions}); + htmlElement.appendChild(webRtcPlayerObj.video); + + webRtcPlayerObj.onWebRtcOffer = function (offer) { + socket.emit("webrtc-offer", offer); + }; + + webRtcPlayerObj.onWebRtcCandidate = function(candidate) { + socket.emit('webrtc-ice', candidate); + }; + + webRtcPlayerObj.onVideoInitialised = function(){ + showPlayOverlay(); + resizePlayerStyle(); + } + + webRtcPlayerObj.onDataChannelConnected = function(){ + showTextOverlay('WebRTC connected, waiting for video'); + } + + webRtcPlayerObj.onDataChannelMessage = function (data) { + var view = new Uint8Array(data); + if (view[0] == ToClientMessageType.QualityControlOwnership) + { + let ownership = view[1] == 0 ? false : true; + // If we own the quality control, we can't relenquish it. We only loose + // quality control when another peer asks for it + if (qualityControlOwnershipCheckBox != null) { + qualityControlOwnershipCheckBox.disabled = ownership; + qualityControlOwnershipCheckBox.checked = ownership; + } + } else if (view[0] == ToClientMessageType.Response) { + let response = new TextDecoder("utf-16").decode(data.slice(1)); + for (let listener of responseEventListeners.values()) { + listener(response); + } + } + } + + socket.on('webrtc-answer', function(webRTCData) { + webRtcPlayerObj.receiveAnswer(webRTCData); + let printInterval = 5 * 60 * 1000; /*Print every 5 minutes*/ + let nextPrintDuration = printInterval; + + webRtcPlayerObj.onAggregatedStats = (aggregatedStats) => { + let numberFormat = new Intl.NumberFormat(window.navigator.language, { maximumFractionDigits: 0 }); + let timeFormat = new Intl.NumberFormat(window.navigator.language, { maximumFractionDigits: 0, minimumIntegerDigits: 2 }); + let statsText = ''; + + // Calculate duration of run + let runTime = 
(aggregatedStats.timestamp - aggregatedStats.timestampStart) / 1000; + let timeValues = []; + let timeDurations = [60, 60] + for(let timeIndex = 0; timeIndex < timeDurations.length; timeIndex ++) + { + timeValues.push(runTime % timeDurations[timeIndex]); + runTime = runTime / timeDurations[timeIndex]; + } + timeValues.push(runTime); + + let runTimeSeconds = timeValues[0]; + let runTimeMinutes = Math.floor(timeValues[1]); + let runTimeHours = Math.floor([timeValues[2]]); + + receivedBytesMeasurement = 'B' + receivedBytes = aggregatedStats.hasOwnProperty('bytesReceived') ? aggregatedStats.bytesReceived : 0; + let dataMeasurements = ['kB', 'MB', 'GB']; + for(let index = 0; index < dataMeasurements.length; index++){ + if(receivedBytes < 100 * 1000) + break; + receivedBytes = receivedBytes / 1000; + receivedBytesMeasurement = dataMeasurements[index]; + }; + + statsText += `Duration: ${timeFormat.format((runTimeHours))}:${timeFormat.format((runTimeMinutes))}:${timeFormat.format((runTimeSeconds))}
`; + statsText += `Video Resolution: ${ + aggregatedStats.hasOwnProperty('frameWidth') && aggregatedStats.frameWidth && aggregatedStats.hasOwnProperty('frameHeight') && aggregatedStats.frameHeight? + aggregatedStats.frameWidth + 'x' + aggregatedStats.frameHeight : 'N/A' + }
`; + statsText += `Received (${receivedBytesMeasurement}): ${numberFormat.format((receivedBytes))}
`; + statsText += `Frames Decoded: ${aggregatedStats.hasOwnProperty('framesDecoded') ? numberFormat.format(aggregatedStats.framesDecoded) : 'N/A'}
`; + statsText += `Packets Lost: ${aggregatedStats.hasOwnProperty('packetsLost') ? numberFormat.format(aggregatedStats.packetsLost) : 'N/A'}
`; + statsText += `Bitrate (kbps): ${aggregatedStats.hasOwnProperty('bitrate') ? numberFormat.format(aggregatedStats.bitrate) : 'N/A'}
`; + statsText += `Framerate: ${aggregatedStats.hasOwnProperty('framerate') ? numberFormat.format(aggregatedStats.framerate) : 'N/A'}
`; + statsText += `Frames dropped: ${aggregatedStats.hasOwnProperty('framesDropped') ? numberFormat.format(aggregatedStats.framesDropped) : 'N/A'}
`; + statsText += `Latency (ms): ${aggregatedStats.hasOwnProperty('currentRoundTripTime') ? numberFormat.format(aggregatedStats.currentRoundTripTime * 1000) : 'N/A'}
`; + + let statsDiv = document.getElementById("stats"); + if(statsDiv){ + statsDiv.innerHTML = statsText; + } + + if(print_stats){ + if(aggregatedStats.timestampStart){ + if((aggregatedStats.timestamp - aggregatedStats.timestampStart) > nextPrintDuration ){ + console.log(JSON.stringify(aggregatedStats)); + if(socket.connected) + socket.emit('webrtc-stats', aggregatedStats); + nextPrintDuration += printInterval; + } + } + } + } + + webRtcPlayerObj.aggregateStats(1 * 1000 /*Check every 1 second*/); + + //let displayStats = () => { webRtcPlayerObj.getStats( (s) => { s.forEach(stat => { console.log(JSON.stringify(stat)); }); } ); } + //var displayStatsIntervalId = setInterval(displayStats, 30 * 1000); + }); + + socket.on('webrtc-ice', function(iceCandidate) { + if(webRtcPlayerObj) + webRtcPlayerObj.handleCandidateFromServer(iceCandidate); + }); + + registerInputs(webRtcPlayerObj.video); + + createWebRtcOffer(); + + return webRtcPlayerObj.video; +} + +var styleWidth; +var styleHeight; +var styleTop; +var styleLeft; +var styleCursor = 'default'; +var styleAdditional; + +const ControlSchemeType = { + // A mouse can lock inside the WebRTC player so the user can simply move the + // mouse to control the orientation of the camera. The user presses the + // Escape key to unlock the mouse. + LockedMouse : 0, + + // A mouse can hover over the WebRTC player so the user needs to click and + // drag to control the orientation of the camera. + HoveringMouse : 1 +}; + +var inputOptions = { + // The control scheme controls the behaviour of the mouse when it interacts + // with the WebRTC player. + controlScheme: ControlSchemeType.LockedMouse, + + // Browser keys are those which are typically used by the browser UI. We + // usually want to suppress these to allow, for example, UE4 to show shader + // complexity with the F5 key without the web page refreshing. 
+ suppressBrowserKeys: true, + + // UE4 has a faketouches option which fakes a single finger touch when the + // user drags with their mouse. We may perform the reverse; a single finger + // touch may be converted into a mouse drag UE4 side. This allows a + // non-touch application to be controlled partially via a touch device. + fakeMouseWithTouches: false +}; + +function resizePlayerStyleToFillWindow(playerElement) { + let videoElement = playerElement.getElementsByTagName("VIDEO"); + + // Fill the player display in window, keeping picture's aspect ratio. + let windowAspectRatio = window.innerHeight / window.innerWidth; + let playerAspectRatio = playerElement.clientHeight / playerElement.clientWidth; + // We want to keep the video ratio correct for the video stream + let videoAspectRatio = videoElement.videoHeight / videoElement.videoWidth; + if(isNaN(videoAspectRatio)){ + //Video is not initialised yet so set playerElement to size of window + styleWidth = window.innerWidth; + styleHeight = window.innerHeight; + styleTop = 0; + styleLeft = 0; + playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional; + } else if (windowAspectRatio < playerAspectRatio) { + // Window height is the constraining factor so to keep aspect ratio change width appropriately + styleWidth = Math.floor(window.innerHeight / videoAspectRatio); + styleHeight = window.innerHeight; + styleTop = 0; + styleLeft = Math.floor((window.innerWidth - styleWidth) * 0.5); + //Video is now 100% of the playerElement, so set the playerElement style + playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional; + } else { + // Window width is the constraining factor so to keep aspect ratio change height appropriately + styleWidth = window.innerWidth; + styleHeight 
= Math.floor(window.innerWidth * videoAspectRatio); + styleTop = Math.floor((window.innerHeight - styleHeight) * 0.5); + styleLeft = 0; + //Video is now 100% of the playerElement, so set the playerElement style + playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional; + } +} + +function resizePlayerStyleToActualSize(playerElement) { + let videoElement = playerElement.getElementsByTagName("VIDEO"); + + // Display image in its actual size + styleWidth = videoElement.videoWidth; + styleHeight = videoElement.videoHeight; + styleTop = Math.floor((window.innerHeight - styleHeight) * 0.5); + styleLeft = Math.floor((window.innerWidth - styleWidth) * 0.5); + //Video is now 100% of the playerElement, so set the playerElement style + playerElement.style = "top: " + styleTop + "px; left: " + styleLeft + "px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional; +} + +function resizePlayerStyleToArbitrarySize(playerElement) { + let videoElement = playerElement.getElementsByTagName("VIDEO"); + //Video is now 100% of the playerElement, so set the playerElement style + playerElement.style = "top: 0px; left: 0px; width: " + styleWidth + "px; height: " + styleHeight + "px; cursor: " + styleCursor + "; " + styleAdditional; +} + +function resizePlayerStyle(event) { + var playerElement = document.getElementById('player'); + + if(!playerElement) + return; + + updateVideoStreamSize(); + + // Calculating and normalizing positions depends on the width and height of + // the player. 
+ playerElementClientRect = playerElement.getBoundingClientRect(); + setupNormalizeAndQuantize(); + + if (playerElement.classList.contains('fixed-size')) + return; + + let checkBox = document.getElementById('enlarge-display-to-fill-window-tgl'); + let windowSmallerThanPlayer = window.innerWidth < playerElement.videoWidth || window.innerHeight < playerElement.videoHeight; + if (checkBox != null) { + if (checkBox.checked || windowSmallerThanPlayer) { + resizePlayerStyleToFillWindow(playerElement); + } else { + resizePlayerStyleToActualSize(playerElement); + } + } else { + resizePlayerStyleToArbitrarySize(playerElement); + } +} + +function updateVideoStreamSize() { + if (!matchViewportResolution) { + return; + } + + var now = new Date().getTime(); + if (now - lastTimeResized > 1000) { + var playerElement = document.getElementById('player'); + if (!playerElement) + return; + + let descriptor = { + Console: 'setres ' + playerElement.clientWidth + 'x' + playerElement.clientHeight + }; + emitUIInteraction(descriptor); + console.log(descriptor); + lastTimeResized = new Date().getTime(); + } + else { + console.log('Resizing too often - skipping'); + clearTimeout(resizeTimeout); + resizeTimeout = setTimeout(updateVideoStreamSize, 1000); + } +} + +// Fix for bug in iOS where windowsize is not correct at instance or orientation change +// https://github.com/dimsemenov/PhotoSwipe/issues/1315 +var _orientationChangeTimeout; +function onOrientationChange(event){ + clearTimeout(_orientationChangeTimeout); + _orientationChangeTimeout = setTimeout(function() { + resizePlayerStyle(); + }, 500); +} + +// Must be kept in sync with PixelStreamingProtocol::EToUE4Msg C++ enum. +const MessageType = { + + /**********************************************************************/ + + /* + * Control Messages. Range = 0..49. 
+ */ + IFrameRequest : 0, + RequestQualityControl: 1, + MaxFpsRequest: 2, + AverageBitrateRequest: 3, + StartStreaming: 4, + StopStreaming: 5, + + /**********************************************************************/ + + /* + * Input Messages. Range = 50..89. + */ + + // Generic Input Messages. Range = 50..59. + UIInteraction: 50, + Command: 51, + + // Keyboard Input Message. Range = 60..69. + KeyDown: 60, + KeyUp: 61, + KeyPress: 62, + + // Mouse Input Messages. Range = 70..79. + MouseEnter: 70, + MouseLeave: 71, + MouseDown: 72, + MouseUp: 73, + MouseMove: 74, + MouseWheel: 75, + + // Touch Input Messages. Range = 80..89. + TouchStart: 80, + TouchEnd: 81, + TouchMove: 82, + + /**************************************************************************/ +}; + +// A generic message has a type and a descriptor. +function emitDescriptor(messageType, descriptor) { + // Convert the dscriptor object into a JSON string. + let descriptorAsString = JSON.stringify(descriptor); + + // Add the UTF-16 JSON string to the array byte buffer, going two bytes at + // a time. + let data = new DataView(new ArrayBuffer(1 + 2 + 2 * descriptorAsString.length)); + let byteIdx = 0; + data.setUint8(byteIdx, messageType); + byteIdx++; + data.setUint16(byteIdx, descriptorAsString.length, true); + byteIdx += 2; + for (i = 0; i < descriptorAsString.length; i++) { + data.setUint16(byteIdx, descriptorAsString.charCodeAt(i), true); + byteIdx += 2; + } + sendInputData(data.buffer); +} + +// A UI interation will occur when the user presses a button powered by +// JavaScript as opposed to pressing a button which is part of the pixel +// streamed UI from the UE4 client. +function emitUIInteraction(descriptor) { + emitDescriptor(MessageType.UIInteraction, descriptor); +} + +// A build-in command can be sent to UE4 client. The commands are defined by a +// JSON descriptor and will be executed automatically. +// The currently supported commands are: +// +// 1. 
A command to run any console command: +// "{ ConsoleCommand: }" +// +// 2. A command to change the resolution to the given width and height. +// "{ Resolution: { Width: , Height: } }" +// +// 3. A command to change the encoder settings by reducing the bitrate by the +// given percentage. +// "{ Encoder: { BitrateReduction: } }" +function emitCommand(descriptor) { + emitDescriptor(MessageType.Command, descriptor); +} + +function requestQualityControl() { + sendInputData(new Uint8Array([MessageType.RequestQualityControl]).buffer); +} + +var playerElementClientRect = undefined; +var normalizeAndQuantizeUnsigned = undefined; +var normalizeAndQuantizeSigned = undefined; + +function setupNormalizeAndQuantize() { + let playerElement = document.getElementById('player'); + let videoElement = playerElement.getElementsByTagName("video"); + + if (playerElement && videoElement.length > 0) { + let playerAspectRatio = playerElement.clientHeight / playerElement.clientWidth; + let videoAspectRatio = videoElement[0].videoHeight / videoElement[0].videoWidth; + + // Unsigned XY positions are the ratio (0.0..1.0) along a viewport axis, + // quantized into an uint16 (0..65536). + // Signed XY deltas are the ratio (-1.0..1.0) along a viewport axis, + // quantized into an int16 (-32767..32767). + // This allows the browser viewport and client viewport to have a different + // size. + // Hack: Currently we set an out-of-range position to an extreme (65535) + // as we can't yet accurately detect mouse enter and leave events + // precisely inside a video with an aspect ratio which causes mattes. + if (playerAspectRatio > videoAspectRatio) { + if (print_inputs) { + console.log('Setup Normalize and Quantize for playerAspectRatio > videoAspectRatio'); + } + let ratio = playerAspectRatio / videoAspectRatio; + // Unsigned. 
+ normalizeAndQuantizeUnsigned = (x, y) => { + let normalizedX = x / playerElement.clientWidth; + let normalizedY = ratio * (y / playerElement.clientHeight - 0.5) + 0.5; + if (normalizedX < 0.0 || normalizedX > 1.0 || normalizedY < 0.0 || normalizedY > 1.0) { + return { + inRange: false, + x: 65535, + y: 65535 + }; + } else { + return { + inRange: true, + x: normalizedX * 65536, + y: normalizedY * 65536 + }; + } + } + // Signed. + normalizeAndQuantizeSigned = (x, y) => { + let normalizedX = x / (0.5 * playerElement.clientWidth); + let normalizedY = (ratio * y) / (0.5 * playerElement.clientHeight); + return { + x: normalizedX * 32767, + y: normalizedY * 32767 + }; + } + } else { + if (print_inputs) { + console.log('Setup Normalize and Quantize for playerAspectRatio <= videoAspectRatio'); + } + let ratio = videoAspectRatio / playerAspectRatio; + normalizeAndQuantizeUnsigned = (x, y) => { + // Unsigned. + let normalizedX = ratio * (x / playerElement.clientWidth - 0.5) + 0.5; + let normalizedY = y / playerElement.clientHeight; + if (normalizedX < 0.0 || normalizedX > 1.0 || normalizedY < 0.0 || normalizedY > 1.0) { + return { + inRange: false, + x: 65535, + y: 65535 + }; + } else { + return { + inRange: true, + x: normalizedX * 65536, + y: normalizedY * 65536 + }; + } + } + normalizeAndQuantizeSigned = (x, y) => { + // Signed. 
+ let normalizedX = (ratio * x) / (0.5 * playerElement.clientWidth); + let normalizedY = y / (0.5 * playerElement.clientHeight); + return { + x: normalizedX * 32767, + y: normalizedY * 32767 + }; + } + } + } +} + +function emitMouseMove(x, y, deltaX, deltaY) { + if (print_inputs) { + console.log(`x: ${x}, y:${y}, dX: ${deltaX}, dY: ${deltaY}`); + } + let coord = normalizeAndQuantizeUnsigned(x, y); + let delta = normalizeAndQuantizeSigned(deltaX, deltaY); + var Data = new DataView(new ArrayBuffer(9)); + Data.setUint8(0, MessageType.MouseMove); + Data.setUint16(1, coord.x, true); + Data.setUint16(3, coord.y, true); + Data.setInt16(5, delta.x, true); + Data.setInt16(7, delta.y, true); + sendInputData(Data.buffer); +} + +function emitMouseDown(button, x, y) { + if (print_inputs) { + console.log(`mouse button ${button} down at (${x}, ${y})`); + } + let coord = normalizeAndQuantizeUnsigned(x, y); + var Data = new DataView(new ArrayBuffer(6)); + Data.setUint8(0, MessageType.MouseDown); + Data.setUint8(1, button); + Data.setUint16(2, coord.x, true); + Data.setUint16(4, coord.y, true); + sendInputData(Data.buffer); +} + +function emitMouseUp(button, x, y) { + if (print_inputs) { + console.log(`mouse button ${button} up at (${x}, ${y})`); + } + let coord = normalizeAndQuantizeUnsigned(x, y); + var Data = new DataView(new ArrayBuffer(6)); + Data.setUint8(0, MessageType.MouseUp); + Data.setUint8(1, button); + Data.setUint16(2, coord.x, true); + Data.setUint16(4, coord.y, true); + sendInputData(Data.buffer); +} + +function emitMouseWheel(delta, x, y) { + if (print_inputs) { + console.log(`mouse wheel with delta ${delta} at (${x}, ${y})`); + } + let coord = normalizeAndQuantizeUnsigned(x, y); + var Data = new DataView(new ArrayBuffer(7)); + Data.setUint8(0, MessageType.MouseWheel); + Data.setInt16(1, delta, true); + Data.setUint16(3, coord.x, true); + Data.setUint16(5, coord.y, true); + sendInputData(Data.buffer); +} + +// 
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/button
const MouseButton = {
    MainButton: 0,      // Left button.
    AuxiliaryButton: 1, // Wheel button.
    SecondaryButton: 2  // Right button.
};

// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/buttons
const MouseButtonsMask = {
    PrimaryButton: 1,   // Left button.
    SecondaryButton: 2, // Right button.
    AuxiliaryButton: 4  // Wheel button.
};

// If the user has any mouse buttons pressed then release them.
function releaseMouseButtons(buttons, x, y) {
    if (buttons & MouseButtonsMask.PrimaryButton) {
        emitMouseUp(MouseButton.MainButton, x, y);
    }
    if (buttons & MouseButtonsMask.SecondaryButton) {
        emitMouseUp(MouseButton.SecondaryButton, x, y);
    }
    if (buttons & MouseButtonsMask.AuxiliaryButton) {
        emitMouseUp(MouseButton.AuxiliaryButton, x, y);
    }
}

// If the user has any mouse buttons pressed then press them again.
function pressMouseButtons(buttons, x, y) {
    if (buttons & MouseButtonsMask.PrimaryButton) {
        emitMouseDown(MouseButton.MainButton, x, y);
    }
    if (buttons & MouseButtonsMask.SecondaryButton) {
        emitMouseDown(MouseButton.SecondaryButton, x, y);
    }
    if (buttons & MouseButtonsMask.AuxiliaryButton) {
        emitMouseDown(MouseButton.AuxiliaryButton, x, y);
    }
}

// Hook up all input handlers that do not depend on the chosen control scheme.
function registerInputs(playerElement) {
    if (!playerElement)
        return;

    registerMouseEnterAndLeaveEvents(playerElement);
    registerTouchEvents(playerElement);
}

// Forward mouse enter/leave to UE4 and replay any held buttons so the app's
// idea of the button state matches the browser's when the cursor crosses the
// player boundary.
function registerMouseEnterAndLeaveEvents(playerElement) {
    playerElement.onmouseenter = function (e) {
        if (print_inputs) {
            console.log('mouse enter');
        }
        var Data = new DataView(new ArrayBuffer(1));
        Data.setUint8(0, MessageType.MouseEnter);
        sendInputData(Data.buffer);
        playerElement.pressMouseButtons(e);
    }

    playerElement.onmouseleave = function (e) {
        if (print_inputs) {
            console.log('mouse leave');
        }
        var Data = new DataView(new ArrayBuffer(1));
        Data.setUint8(0, MessageType.MouseLeave);
        sendInputData(Data.buffer);
        playerElement.releaseMouseButtons(e);
    }
}

// A locked mouse works by the user clicking in the browser player and the
// cursor disappears and is locked. The user moves the cursor and the camera
// moves, for example. The user presses escape to free the mouse.
function registerLockedMouseEvents(playerElement) {
    var x = playerElement.width / 2;
    var y = playerElement.height / 2;

    playerElement.requestPointerLock = playerElement.requestPointerLock || playerElement.mozRequestPointerLock;
    document.exitPointerLock = document.exitPointerLock || document.mozExitPointerLock;

    playerElement.onclick = function () {
        playerElement.requestPointerLock();
    };

    // Respond to lock state change events.
    document.addEventListener('pointerlockchange', lockStateChange, false);
    document.addEventListener('mozpointerlockchange', lockStateChange, false);

    function lockStateChange() {
        if (document.pointerLockElement === playerElement ||
            document.mozPointerLockElement === playerElement) {
            console.log('Pointer locked');
            document.addEventListener("mousemove", updatePosition, false);
        } else {
            console.log('The pointer lock status is now unlocked');
            document.removeEventListener("mousemove", updatePosition, false);
        }
    }

    // Integrate relative movement deltas into an absolute position, wrapping
    // around the player edges on both axes.
    function updatePosition(e) {
        x += e.movementX;
        y += e.movementY;
        if (x > styleWidth) {
            x -= styleWidth;
        }
        if (y > styleHeight) {
            y -= styleHeight;
        }
        if (x < 0) {
            x = styleWidth + x;
        }
        if (y < 0) {
            // BUG FIX: was 'styleHeight - y', which for negative y produced a
            // value beyond styleHeight instead of wrapping; wrap like x does.
            y = styleHeight + y;
        }
        emitMouseMove(x, y, e.movementX, e.movementY);
    }

    playerElement.onmousedown = function (e) {
        emitMouseDown(e.button, x, y);
    };

    playerElement.onmouseup = function (e) {
        emitMouseUp(e.button, x, y);
    };

    playerElement.onmousewheel = function (e) {
        emitMouseWheel(e.wheelDelta, x, y);
    }

    playerElement.pressMouseButtons = function (e) {
        pressMouseButtons(e.buttons, x, y);
    }

    playerElement.releaseMouseButtons = function (e) {
        releaseMouseButtons(e.buttons, x, y);
    }
}

// A hovering mouse works by the user clicking the mouse button when they want
// the cursor to have an effect over the video. Otherwise the cursor just
// passes over the browser.
function registerHoveringMouseEvents(playerElement) {
    styleCursor = 'none'; // We will rely on UE4 client's software cursor.

    playerElement.onmousemove = function (e) {
        emitMouseMove(e.offsetX, e.offsetY, e.movementX, e.movementY);
        e.preventDefault();
    }

    playerElement.onmousedown = function (e) {
        emitMouseDown(e.button, e.offsetX, e.offsetY);
        e.preventDefault();
    };

    playerElement.onmouseup = function (e) {
        emitMouseUp(e.button, e.offsetX, e.offsetY);
        e.preventDefault();
    };

    // When the context menu is shown then it is safest to release the button
    // which was pressed when the event happened. This will guarantee we will
    // get at least one mouse up corresponding to a mouse down event. Otherwise
    // the mouse can get stuck.
    // https://github.com/facebook/react/issues/5531
    playerElement.oncontextmenu = function (e) {
        emitMouseUp(e.button, e.offsetX, e.offsetY);
    }

    if ('onmousewheel' in playerElement) {
        playerElement.onmousewheel = function (e) {
            emitMouseWheel(e.wheelDelta, e.offsetX, e.offsetY);
            e.preventDefault();
        }
    } else {
        playerElement.addEventListener('DOMMouseScroll', function (e) {
            emitMouseWheel(e.detail * -120, e.offsetX, e.offsetY);
            e.preventDefault();
        }, false)
    }

    playerElement.pressMouseButtons = function (e) {
        pressMouseButtons(e.buttons, e.offsetX, e.offsetY);
    }

    playerElement.releaseMouseButtons = function (e) {
        releaseMouseButtons(e.buttons, e.offsetX, e.offsetY);
    }
}

// Forward touch input to UE4, either as real touch messages or faked as a
// single-finger mouse drag (inputOptions.fakeMouseWithTouches).
function registerTouchEvents(playerElement) {

    // We need to assign a unique identifier to each finger.
    // We do this by mapping each Touch object to the identifier.
    var fingers = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0];
    var fingerIds = {};

    function rememberTouch(touch) {
        let finger = fingers.pop();
        if (finger === undefined) {
            console.log('exhausted touch indentifiers');
        }
        fingerIds[touch.identifier] = finger;
    }

    function forgetTouch(touch) {
        fingers.push(fingerIds[touch.identifier]);
        delete fingerIds[touch.identifier];
    }

    // Serialise a list of touches as: uint8 type, uint8 count, then per touch
    // uint16 x, uint16 y, uint8 finger id, uint8 quantized force.
    function emitTouchData(type, touches) {
        let data = new DataView(new ArrayBuffer(2 + 6 * touches.length));
        data.setUint8(0, type);
        data.setUint8(1, touches.length);
        let byte = 2;
        for (let t = 0; t < touches.length; t++) {
            let touch = touches[t];
            let x = touch.clientX - playerElement.offsetLeft;
            let y = touch.clientY - playerElement.offsetTop;
            if (print_inputs) {
                console.log(`F${fingerIds[touch.identifier]}=(${x}, ${y})`);
            }
            let coord = normalizeAndQuantizeUnsigned(x, y);
            data.setUint16(byte, coord.x, true);
            byte += 2;
            data.setUint16(byte, coord.y, true);
            byte += 2;
            // BUG FIX: setUint8 takes no little-endian flag; the stray third
            // argument has been removed.
            data.setUint8(byte, fingerIds[touch.identifier]);
            byte += 1;
            // force is between 0.0 and 1.0 so quantize into byte.
            data.setUint8(byte, 255 * touch.force);
            byte += 1;
        }
        sendInputData(data.buffer);
    }

    if (inputOptions.fakeMouseWithTouches) {

        var finger = undefined;

        playerElement.ontouchstart = function (e) {
            if (finger === undefined) {
                let firstTouch = e.changedTouches[0];
                finger = {
                    id: firstTouch.identifier,
                    x: firstTouch.clientX - playerElementClientRect.left,
                    y: firstTouch.clientY - playerElementClientRect.top
                };
                // Hack: Mouse events require an enter and leave so we just
                // enter and leave manually with each touch as this event
                // is not fired with a touch device.
                playerElement.onmouseenter(e);
                emitMouseDown(MouseButton.MainButton, finger.x, finger.y);
            }
            e.preventDefault();
        }

        playerElement.ontouchend = function (e) {
            // BUG FIX: guard against 'finger' being undefined (e.g. a touchend
            // arriving with no tracked touch) before dereferencing finger.id.
            if (finger !== undefined) {
                for (let t = 0; t < e.changedTouches.length; t++) {
                    let touch = e.changedTouches[t];
                    if (touch.identifier === finger.id) {
                        let x = touch.clientX - playerElementClientRect.left;
                        let y = touch.clientY - playerElementClientRect.top;
                        emitMouseUp(MouseButton.MainButton, x, y);
                        // Hack: Manual mouse leave event.
                        playerElement.onmouseleave(e);
                        finger = undefined;
                        break;
                    }
                }
            }
            e.preventDefault();
        }

        playerElement.ontouchmove = function (e) {
            // BUG FIX: same guard as ontouchend.
            if (finger !== undefined) {
                for (let t = 0; t < e.touches.length; t++) {
                    let touch = e.touches[t];
                    if (touch.identifier === finger.id) {
                        let x = touch.clientX - playerElementClientRect.left;
                        let y = touch.clientY - playerElementClientRect.top;
                        emitMouseMove(x, y, x - finger.x, y - finger.y);
                        finger.x = x;
                        finger.y = y;
                        break;
                    }
                }
            }
            e.preventDefault();
        }
    } else {
        playerElement.ontouchstart = function (e) {
            // Assign a unique identifier to each touch.
            for (let t = 0; t < e.changedTouches.length; t++) {
                rememberTouch(e.changedTouches[t]);
            }

            if (print_inputs) {
                console.log('touch start');
            }
            emitTouchData(MessageType.TouchStart, e.changedTouches);
            e.preventDefault();
        }

        playerElement.ontouchend = function (e) {
            if (print_inputs) {
                console.log('touch end');
            }
            emitTouchData(MessageType.TouchEnd, e.changedTouches);

            // Re-cycle unique identifiers previously assigned to each touch.
            for (let t = 0; t < e.changedTouches.length; t++) {
                forgetTouch(e.changedTouches[t]);
            }
            e.preventDefault();
        }

        playerElement.ontouchmove = function (e) {
            if (print_inputs) {
                console.log('touch move');
            }
            emitTouchData(MessageType.TouchMove, e.touches);
            e.preventDefault();
        }
    }
}
// Browser keys do not have a charCode so we only need to test keyCode.
function isKeyCodeBrowserKey(keyCode) {
    // Function keys (F1..F12) or tab key.
    return keyCode >= 112 && keyCode <= 123 || keyCode == 9;
}

// Route document-level keyboard events into the UE4 input stream. Browser
// shortcut keys are optionally suppressed so they reach UE4 instead of
// triggering browser behaviour.
function registerKeyboardEvents() {
    document.onkeydown = function (e) {
        if (print_inputs) {
            console.log(`key down ${e.keyCode}, repeat = ${e.repeat}`);
        }
        sendInputData(new Uint8Array([MessageType.KeyDown, e.keyCode, e.repeat]).buffer);
        if (inputOptions.suppressBrowserKeys && isKeyCodeBrowserKey(e.keyCode)) {
            e.preventDefault();
        }
    };

    document.onkeyup = function (e) {
        if (print_inputs) {
            console.log(`key up ${e.keyCode}`);
        }
        sendInputData(new Uint8Array([MessageType.KeyUp, e.keyCode]).buffer);
        if (inputOptions.suppressBrowserKeys && isKeyCodeBrowserKey(e.keyCode)) {
            e.preventDefault();
        }
    };

    document.onkeypress = function (e) {
        if (print_inputs) {
            console.log(`key press ${e.charCode}`);
        }
        let data = new DataView(new ArrayBuffer(3));
        data.setUint8(0, MessageType.KeyPress);
        data.setUint16(1, e.charCode, true);
        sendInputData(data.buffer);
    }
}

// Toggle visibility of the overlay settings panel.
function onExpandOverlay_Click() {
    let subElement = document.getElementById('overlaySettings');
    if (subElement.style.display === "none" || subElement.style.display === "") {
        subElement.style.display = "block";
    } else {
        subElement.style.display = "none";
    }
}

// Entry point for (re)starting a session: either show the connect overlay or
// connect immediately depending on configuration.
function start() {
    let statsDiv = document.getElementById("stats");
    if (statsDiv) {
        statsDiv.innerHTML = 'Not connected';
    }

    if (!connect_on_load || is_reconnection) {
        showConnectOverlay();
        resizePlayerStyle();
    } else {
        connect();
    }
}

// Open the socket.io connection to the Cirrus signalling server and wire up
// all of its lifecycle handlers (including automatic reconnection).
function connect() {
    socket = io();

    socket.on('clientConfig', function (clientConfig) {
        onClientConfig(clientConfig);
    });

    socket.on('message', function (data) {
        console.log(`unrecognised message ${data.byteLength}: ${data.slice(0, 50).toString("hex")}`);
    });

    socket.on('clientCount', function (data) {
        var kickButton = document.getElementById('kick-other-players-button');
        if (kickButton)
            kickButton.value = `Kick (${data.count})`;
    });

    socket.on('connect', () => {
        log("connected");
        sendUserConfig();
    });

    socket.on('error', (error) => {
        console.log(`WS error ${error}`);
    });

    socket.on('disconnect', (reason) => {
        console.log(`Connection is closed: ${reason}`);
        socket.close();
        socket = undefined;
        is_reconnection = true;

        // destroy `webRtcPlayerObj` if any
        let playerDiv = document.getElementById('player');
        if (webRtcPlayerObj) {
            playerDiv.removeChild(webRtcPlayerObj.video);
            webRtcPlayerObj.close();
            webRtcPlayerObj = undefined;
        }

        start();
    });
}

/**
 * Config data to send to the Cirrus web server.
 */
function sendUserConfig() {
    // BUG FIX: these were implicit globals; declare them locally.
    let userConfig = {
        emitData: 'ArrayBuffer'
    };
    let userConfigString = JSON.stringify(userConfig);
    log(`userConfig = ${userConfigString}`);
    socket.emit('userConfig', userConfigString);
}

/**
 * Config data received from WebRTC sender via the Cirrus web server.
 * Creates the WebRTC player and registers the configured control scheme.
 */
function onClientConfig(clientConfig) {
    log(`clientConfig = ${JSON.stringify(clientConfig)}`);

    let playerDiv = document.getElementById('player');
    let playerElement = setupWebRtcPlayer(playerDiv, clientConfig);
    resizePlayerStyle();

    switch (inputOptions.controlScheme) {
        case ControlSchemeType.HoveringMouse:
            registerHoveringMouseEvents(playerElement);
            break;
        case ControlSchemeType.LockedMouse:
            registerLockedMouseEvents(playerElement);
            break;
        default:
            console.log(`ERROR: Unknown control scheme ${inputOptions.controlScheme}`);
            registerLockedMouseEvents(playerElement);
            break;
    }
}

// Page onload handler: wire up static HTML, keyboard input, then start.
function load() {
    setupHtmlEvents();
    registerKeyboardEvents();
    start();
}
b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/scripts/webRtcPlayer.js @@ -0,0 +1,288 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +// universal module definition - read https://www.davidbcalhoun.com/2014/what-is-amd-commonjs-and-umd/ + +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(["./adapter"], factory); + } else if (typeof exports === 'object') { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(require("./adapter")); + } else { + // Browser globals (root is window) + root.webRtcPlayer = factory(root.adapter); + } +}(this, function (adapter) { + + function webRtcPlayer(parOptions) { + parOptions = parOptions || {}; + + var self = this; + + //********************** + //Config setup + //**********************; + this.cfg = parOptions.peerConnectionOptions || {}; + this.cfg.sdpSemantics = 'unified-plan'; + this.pcClient = null; + this.dcClient = null; + this.tnClient = null; + + this.sdpConstraints = { + offerToReceiveAudio: 1, + offerToReceiveVideo: 1 + }; + + // See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values + this.dataChannelOptions = {ordered: true}; + + //********************** + //Functions + //********************** + + //Create Video element and expose that as a parameter + createWebRtcVideo = function() { + var video = document.createElement('video'); + + video.id = "streamingVideo"; + video.playsInline = true; + video.addEventListener('loadedmetadata', function(e){ + if(self.onVideoInitialised){ + self.onVideoInitialised(); + } + }, true); + return video; + } + + this.video = createWebRtcVideo(); + + onsignalingstatechange = function(state) { + console.info('signaling state change:', state) + }; + + oniceconnectionstatechange = function(state) { + console.info('ice connection state change:', state) 
+ }; + + onicegatheringstatechange = function(state) { + console.info('ice gathering state change:', state) + }; + + handleOnTrack = function(e){ + console.log('handleOnTrack', e.streams); + if (self.video.srcObject !== e.streams[0]) { + console.log('setting video stream from ontrack'); + self.video.srcObject = e.streams[0]; + } + }; + + setupDataChannel = function(pc, label, options) { + try { + var datachannel = pc.createDataChannel(label, options) + console.log(`Created datachannel (${label})`) + + datachannel.onopen = function (e) { + console.log(`data channel (${label}) connect`) + if(self.onDataChannelConnected){ + self.onDataChannelConnected(); + } + } + + datachannel.onclose = function (e) { + console.log(`data channel (${label}) closed`) + } + + datachannel.onmessage = function (e) { + console.log(`Got message (${label})`, e.data) + if (self.onDataChannelMessage) + self.onDataChannelMessage(e.data); + } + + return datachannel; + } catch (e) { + console.warn('No data channel', e); + return null; + } + } + + onicecandidate = function (e) { + console.log('ICE candidate', e) + if (e.candidate) { + self.onWebRtcCandidate(JSON.stringify(e.candidate)); + } + }; + + handleCreateOffer = function (pc) { + pc.createOffer(self.sdpConstraints).then(function (offerDesc) { + pc.setLocalDescription(offerDesc); + if (self.onWebRtcOffer) { + // (andriy): increase start bitrate from 300 kbps to 20 mbps and max bitrate from 2.5 mbps to 100 mbps + // (100 mbps means we don't restrict encoder at all) + // after we `setLocalDescription` because other browsers are not so happy to see google-specific config + offerDesc.sdp = offerDesc.sdp.replace(/(a=fmtp:\d+ .*level-asymmetry-allowed=.*)\r\n/gm, "$1;x-google-start-bitrate=20000;x-google-max-bitrate=100000\r\n"); + //console.log('Sending offer: ', offerDesc) + self.onWebRtcOffer(JSON.stringify(offerDesc)); + } + }, + function () { console.warn("Couldn't create offer") }); + } + + setupPeerConnection = function (pc) { + if 
(pc.SetBitrate) + console.log("Hurray! there's RTCPeerConnection.SetBitrate function"); + + //Setup peerConnection events + pc.onsignalingstatechange = onsignalingstatechange; + pc.oniceconnectionstatechange = oniceconnectionstatechange; + pc.onicegatheringstatechange = onicegatheringstatechange; + + pc.ontrack = handleOnTrack; + pc.onicecandidate = onicecandidate; + }; + + generateAggregatedStatsFunction = function(){ + if(!self.aggregatedStats) + self.aggregatedStats = {}; + + return function(stats){ + //console.log('Printing Stats'); + + let newStat = {}; + stats.forEach(stat => { + //console.log(JSON.stringify(stat)) + if (stat.type == 'inbound-rtp' + && !stat.isRemote + && (stat.mediaType == 'video' || stat.id.toLowerCase().includes('video'))) { + + newStat.timestamp = stat.timestamp; + newStat.bytesReceived = stat.bytesReceived; + newStat.framesDecoded = stat.framesDecoded; + newStat.packetsLost = stat.packetsLost; + newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived; + newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded; + newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? 
self.aggregatedStats.timestampStart : stat.timestamp; + + if(self.aggregatedStats && self.aggregatedStats.timestamp){ + if(self.aggregatedStats.bytesReceived){ + // bitrate = bits received since last time / number of ms since last time + //This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other) + newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp); + newStat.bitrate = Math.floor(newStat.bitrate); + newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate + newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate + } + + if(self.aggregatedStats.bytesReceivedStart){ + newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart); + newStat.avgBitrate = Math.floor(newStat.avgBitrate); + } + + if(self.aggregatedStats.framesDecoded){ + // framerate = frames decoded since last time / number of seconds since last time + newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000); + newStat.framerate = Math.floor(newStat.framerate); + newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate + newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? 
self.aggregatedStats.highFramerate : newStat.framerate + } + + if(self.aggregatedStats.framesDecodedStart){ + newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000); + newStat.avgframerate = Math.floor(newStat.avgframerate); + } + } + } + + //Read video track stats + if(stat.type == 'track' && stat.trackIdentifier == 'video_label'){ + newStat.framesDropped = stat.framesDropped; + newStat.framesReceived = stat.framesReceived; + newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100; + newStat.frameHeight = stat.frameHeight; + newStat.frameWidth = stat.frameWidth; + newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight; + newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth; + } + + if(stat.type =='candidate-pair' && stat.hasOwnProperty('currentRoundTripTime')){ + newStat.currentRoundTripTime = stat.currentRoundTripTime; + } + }); + + //console.log(JSON.stringify(newStat)); + self.aggregatedStats = newStat; + + if(self.onAggregatedStats) + self.onAggregatedStats(newStat) + } + }; + + //********************** + //Public functions + //********************** + + //This is called when revceiving new ice candidates individually instead of part of the offer + //This is currently not used but would be called externally from this class + this.handleCandidateFromServer = function(iceCandidate) { + console.log("ICE candidate: ", iceCandidate); + let candidate = new RTCIceCandidate(iceCandidate); + self.pcClient.addIceCandidate(candidate).then(_=>{ + console.log('ICE candidate successfully added'); + }); + }; + + //Called externaly to create an offer for the server + this.createOffer = function() { + if(self.pcClient){ + console.log("Closing existing PeerConnection") + 
self.pcClient.close(); + self.pcClient = null; + } + self.pcClient = new RTCPeerConnection(self.cfg); + setupPeerConnection(self.pcClient); + self.dcClient = setupDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions); + handleCreateOffer(self.pcClient); + }; + + //Called externaly when an answer is received from the server + this.receiveAnswer = function(answer) { + console.log('Received answer', answer); + var answerDesc = new RTCSessionDescription(answer); + self.pcClient.setRemoteDescription(answerDesc); + }; + + this.close = function(){ + if(self.pcClient){ + console.log("Closing existing peerClient") + self.pcClient.close(); + self.pcClient = null; + } + if(self.aggregateStatsIntervalId) + clearInterval(self.aggregateStatsIntervalId); + } + + //Sends data across the datachannel + this.send = function(data){ + if(self.dcClient && self.dcClient.readyState == 'open'){ + //console.log('Sending data on dataconnection', self.dcClient) + self.dcClient.send(data); + } + }; + + this.getStats = function(onStats){ + if(self.pcClient && onStats){ + self.pcClient.getStats(null).then((stats) => { onStats(stats); }); + } + } + + this.aggregateStats = function(checkInterval){ + let calcAggregatedStats = generateAggregatedStatsFunction(); + let printAggregatedStats = () => { self.getStats(calcAggregatedStats); } + self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval); + } + }; + + return webRtcPlayer; + +})); diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/setup.bat b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/setup.bat new file mode 100644 index 000000000000..be1fa7507c53 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/setup.bat @@ -0,0 +1,6 @@ +:: Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+pushd %~dp0 + +npm install + +popd diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Bootstrap.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Bootstrap.tps new file mode 100644 index 000000000000..c77b00c66432 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Bootstrap.tps @@ -0,0 +1,13 @@ + + + bootstrap v3.x, v4.x + /Engine/Source/Programs/PixelStreaming/WebServers/ + This is a requirement to using Bootstrap, providing better UI elements for the client web pages created for demoing pixelstreaming + https://github.com/twitter/bootstrap/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/Bootstrap_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Express.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Express.tps new file mode 100644 index 000000000000..34b7c117e2d9 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Express.tps @@ -0,0 +1,13 @@ + + + Express v4.16.2 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Express is a web framework for Node.js. + https://github.com/expressjs/express/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Plugins/Experimental/PixelStreaming/Source/Express_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/FontAwesome.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/FontAwesome.tps new file mode 100644 index 000000000000..21aaf65860a9 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/FontAwesome.tps @@ -0,0 +1,11 @@ + + + FontAwesome Free v5.1 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Provides a consistent icon style to use in the sites for demoing pixelstreaming. 
+ https://github.com/FortAwesome/Font-Awesome/blob/master/LICENSE.txt + + P4 + + None + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Helmet.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Helmet.tps new file mode 100644 index 000000000000..9edfb72ff462 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Helmet.tps @@ -0,0 +1,13 @@ + + + Helmet v3.10.0 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Helmet helps you secure your Express apps by setting various HTTP headers. + https://github.com/helmetjs/helmet/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Plugins/Experimental/PixelStreaming/Source/Helmet_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/JQuery.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/JQuery.tps new file mode 100644 index 000000000000..9b9456ac6730 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/JQuery.tps @@ -0,0 +1,13 @@ + + + JQuery + /Engine/Source/Programs/PixelStreaming/WebServers/ + This is a requirement to using Bootstrap, providing access to the DOM in the browser for easier and more advanced client side interactions and UI. Used for Project Cirrus. 
+ https://github.com/jquery/jquery/blob/master/LICENSE.txt; https://js.foundation/pdf/ip-policy.pdf + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/JQuery_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Popper.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Popper.tps new file mode 100644 index 000000000000..2c8925fc5875 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Popper.tps @@ -0,0 +1,13 @@ + + + Popper.js v1.14.3 + /Engine/Source/Programs/PixelStreaming/WebServers/ + A requirement to using Bootstrap. + https://github.com/FezVrasta/popper.js/blob/master/LICENSE.md + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/Popper.js_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Socket.io.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Socket.io.tps new file mode 100644 index 000000000000..cdc50d6810bf --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Socket.io.tps @@ -0,0 +1,13 @@ + + + Socket.io v2.0.4 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Enables real-time bidirectional event-based communication. 
+ https://github.com/socketio/socket.io/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Plugins/Experimental/PixelStreaming/Source/Socket.io_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/WebRTCadapter.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/WebRTCadapter.tps new file mode 100644 index 000000000000..a7480a8b0369 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/WebRTCadapter.tps @@ -0,0 +1,13 @@ + + + WebRTC adapter (adapter.js) v6.3.2 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Used as a cross browser interface for WebRTC. + https://github.com/webrtc/adapter/blob/master/LICENSE.md + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/WebRTCadapter_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Yargs.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Yargs.tps new file mode 100644 index 000000000000..306a10295b07 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/Yargs.tps @@ -0,0 +1,13 @@ + + + Yargs v11.0.0 + /Engine/Source/Programs/PixelStreaming/WebServers/ + A module for Node.js, used to parse command line arguments. 
+ https://github.com/yargs/yargs/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Plugins/Experimental/PixelStreaming/Source/Yargs_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/expression-session.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/expression-session.tps new file mode 100644 index 000000000000..5188339d7261 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/expression-session.tps @@ -0,0 +1,13 @@ + + + express-session v1.15.6 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Used to create session id's used to remember a person who has logged into a server across page loads so that they don't have to log in every time the reload or navigate to a different page hosted on a webserver + https://github.com/expressjs/session/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/express-session_license.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/nodebcryptjs.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/nodebcryptjs.tps new file mode 100644 index 000000000000..a2aa49fe002d --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/nodebcryptjs.tps @@ -0,0 +1,13 @@ + + + node.bcrypt.js v3.0.0 + /Engine/Source/Programs/PixelStreaming/WebServers/ + This is used to verify passwords match the ones stored using the bcrypt algorithm. The passwords are always stored using bcrypt and so we never know the unencrypted password. This allows us to implement a authentication system on the web server so that only people we give accounts to can access the web server. 
This use is only for prototype stage, production will use the Epic unreal account system + https://github.com/kelektiv/node.bcrypt.js/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/node.bcrypt.js_License.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport-local.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport-local.tps new file mode 100644 index 000000000000..da297f64a51f --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport-local.tps @@ -0,0 +1,13 @@ + + + passport-local v1.0.0 + /Engine/Source/Programs/PixelStreaming/WebServers/ + This is a implementation for the Passport middleware that allows you to store user credentials locally on the machine (passwords are stored with bcrypt and not reversible) to be used to authenticate users on a node.js webserver. This use is only for prototype stage, production will use the Epic unreal account system. + https://github.com/jaredhanson/passport-local/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/passport-local_license.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport.tps b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport.tps new file mode 100644 index 000000000000..fa1a6bbf4000 --- /dev/null +++ b/Engine/Source/Programs/PixelStreaming/WebServers/SignallingWebServer/tps/passport.tps @@ -0,0 +1,13 @@ + + + passport v0.4.0 + /Engine/Source/Programs/PixelStreaming/WebServers/ + Is the authentication middleware that adds the ability to securely log in a user to the webserver. This is a generic framework that you add specific implementation frameworks (separate TPS's will be provided for these) to to provide authentication on a node.js webserver. 
+ https://github.com/jaredhanson/passport/blob/master/LICENSE + + Licensees + Git + P4 + + /Engine/Source/ThirdParty/Licenses/passport_license.txt + \ No newline at end of file diff --git a/Engine/Source/Programs/UnrealBuildTool/Configuration/TargetRules.cs b/Engine/Source/Programs/UnrealBuildTool/Configuration/TargetRules.cs index e5b96942bf8c..8541422f783f 100644 --- a/Engine/Source/Programs/UnrealBuildTool/Configuration/TargetRules.cs +++ b/Engine/Source/Programs/UnrealBuildTool/Configuration/TargetRules.cs @@ -363,7 +363,7 @@ namespace UnrealBuildTool /// Whether to compile the developer tools. /// [RequiresUniqueBuildEnvironment] - public bool bBuildDeveloperTools = true; + public bool? bBuildDeveloperTools; /// /// Whether to force compiling the target platform modules, even if they wouldn't normally be built. @@ -1550,7 +1550,8 @@ namespace UnrealBuildTool public bool bBuildDeveloperTools { - get { return Inner.bBuildDeveloperTools; } + // appropriate default will be set by this point + get { return Inner.bBuildDeveloperTools.Value; } } public bool bForceBuildTargetPlatforms diff --git a/Engine/Source/Programs/UnrealBuildTool/Configuration/UEBuildTarget.cs b/Engine/Source/Programs/UnrealBuildTool/Configuration/UEBuildTarget.cs index d2faf448ab18..679dd2e77fe3 100644 --- a/Engine/Source/Programs/UnrealBuildTool/Configuration/UEBuildTarget.cs +++ b/Engine/Source/Programs/UnrealBuildTool/Configuration/UEBuildTarget.cs @@ -1567,14 +1567,20 @@ namespace UnrealBuildTool return false; } + // Get the project directory. We will ignore any manifests under this directory (ie. anything not under engine/enterprise folders). 
+ DirectoryReference ProjectDir = DirectoryReference.FromFile(ProjectFile); + // Read any the existing module manifests under the engine directory Dictionary ExistingFileToManifest = new Dictionary(); foreach(FileReference ExistingFile in FileReferenceToModuleManifestPairs.Select(x => x.Key)) { - ModuleManifest ExistingManifest; - if(ExistingFile.IsUnderDirectory(UnrealBuildTool.EngineDirectory) && ModuleManifest.TryRead(ExistingFile, out ExistingManifest)) + if(ProjectDir == null || !ExistingFile.IsUnderDirectory(ProjectDir)) { - ExistingFileToManifest.Add(ExistingFile, ExistingManifest); + ModuleManifest ExistingManifest; + if(ModuleManifest.TryRead(ExistingFile, out ExistingManifest)) + { + ExistingFileToManifest.Add(ExistingFile, ExistingManifest); + } } } @@ -3651,18 +3657,8 @@ namespace UnrealBuildTool GlobalCompileEnvironment.Definitions.Add(String.Format("IS_MONOLITHIC={0}", ShouldCompileMonolithic() ? "1" : "0")); - if (Rules.bCompileAgainstEngine) - { - GlobalCompileEnvironment.Definitions.Add("WITH_ENGINE=1"); - GlobalCompileEnvironment.Definitions.Add( - String.Format("WITH_UNREAL_DEVELOPER_TOOLS={0}", Rules.bBuildDeveloperTools ? "1" : "0")); - } - else - { - GlobalCompileEnvironment.Definitions.Add("WITH_ENGINE=0"); - // Can't have developer tools w/out engine - GlobalCompileEnvironment.Definitions.Add("WITH_UNREAL_DEVELOPER_TOOLS=0"); - } + GlobalCompileEnvironment.Definitions.Add(String.Format("WITH_ENGINE={0}", Rules.bCompileAgainstEngine ? "1" : "0")); + GlobalCompileEnvironment.Definitions.Add(String.Format("WITH_UNREAL_DEVELOPER_TOOLS={0}", Rules.bBuildDeveloperTools ? "1" : "0")); // Set a macro to control whether to initialize ApplicationCore. Command line utilities should not generally need this. 
if (Rules.bCompileAgainstApplicationCore) diff --git a/Engine/Source/Programs/UnrealBuildTool/System/RulesAssembly.cs b/Engine/Source/Programs/UnrealBuildTool/System/RulesAssembly.cs index ac21cff2b20a..7f3b84e48b75 100644 --- a/Engine/Source/Programs/UnrealBuildTool/System/RulesAssembly.cs +++ b/Engine/Source/Programs/UnrealBuildTool/System/RulesAssembly.cs @@ -479,12 +479,18 @@ namespace UnrealBuildTool if (Rules.bCompileLeanAndMeanUE) { Rules.bBuildEditor = false; - Rules.bBuildDeveloperTools = false; + Rules.bBuildDeveloperTools = Rules.bBuildDeveloperTools ?? false; Rules.bCompileSimplygon = false; Rules.bCompileSimplygonSSF = false; Rules.bCompileSpeedTree = false; } + // if the bBuildDeveloperTools switch hasn't been defined, default it to the bCompileAgainstEngine switch. + if (!Rules.bBuildDeveloperTools.HasValue) + { + Rules.bBuildDeveloperTools = Rules.bCompileAgainstEngine; + } + // Automatically include CoreUObject if (Rules.bCompileAgainstEngine) { diff --git a/Engine/Source/Programs/UnrealFrontend/Private/Commands/UserInterfaceCommand.cpp b/Engine/Source/Programs/UnrealFrontend/Private/Commands/UserInterfaceCommand.cpp index 18423a58e561..ee892e61b1a1 100644 --- a/Engine/Source/Programs/UnrealFrontend/Private/Commands/UserInterfaceCommand.cpp +++ b/Engine/Source/Programs/UnrealFrontend/Private/Commands/UserInterfaceCommand.cpp @@ -116,10 +116,8 @@ void FUserInterfaceCommand::InitializeSlateApplication( const FString& LayoutIni if (bAllowDebugTools) { - static const FName SlateReflectorModuleName("SlateReflector"); - FModuleManager::LoadModuleChecked(SlateReflectorModuleName); - ISlateReflectorModule* SlateReflectorModule = FModuleManager::GetModulePtr(SlateReflectorModuleName); - if (SlateReflectorModule != nullptr) + ISlateReflectorModule* SlateReflectorModule = FModuleManager::LoadModulePtr("SlateReflector"); + if (SlateReflectorModule) { SlateReflectorModule->RegisterTabSpawner(UserInterfaceCommand::DeveloperTools); } diff --git 
a/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Build.cs b/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Build.cs index 004b71961e53..6c7a8474c72c 100644 --- a/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Build.cs +++ b/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Build.cs @@ -6,7 +6,11 @@ public class UnrealFrontend : ModuleRules { public UnrealFrontend( ReadOnlyTargetRules Target ) : base(Target) { - PublicIncludePaths.Add("Runtime/Launch/Public"); + PublicIncludePaths.AddRange( + new string[] { + "Runtime/Launch/Public", + } + ); PrivateIncludePaths.AddRange( new string[] { @@ -31,8 +35,8 @@ public class UnrealFrontend : ModuleRules "ProjectLauncher", "Projects", "SessionFrontend", - "SessionServices", - "Slate", + "SessionServices", + "Slate", "SlateCore", "SourceCodeAccess", "StandaloneRenderer", diff --git a/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Target.cs b/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Target.cs index ed0b88988195..3bf83a845ac0 100644 --- a/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Target.cs +++ b/Engine/Source/Programs/UnrealFrontend/UnrealFrontend.Target.cs @@ -10,7 +10,7 @@ public class UnrealFrontendTarget : TargetRules Type = TargetType.Program; LinkType = TargetLinkType.Modular; AdditionalPlugins.Add("UdpMessaging"); - LaunchModuleName = "UnrealFrontend"; + LaunchModuleName = "UnrealFrontend"; bBuildEditor = false; bCompileAgainstEngine = false; diff --git a/Engine/Source/Programs/UnrealHeaderTool/Private/HeaderParser.cpp b/Engine/Source/Programs/UnrealHeaderTool/Private/HeaderParser.cpp index bc23d30d5c17..597232ef2353 100644 --- a/Engine/Source/Programs/UnrealHeaderTool/Private/HeaderParser.cpp +++ b/Engine/Source/Programs/UnrealHeaderTool/Private/HeaderParser.cpp @@ -8738,7 +8738,45 @@ bool FHeaderParser::DefaultValueStringCppFormatToInnerFormat(const UProperty* Pr } else if( Property->IsA(UTextProperty::StaticClass()) ) { - return 
FDefaultValueHelper::StringFromCppString(CppForm, TEXT("FText"), OutForm); + // Handle legacy cases of FText::FromString being used as default values + // These should be replaced with INVTEXT as FText::FromString can produce inconsistent keys + if (FDefaultValueHelper::StringFromCppString(CppForm, TEXT("FText::FromString"), OutForm)) + { + UE_LOG_WARNING_UHT(TEXT("FText::FromString should be replaced with INVTEXT for default parameter values")); + return true; + } + + // Parse the potential value into an instance + FText ParsedText; + if (FDefaultValueHelper::Is(CppForm, TEXT("FText()")) || FDefaultValueHelper::Is(CppForm, TEXT("FText::GetEmpty()"))) + { + ParsedText = FText::GetEmpty(); + } + else + { + static const FString UHTDummyNamespace = TEXT("__UHT_DUMMY_NAMESPACE__"); + + if (!FTextStringHelper::ReadFromString(*CppForm, ParsedText, *UHTDummyNamespace, nullptr, nullptr, /*bRequiresQuotes*/true, EStringTableLoadingPolicy::Find)) + { + return false; + } + + // If the namespace of the parsed text matches the default we gave then this was a LOCTEXT macro which we + // don't allow in default values as they rely on an external macro that is known to C++ but not to UHT + // TODO: UHT could parse these if it tracked the current LOCTEXT_NAMESPACE macro as it parsed + if (TOptional ParsedTextNamespace = FTextInspector::GetNamespace(ParsedText)) + { + if (ParsedTextNamespace.GetValue().Equals(UHTDummyNamespace)) + { + FError::Throwf(TEXT("LOCTEXT default parameter values are not supported; use NSLOCTEXT instead: %s \"%s\" "), *Property->GetName(), *CppForm); + return false; + } + } + } + + // Normalize the default value from the parsed value + FTextStringHelper::WriteToString(OutForm, ParsedText, /*bRequiresQuotes*/false); + return true; } else if( Property->IsA(UStrProperty::StaticClass()) ) { diff --git a/Engine/Source/Runtime/AppFramework/Private/Widgets/Testing/STestSuite.cpp b/Engine/Source/Runtime/AppFramework/Private/Widgets/Testing/STestSuite.cpp index 
aef2b20fa14b..6c339c8b710f 100644 --- a/Engine/Source/Runtime/AppFramework/Private/Widgets/Testing/STestSuite.cpp +++ b/Engine/Source/Runtime/AppFramework/Private/Widgets/Testing/STestSuite.cpp @@ -5992,8 +5992,7 @@ TSharedRef SpawnResponsiveGrid(const FSpawnTabArgs& Args) { TSharedRef ResponsiveGridTab = SNew(SDockTab) - .Label(LOCTEXT("ResponsiveGridTabLabel", "Responsive Grid")) - .ToolTipText(LOCTEXT("ResponsiveGridTabToolTip", "")); + .Label(LOCTEXT("ResponsiveGridTabLabel", "Responsive Grid")); ResponsiveGridTab->SetContent ( @@ -6135,7 +6134,7 @@ TSharedRef SpawnTestSuite1( const FSpawnTabArgs& Args ) FMenuBarBuilder MenuBarBuilder = FMenuBarBuilder( TSharedPtr() ); MenuBarBuilder.AddPullDownMenu( NSLOCTEXT("TestSuite", "WindowMenuLabel", "Window"), - NSLOCTEXT("TestSuite", "WindowMenuToolTip", ""), + FText::GetEmpty(), FNewMenuDelegate::CreateSP(TestSuite1TabManager.ToSharedRef(), &FTabManager::PopulateTabSpawnerMenu, TestSuiteMenu::MenuRoot)); TestSuite1Tab->SetContent diff --git a/Engine/Source/Runtime/Cbor/Cbor.Build.cs b/Engine/Source/Runtime/Cbor/Cbor.Build.cs new file mode 100644 index 000000000000..983172961c78 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Cbor.Build.cs @@ -0,0 +1,17 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +namespace UnrealBuildTool.Rules +{ + public class Cbor : ModuleRules + { + public Cbor(ReadOnlyTargetRules Target) : base(Target) + { + PublicDependencyModuleNames.AddRange( + new string[] + { + "Core", + } + ); + } + } +} diff --git a/Engine/Source/Runtime/Cbor/Private/CborModule.cpp b/Engine/Source/Runtime/Cbor/Private/CborModule.cpp new file mode 100644 index 000000000000..3f8aff39d0af --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Private/CborModule.cpp @@ -0,0 +1,30 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "CoreMinimal.h" +#include "CborGlobals.h" +#include "Modules/ModuleInterface.h" +#include "Modules/ModuleManager.h" + +DEFINE_LOG_CATEGORY(LogCbor); + +/** + * Implements the Cbor module. + */ +class FCborModule + : public IModuleInterface +{ +public: + + // IModuleInterface interface + + virtual void StartupModule( ) override { } + virtual void ShutdownModule( ) override { } + + virtual bool SupportsDynamicReloading( ) override + { + return false; + } +}; + + +IMPLEMENT_MODULE(FCborModule, Cbor); diff --git a/Engine/Source/Runtime/Cbor/Private/CborReader.cpp b/Engine/Source/Runtime/Cbor/Private/CborReader.cpp new file mode 100644 index 000000000000..1b9bd248086a --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Private/CborReader.cpp @@ -0,0 +1,301 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "CborReader.h" + +FCborReader::FCborReader(FArchive* InStream) + : Stream(InStream) +{ + ContextStack.Emplace(); +} + +FCborReader::~FCborReader() +{ + check(ContextStack.Num() == 1 && ContextStack.Top().RawCode() == ECborCode::Dummy); +} + +const FArchive* FCborReader::GetArchive() const +{ + return Stream; +} + +bool FCborReader::IsError() const +{ + // the dummy context holds previous error + return ContextStack[0].IsError(); +} + +FCborHeader FCborReader::GetError() const +{ + // the dummy context holds previous error + return ContextStack[0].Header; +} + +const FCborContext& FCborReader::GetContext() const +{ + return ContextStack.Top(); +} + +bool FCborReader::ReadNext(FCborContext& OutContext) +{ + OutContext.Reset(); + + // if an error happened, successive read are also errors + if (IsError()) + { + OutContext.Header = GetError(); + return false; + } + + // Invalid stream error + if (Stream == nullptr) + { + OutContext.Header = SetError(ECborCode::ErrorStreamFailure); + return false; + } + + // Current parent + FCborContext& ParentContext = ContextStack.Top(); + + // Check if we reached container end, if so output as if we read 
a break code + if (ParentContext.IsFiniteContainer() && ParentContext.Length == 0) + { + OutContext.Header.Set(ECborCode::Break); + // Report 0 Length + OutContext.Length = ParentContext.Length; + // Report parent context container type + OutContext.RawTextValue.Add((char)ParentContext.MajorType()); + // Done with parent context + ContextStack.Pop(); + return true; + } + + // Done reading + if (Stream->AtEnd()) + { + OutContext.Header.Set(ParentContext.RawCode() == ECborCode::Dummy ? ECborCode::StreamEnd : ECborCode::ErrorContext); + return false; + } + + // Read the cbor header + *Stream << OutContext.Header; + + // Check for break item + if (OutContext.IsBreak()) + { + // Got a break item out of a indefinite context + if (!ParentContext.IsIndefiniteContainer()) + { + OutContext.Header = SetError(ECborCode::ErrorBreak); + return false; + } + + // Odd number of item read + if (ParentContext.MajorType() == ECborCode::Map && (ParentContext.Length & 1)) + { + OutContext.Header = SetError(ECborCode::ErrorMapContainer); + return false; + } + // Report Length + OutContext.Length = ParentContext.Length; + // Report parent context container type + OutContext.RawTextValue.Add((char)ParentContext.MajorType()); + // Done with parent context + ContextStack.Pop(); + return true; + } + + // if the type is indefinite, we increment the length of the parent context + if (ParentContext.IsIndefiniteContainer()) + { + ++ParentContext.Length; + + // If we have an indefinite string but current context type doesn't match flag an error + if (ParentContext.IsString() && ParentContext.MajorType() != OutContext.MajorType()) + { + OutContext.Header = SetError(ECborCode::ErrorStringNesting); + return false; + } + } + // Otherwise the length was set when we read the parent context, decrement it, container end if flagged when reaching 0 + else if (ParentContext.IsFiniteContainer()) + { + --ParentContext.Length; + } + + // Read item + switch (OutContext.MajorType()) + { + case ECborCode::Uint: + 
OutContext.UIntValue = ReadUIntValue(OutContext, *Stream); + break; + case ECborCode::Int: + OutContext.UIntValue = ~ReadUIntValue(OutContext, *Stream); + break; + case ECborCode::ByteString: + // fall through + case ECborCode::TextString: + // if we have an indefinite string item, push the context + if (OutContext.IsIndefiniteContainer()) + { + OutContext.Length = 0; + ContextStack.Push(OutContext); + } + // Otherwise read the string length in bytes, then serialize the raw context in the byte array + else + { + OutContext.Length = ReadUIntValue(OutContext, *Stream); + OutContext.RawTextValue.SetNumUninitialized(OutContext.Length + 1); // Length doesn't count the null terminating character + Stream->Serialize(OutContext.RawTextValue.GetData(), OutContext.Length); + OutContext.RawTextValue[OutContext.Length] = '\0'; + } + break; + case ECborCode::Array: + OutContext.Length = OutContext.AdditionalValue() == ECborCode::Indefinite ? 0 : ReadUIntValue(OutContext, *Stream); + ContextStack.Push(OutContext); + break; + case ECborCode::Map: + OutContext.Length = OutContext.AdditionalValue() == ECborCode::Indefinite ? 
0 : ReadUIntValue(OutContext, *Stream) * 2; + ContextStack.Push(OutContext); + break; + case ECborCode::Tag: + OutContext.UIntValue = ReadUIntValue(OutContext, *Stream); + break; + case ECborCode::Prim: + ReadPrimValue(OutContext, *Stream); + break; + } + + if (OutContext.IsError()) + { + SetError(OutContext.RawCode()); + return false; + } + return true; +} + +bool FCborReader::SkipContainer(ECborCode ContainerType) +{ + if (GetContext().MajorType() != ContainerType) + { + return false; + } + uint32 Depth = 0; + FCborContext Context; + while (ReadNext(Context)) + { + if (Context.IsBreak() && Depth-- == 0) + { + break; + } + + if (Context.IsContainer()) + { + ++Depth; + } + } + return !IsError(); +} + +uint64 FCborReader::ReadUIntValue(FCborContext& Context, FArchive& Ar) +{ + uint64 AdditionalValue = (uint8)Context.AdditionalValue(); + switch (Context.AdditionalValue()) + { + case ECborCode::Value_1Byte: + { + uint8 Temp; + Ar << Temp; + AdditionalValue = Temp; + } + break; + case ECborCode::Value_2Bytes: + { + uint16 Temp; + Ar << Temp; + AdditionalValue = Temp; + } + break; + case ECborCode::Value_4Bytes: + { + uint32 Temp; + Ar << Temp; + AdditionalValue = Temp; + } + break; + case ECborCode::Value_8Bytes: + { + uint64 Temp; + Ar << Temp; + AdditionalValue = Temp; + } + break; + case ECborCode::Unused_28: + // Fall through + case ECborCode::Unused_29: + // Fall through + case ECborCode::Unused_30: + // Fall through + case ECborCode::Indefinite: + // Error + Context.Header.Set(ECborCode::ErrorReservedItem); + break; + default: + // Use value directly, Noop + break; + } + return AdditionalValue; +} + +void FCborReader::ReadPrimValue(FCborContext& Context, FArchive& Ar) +{ + switch (Context.AdditionalValue()) + { + case ECborCode::False: + Context.BoolValue = false; + break; + case ECborCode::True: + Context.BoolValue = true; + break; + case ECborCode::Null: + // fall through + case ECborCode::Undefined: + // noop + break; + case ECborCode::Value_1Byte: + { + uint8 
Temp; + Ar << Temp; + } + break; + case ECborCode::Value_2Bytes: + // We do not support half float encoding + Context.Header.Set(ECborCode::ErrorNoHalfFloat); + break; + case ECborCode::Value_4Bytes: + { + float Temp; + Ar << Temp; + Context.FloatValue = Temp; + } + break; + case ECborCode::Value_8Bytes: + { + double Temp; + Ar << Temp; + Context.DoubleValue = Temp; + } + break; + default: + // Error other values are unused, break item should have been processed elsewhere + Context.Header.Set(ECborCode::ErrorReservedItem); + break; + } +} + +FCborHeader FCborReader::SetError(ECborCode ErrorCode) +{ + FCborContext& Dummy = ContextStack[0]; + Dummy.Header.Set(ErrorCode); + return Dummy.Header; +} + diff --git a/Engine/Source/Runtime/Cbor/Private/CborWriter.cpp b/Engine/Source/Runtime/Cbor/Private/CborWriter.cpp new file mode 100644 index 000000000000..79f6e44a2519 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Private/CborWriter.cpp @@ -0,0 +1,178 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "CborWriter.h" + +FCborWriter::FCborWriter(FArchive* InStream) + : Stream(InStream) +{ + check(Stream != nullptr && Stream->IsSaving()); + ContextStack.Emplace(); +} + +FCborWriter::~FCborWriter() +{ + check(ContextStack.Num() == 1 && ContextStack.Top().RawCode() == ECborCode::Dummy); +} + +const FArchive* FCborWriter::GetArchive() const +{ + return Stream; +} + +void FCborWriter::WriteContainerStart(ECborCode ContainerType, int64 NbItem) +{ + check(ContainerType == ECborCode::Array || ContainerType == ECborCode::Map); + CheckContext(ContainerType); + + FCborHeader Header; + + // if NbItem is negative consider the map indefinite + if (NbItem < 0) + { + Header.Set(ContainerType | ECborCode::Indefinite); + *Stream << Header; + } + else + { + Header = WriteUIntValue(ContainerType, *Stream, (uint64) NbItem); + } + FCborContext Context; + Context.Header = Header; + // Length in context for indefinite container is marked as 0 and count up. 
+ // Map length in context are marked as twice their number of pairs in finite container and counted down. + // @see CheckContext + Context.Length = NbItem < 0 ? 0 : (ContainerType == ECborCode::Map ? NbItem * 2 : NbItem); + ContextStack.Add(MoveTemp(Context)); +} + +void FCborWriter::WriteContainerEnd() +{ + check(ContextStack.Top().IsIndefiniteContainer()); + FCborHeader Header(ECborCode::Break); + *Stream << Header; + ContextStack.Pop(); +} + +void FCborWriter::WriteNull() +{ + CheckContext(ECborCode::Prim); + FCborHeader Header(ECborCode::Prim | ECborCode::Null); + *Stream << Header; +} + +void FCborWriter::WriteValue(uint64 Value) +{ + CheckContext(ECborCode::Uint); + WriteUIntValue(ECborCode::Uint, *Stream, Value); +} + +void FCborWriter::WriteValue(int64 Value) +{ + if (Value < 0) + { + CheckContext(ECborCode::Int); + WriteUIntValue(ECborCode::Int, *Stream, ~Value); + } + else + { + CheckContext(ECborCode::Uint); + WriteUIntValue(ECborCode::Uint, *Stream, Value); + } +} + +void FCborWriter::WriteValue(bool Value) +{ + CheckContext(ECborCode::Prim); + FCborHeader Header(ECborCode::Prim | (Value ? 
ECborCode::True : ECborCode::False)); + *Stream << Header; +} + +void FCborWriter::WriteValue(float Value) +{ + CheckContext(ECborCode::Prim); + FCborHeader Header(ECborCode::Prim | ECborCode::Value_4Bytes); + *Stream << Header; + *Stream << Value; +} + +void FCborWriter::WriteValue(double Value) +{ + CheckContext(ECborCode::Prim); + FCborHeader Header(ECborCode::Prim | ECborCode::Value_8Bytes); + *Stream << Header; + *Stream << Value; +} + +void FCborWriter::WriteValue(const FString& Value) +{ + CheckContext(ECborCode::TextString); + FTCHARToUTF8 UTF8String(*Value); + // Write string header + WriteUIntValue(ECborCode::TextString, *Stream, (uint64)UTF8String.Length()); + // Write string + check(sizeof(decltype(*UTF8String.Get())) == 1); + Stream->Serialize(const_cast(UTF8String.Get()), UTF8String.Length()); +} + +void FCborWriter::WriteValue(const char* CString, uint64 Length) +{ + CheckContext(ECborCode::ByteString); + // Write c string header + WriteUIntValue(ECborCode::ByteString, *Stream, Length); + Stream->Serialize(const_cast(CString), Length); +} + +FCborHeader FCborWriter::WriteUIntValue(FCborHeader Header, FArchive& Ar, uint64 Value) +{ + if (Value < 24) + { + Header.Set(Header.MajorType() | (ECborCode)Value); + Ar << Header; + } + else if (Value < 256) + { + Header.Set(Header.MajorType() | ECborCode::Value_1Byte); + Ar << Header; + uint8 Temp = Value; + Ar << Temp; + } + else if (Value < 65536) + { + Header.Set((uint8)(Header.MajorType() | ECborCode::Value_2Bytes)); + Ar << Header; + uint16 Temp = Value; + Ar << Temp; + } + else if (Value < 0x100000000L) + { + Header.Set((uint8)(Header.MajorType() | ECborCode::Value_4Bytes)); + Ar << Header; + uint32 Temp = Value; + Ar << Temp; + } + else + { + Header.Set((uint8)(Header.MajorType() | ECborCode::Value_8Bytes)); + Ar << Header; + uint64 Temp = Value; + Ar << Temp; + } + return Header; +} + +void FCborWriter::CheckContext(ECborCode MajorType) +{ + FCborContext& Context = ContextStack.Top(); + if 
(Context.IsIndefiniteContainer()) + { + ++Context.Length; + check(!Context.IsString() || MajorType != Context.MajorType()); + } + else if (Context.IsFiniteContainer()) + { + if (--Context.Length == 0) + { + ContextStack.Pop(); + } + } +} diff --git a/Engine/Source/Runtime/Cbor/Private/Tests/CborTests.cpp b/Engine/Source/Runtime/Cbor/Private/Tests/CborTests.cpp new file mode 100644 index 000000000000..7f7b5650936b --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Private/Tests/CborTests.cpp @@ -0,0 +1,288 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "CoreMinimal.h" +#include "Misc/AutomationTest.h" +#include "Serialization/MemoryReader.h" +#include "Serialization/MemoryWriter.h" + +#include "CborReader.h" +#include "CborWriter.h" + +#if WITH_DEV_AUTOMATION_TESTS + +/** + * FCborAutomationTest + * Simple unit test that runs Cbor's in-built test cases + */ +IMPLEMENT_SIMPLE_AUTOMATION_TEST(FCborAutomationTest, "System.Core.Serialization.CBOR", EAutomationTestFlags::ApplicationContextMask | EAutomationTestFlags::SmokeFilter ) + + +/** + * Execute the Cbor test cases + * + * @return true if the test was successful, false otherwise + */ +bool FCborAutomationTest::RunTest(const FString& Parameters) +{ + // Create the Writer + TArray Bytes; + TUniquePtr OutputStream = MakeUnique(Bytes); + FCborWriter Writer(OutputStream.Get()); + + // Create the Reader + TUniquePtr InputStream = MakeUnique(Bytes); + FCborReader Reader(InputStream.Get()); + + int64 TestInt = 0; + FCborContext Context; + + // Positive Integer Item + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 1; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 10; + 
Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 23; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 24; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AdditionalValue() == ECborCode::Value_1Byte); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 1000; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AdditionalValue() == ECborCode::Value_2Bytes); + + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 3000000000; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AdditionalValue() == ECborCode::Value_4Bytes); + + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + TestInt = 9223372036854775807; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Uint); + check(Context.AdditionalValue() == ECborCode::Value_8Bytes); + check(Context.AsUInt() == TestInt); + check(Context.AsInt() == TestInt); + + // Negative numbers + + TestInt = -1; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AsInt() == TestInt); + + TestInt = -23; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AsInt() == TestInt); + + TestInt = -25; + Writer.WriteValue(TestInt); + 
check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AdditionalValue() == ECborCode::Value_1Byte); + check(Context.AsInt() == TestInt); + + TestInt = -1000; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AdditionalValue() == ECborCode::Value_2Bytes); + check(Context.AsInt() == TestInt); + + TestInt = -3000000000LL; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AdditionalValue() == ECborCode::Value_4Bytes); + check(Context.AsInt() == TestInt); + + TestInt = -92233720368547758LL; //-9223372036854775807LL; + Writer.WriteValue(TestInt); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Int); + check(Context.AdditionalValue() == ECborCode::Value_8Bytes); + check(Context.AsInt() == TestInt); + + // Bool + + bool TestBool = false; + Writer.WriteValue(TestBool); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Prim); + check(Context.AdditionalValue() == ECborCode::False); + check(Context.AsBool() == TestBool); + + TestBool = true; + Writer.WriteValue(TestBool); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Prim); + check(Context.AdditionalValue() == ECborCode::True); + check(Context.AsBool() == TestBool); + + // Float + + float TestFloat = 3.14159265f; + Writer.WriteValue(TestFloat); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Prim); + check(Context.AdditionalValue() == ECborCode::Value_4Bytes); + check(Context.AsFloat() == TestFloat); + + + // Double + + double TestDouble = 3.14159265; // 3.4028234663852886e+38; + Writer.WriteValue(TestDouble); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Prim); + check(Context.AdditionalValue() == 
ECborCode::Value_8Bytes); + check(Context.AsDouble() == TestDouble); + + + // String + + FString TestString(TEXT("ANSIString")); + + Writer.WriteValue(TestString); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::TextString); + check(Context.AsString() == TestString); + + TestString = TEXT("ありがとう"); + Writer.WriteValue(TestString); + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::TextString); + check(Context.AsString() == TestString); + + // C String + char TestCString[] = "Potato"; + + Writer.WriteValue(TestCString, (sizeof(TestCString) / sizeof(char)) - 1); // do not count the null terminating character + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::ByteString); + check(TCString::Strcmp(Context.AsCString(), TestCString) == 0); + + // Array + TArray IntArray { 0, 1, -1, 10, -1000, -3000000000LL, 240, -24 }; + Writer.WriteContainerStart(ECborCode::Array, IntArray.Num()); + for (int64 Val : IntArray) + { + Writer.WriteValue(Val); + } + // Array start & length + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Array); + check(Context.AsLength() == IntArray.Num()); + + for (int64 Val : IntArray) + { + check(Reader.ReadNext(Context) == true); + check(Context.AsInt() == Val); + } + + // Read array end, report length 0 on finite container + // although the array wasn't written as indefinite, + // the reader will emit a virtual break token to notify the container end + check(Reader.ReadNext(Context) == true); + check(Context.IsBreak()); + check(Context.AsLength() == 0); + + // Indefinite Array + Writer.WriteContainerStart(ECborCode::Array, -1); + for (int64 Val : IntArray) + { + Writer.WriteValue(Val); + } + Writer.WriteContainerEnd(); + + // Array start & length + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Array); + check(Context.IsIndefiniteContainer()); + check(Context.AsLength() == 0); + 
+ for (int64 Val : IntArray) + { + check(Reader.ReadNext(Context) == true); + check(Context.AsInt() == Val); + } + + // Read array end, report length + // although the array wasn't written as indefinite, + // the reader will emit a virtual break token to notify the container end + check(Reader.ReadNext(Context) == true); + check(Context.IsBreak()); + check(Context.AsLength() == IntArray.Num()); + + // Map + TMap StringMap = { {TEXT("Apple"), TEXT("Orange")}, {TEXT("Potato"), TEXT("Tomato")}, {TEXT("Meat"), TEXT("Treat")} }; + Writer.WriteContainerStart(ECborCode::Map, StringMap.Num()); + + for (const auto& Pair : StringMap) + { + Writer.WriteValue(Pair.Key); + Writer.WriteValue(Pair.Value); + } + + // Map start & length + check(Reader.ReadNext(Context) == true); + check(Context.MajorType() == ECborCode::Map); + check(Context.AsLength() == StringMap.Num() * 2); + + for (const auto& Pair : StringMap) + { + check(Reader.ReadNext(Context) == true); + check(Context.AsString() == Pair.Key); + check(Reader.ReadNext(Context) == true); + check(Context.AsString() == Pair.Value); + } + + // Read map end + // although the array wasn't written as indefinite, + // the reader will emit a virtual break token to notify the container end + check(Reader.ReadNext(Context) == true); + check(Context.IsBreak()); + + check(Reader.ReadNext(Context) == false); + check(Context.RawCode() == ECborCode::StreamEnd); + return true; +} + +#endif //WITH_DEV_AUTOMATION_TESTS diff --git a/Engine/Source/Runtime/Cbor/Public/CborGlobals.h b/Engine/Source/Runtime/Cbor/Public/CborGlobals.h new file mode 100644 index 000000000000..4099098801c7 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Public/CborGlobals.h @@ -0,0 +1,7 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "CoreMinimal.h" + +CBOR_API DECLARE_LOG_CATEGORY_EXTERN(LogCbor, Log, All); diff --git a/Engine/Source/Runtime/Cbor/Public/CborReader.h b/Engine/Source/Runtime/Cbor/Public/CborReader.h new file mode 100644 index 000000000000..9a788707a723 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Public/CborReader.h @@ -0,0 +1,61 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "CborTypes.h" + +/** + * Reader for a the cbor protocol encoded stream + * @see http://cbor.io + */ +class CBOR_API FCborReader +{ +public: + FCborReader(FArchive* InStream); + ~FCborReader(); + + /** @return the archive we are reading from. */ + const FArchive* GetArchive() const; + + /** @return true if the reader is in error. */ + bool IsError() const; + + /** @return A cbor Header containing an error code as its raw code. */ + FCborHeader GetError() const; + + /** + * The cbor context of the reader can either be + * a container context or a dummy. + * A reference to the context shouldn't be held while calling ReadNext. + * @return The current cbor context. */ + const FCborContext& GetContext() const; + + /** + * Read the next value from the cbor stream. + * @param OutContext the context to read the value into. + * @return true if successful, false if an error was returned or the end of the stream was reached. + */ + bool ReadNext(FCborContext& OutContext); + + /** + * Skip a container of ContainerType type + * @param ContainerType the container we expect to skip. + * @return true if successful, false if the current container wasn't a ContainerType or an error occurred. + */ + bool SkipContainer(ECborCode ContainerType); + +private: + /** Read a uint value from Ar into OutContext and also return it. */ + static uint64 ReadUIntValue(FCborContext& OutContext, FArchive& Ar); + /** Read a Prim value from Ar into OutContext. 
*/ + static void ReadPrimValue(FCborContext& OutContext, FArchive& Ar); + + /** Set an error in the reader and return it. */ + FCborHeader SetError(ECborCode ErrorCode); + + /** The archive we are reading from. */ + FArchive* Stream; + /** Holds the context stack for the reader. */ + TArray ContextStack; +}; \ No newline at end of file diff --git a/Engine/Source/Runtime/Cbor/Public/CborTypes.h b/Engine/Source/Runtime/Cbor/Public/CborTypes.h new file mode 100644 index 000000000000..131b74778306 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Public/CborTypes.h @@ -0,0 +1,283 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Misc/EnumClassFlags.h" + +/** + * Possible cbor code for cbor headers. + * @see http://cbor.io + */ +enum class ECborCode : uint8 +{ + None = 0, // no code + // Major Types + Uint = 0 << 5, // positive/unsigned int + Int = 1 << 5, // negative number + ByteString = 2 << 5, // byte string + TextString = 3 << 5, // text string + Array = 4 << 5, // array + Map = 5 << 5, // map + Tag = 6 << 5, // semantic tag + Prim = 7 << 5, // bool, null, char, half-float, float, double, break code + + // Additional Value Info + Value_1Byte = 0x18, // Additional value in next byte + Value_2Bytes = 0x19, // Additional value in next 2 bytes + Value_4Bytes = 0x1A, // Additional value in next 4 bytes + Value_8Bytes = 0x1B, // Additional value in next 8 bytes + Unused_28 = 0x1C, // Unused value in protocol + Unused_29 = 0x1D, // Unused value in protocol + Unused_30 = 0x1E, // Unused value in protocol + Indefinite = 0x1F, // Indicate indefinite containers + + // Prim type codes + False = 0x14, // boolean + True = 0x15, // boolean + Null = 0x16, // null value + Undefined = 0x17, // undefined, unused in the writer + + // Special values + Break = 0xFF, // break code (Prim | 31) + + // Protocol unused values, used to report context or errors + // State + Dummy = 0x1C, // mark a dummy (Uint | 28) + StreamEnd 
= 0x3C, // stream end (Int | 28) + // Errors + ErrorReservedItem = 0x1D, // reserved value (Uint | 29) + ErrorStreamFailure = 0x1E, // stream error (Uint | 30) + ErrorBreak = 0x3D, // break not allowed (Int | 29) + ErrorMapContainer = 0x3E, // odd item number in map (Int | 30) + ErrorNoHalfFloat = 0x5D, // no half float support (ByteString | 29) + ErrorContext = 0x5E, // reader/writer context error (ByteString | 30) + ErrorStringNesting = 0x7D, // infinite string wrong type (TextString | 29) +}; +ENUM_CLASS_FLAGS(ECborCode); + +class FCborReader; +class FCborWriter; + +/** + * class that represent a cbor header + */ +class FCborHeader +{ +public: + FCborHeader(uint8 InHeader = 0) + : Header(InHeader) + {} + FCborHeader(ECborCode InHeader) + : Header((uint8)InHeader) + {} + + /** Set a cbor code for the header. */ + void Set(ECborCode Code) + { + Header = (uint8)Code; + } + + /** Set a cbor code as a uint8. */ + void Set(uint8 Code) + { + Header = Code; + } + + /** Get the cbor header as a uint8 */ + uint8 Raw() const + { + return Header; + } + + /** Get the cbor header raw code. */ + ECborCode RawCode() const + { + return (ECborCode)Header; + } + + /** Get the major type part of the cbor header. */ + ECborCode MajorType() const + { + return (ECborCode)(Header & (7 << 5)); + } + + /** Get the additional value part of the cbor header. */ + ECborCode AdditionalValue() const + { + return (ECborCode)(Header & 0x1F); + } + + /** Serialization helper */ + friend FArchive& operator<<(FArchive& Ar, FCborHeader& InHeader) + { + return Ar << InHeader.Header; + } + +private: + /** Hold the header value. */ + uint8 Header; +}; + +/** + * class that represent a cbor context + * which consists of a header and value pair + */ +struct FCborContext +{ + FCborContext() + : Header(ECborCode::Dummy) + , IntValue(0) + {} + + /** Reset the context to a dummy state. */ + void Reset() + { + *this = FCborContext(); + } + + /** @return the context header raw code. 
*/ + ECborCode RawCode() const + { + return Header.RawCode(); + } + + /** @return the context header major type. */ + ECborCode MajorType() const + { + return Header.MajorType(); + } + + /** @return the context header additional value. */ + ECborCode AdditionalValue() const + { + return Header.AdditionalValue(); + } + + /** @return true if this context represents an error code. */ + bool IsError() const + { + // All error code have their additional value set to those 2 protocol unused values. + return AdditionalValue() == ECborCode::Unused_29 || AdditionalValue() == ECborCode::Unused_30; + } + + /** @return true if this context represent a break code. */ + bool IsBreak() const + { + return Header.RawCode() == ECborCode::Break; + } + + /** @return true if this context represents a string type. */ + bool IsString() const + { + return MajorType() == ECborCode::TextString || MajorType() == ECborCode::ByteString; + } + + /** @return true if this context represents a container. (indefinite string are containers.)*/ + bool IsContainer() const + { + return IsIndefiniteContainer() || IsFiniteContainer(); + } + + /** @return true if this context represents an indefinite container. */ + bool IsIndefiniteContainer() const + { + return (MajorType() == ECborCode::Array || MajorType() == ECborCode::Map || MajorType() == ECborCode::ByteString || MajorType() == ECborCode::TextString) + && AdditionalValue() == ECborCode::Indefinite; + } + + /** @return true if this context represents an finite container. */ + bool IsFiniteContainer() const + { + return (MajorType() == ECborCode::Array || MajorType() == ECborCode::Map) + && AdditionalValue() != ECborCode::Indefinite; + } + + /** @return the context as the container code the break context is associated with. */ + ECborCode AsBreak() const + { + check(Header.RawCode() == ECborCode::Break && RawTextValue.Num() == 1); + return (ECborCode)RawTextValue[0]; + } + + /** @return the context as a container length. 
Map container returns their length as twice their number of pairs. */ + uint64 AsLength() const + { + check(RawCode() == ECborCode::Break || MajorType() == ECborCode::Array || MajorType() == ECborCode::Map || MajorType() == ECborCode::ByteString || MajorType() == ECborCode::TextString); + return Length; + } + + /** @return the context as an unsigned int. */ + uint64 AsUInt() const + { + check(MajorType() == ECborCode::Uint); + return UIntValue; + } + + /** @return the context as an int. */ + int64 AsInt() const + { + check(MajorType() == ECborCode::Int || MajorType() == ECborCode::Uint); + return IntValue; + } + + /** @return the context as a bool. */ + bool AsBool() const + { + check(MajorType() == ECborCode::Prim && (AdditionalValue() == ECborCode::False || AdditionalValue() == ECborCode::True)); + return BoolValue; + } + + /** @return the context as a float. */ + float AsFloat() const + { + check(Header.RawCode() == (ECborCode::Prim | ECborCode::Value_4Bytes)); + return FloatValue; + } + + /** @return the context as a double. */ + double AsDouble() const + { + check(Header.RawCode() == (ECborCode::Prim | ECborCode::Value_8Bytes)); + return DoubleValue; + } + + /** @return the context as a string. */ + FString AsString() const + { + check(MajorType() == ECborCode::TextString); + return FString(FUTF8ToTCHAR(RawTextValue.GetData()).Get()); + } + + /** @return the context as a C string. */ + const char* AsCString() const + { + check(MajorType() == ECborCode::ByteString); + return RawTextValue.GetData(); + } + +private: + friend class FCborReader; + friend class FCborWriter; + + FCborContext(ECborCode Code) + : Header(Code) + , IntValue(0) + {} + + // Holds the context header. + FCborHeader Header; + + /** Union to hold the context value. 
*/ + union + { + int64 IntValue; + uint64 UIntValue; + bool BoolValue; + float FloatValue; + double DoubleValue; + uint64 Length; + }; + // Hold text value separately since, non trivial type are a mess in union, also used to report container type for break code + TArray RawTextValue; +}; \ No newline at end of file diff --git a/Engine/Source/Runtime/Cbor/Public/CborWriter.h b/Engine/Source/Runtime/Cbor/Public/CborWriter.h new file mode 100644 index 000000000000..83f9062cb2a5 --- /dev/null +++ b/Engine/Source/Runtime/Cbor/Public/CborWriter.h @@ -0,0 +1,55 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "CborTypes.h" + +/** +* Writer for encoding a stream with the cbor protocol +* @see http://cbor.io +*/ +class CBOR_API FCborWriter +{ +public: + FCborWriter(FArchive* InStream); + ~FCborWriter(); + +public: + /** @return the archive we are writing to. */ + const FArchive* GetArchive() const; + + /** + * Write a container start code. + * @param ContainerType container major type, either array or map. + * @param NbItem the number of item in the container or negative to indicate indefinite containers. + */ + void WriteContainerStart(ECborCode ContainerType, int64 NbItem); + + /** Write a container break code, need a indefinite container context. */ + void WriteContainerEnd(); + + /** Write a value. */ + + void WriteNull(); + void WriteValue(uint64 Value); + void WriteValue(int64 Value); + void WriteValue(bool Value); + void WriteValue(float Value); + void WriteValue(double Value); + void WriteValue(const FString& Value); + void WriteValue(const char* CString, uint64 Length); + +private: + /** Write a uint Value for Header in Ar and return the final generated cbor Header. */ + static FCborHeader WriteUIntValue(FCborHeader Header, FArchive& Ar, uint64 Value); + + /** Validate the current writer context for MajorType. */ + void CheckContext(ECborCode MajorType); + + /** The archive being written to. 
*/ + FArchive* Stream; + /** The writer context stack. */ + TArray ContextStack; +}; + diff --git a/Engine/Source/Runtime/Core/Private/Containers/Ticker.cpp b/Engine/Source/Runtime/Core/Private/Containers/Ticker.cpp index 75be60fde141..0ebe7e87a011 100644 --- a/Engine/Source/Runtime/Core/Private/Containers/Ticker.cpp +++ b/Engine/Source/Runtime/Core/Private/Containers/Ticker.cpp @@ -46,13 +46,10 @@ void FTicker::RemoveTicker(FDelegateHandle Handle) Elements.RemoveAllSwap(CompareHandle); TickedElements.RemoveAllSwap(CompareHandle); // if we are ticking, we must check for the edge case of the CurrentElement removing itself. - if (bInTick) + if (bInTick && CompareHandle(CurrentElement)) { - if (CompareHandle(CurrentElement)) - { - // Technically it's possible for someone to try to remove CurrentDelegate multiple times, so make sure we never set this value to false in here. - bCurrentElementRemoved = true; - } + // Technically it's possible for someone to try to remove CurrentDelegate multiple times, so make sure we never set this value to false in here. + bCurrentElementRemoved = true; } } @@ -108,6 +105,8 @@ void FTicker::Tick(float DeltaTime) // Now that we've considered all the delegates, we swap it back into the Elements array. 
Exchange(TickedElements, Elements); + // Also clear the CurrentElement delegate as our tick is done + CurrentElement.Delegate.Unbind(); } FTicker::FElement::FElement() diff --git a/Engine/Source/Runtime/Core/Private/Internationalization/ICUInternationalization.cpp b/Engine/Source/Runtime/Core/Private/Internationalization/ICUInternationalization.cpp index e450c183f18a..add3c7e1b3c4 100644 --- a/Engine/Source/Runtime/Core/Private/Internationalization/ICUInternationalization.cpp +++ b/Engine/Source/Runtime/Core/Private/Internationalization/ICUInternationalization.cpp @@ -726,7 +726,7 @@ void FICUInternationalization::InitializeTimeZone() const int32 DefaultTzOffsetMinutes = ICUDefaultTz->getRawOffset() / 60000; const int32 RawOffsetHours = DefaultTzOffsetMinutes / 60; const int32 RawOffsetMinutes = DefaultTzOffsetMinutes % 60; - UE_LOG(LogICUInternationalization, Display, TEXT("ICU TimeZone Detection - Raw Offset: %+d:%02d, Platform Override: '%s'"), RawOffsetHours, RawOffsetMinutes, *TimeZoneId); + UE_LOG(LogICUInternationalization, Log, TEXT("ICU TimeZone Detection - Raw Offset: %+d:%02d, Platform Override: '%s'"), RawOffsetHours, RawOffsetMinutes, *TimeZoneId); } void FICUInternationalization::InitializeInvariantGregorianCalendar() diff --git a/Engine/Source/Runtime/Core/Private/Internationalization/StringTableRegistry.cpp b/Engine/Source/Runtime/Core/Private/Internationalization/StringTableRegistry.cpp index 554187c3faf7..a2e3666cad9d 100644 --- a/Engine/Source/Runtime/Core/Private/Internationalization/StringTableRegistry.cpp +++ b/Engine/Source/Runtime/Core/Private/Internationalization/StringTableRegistry.cpp @@ -65,7 +65,12 @@ void FStringTableRegistry::RegisterStringTable(const FName InTableId, FStringTab FScopeLock RegisteredStringTablesLock(&RegisteredStringTablesCS); checkf(!InTableId.IsNone(), TEXT("String table ID cannot be 'None'!")); - checkf(!RegisteredStringTables.Contains(InTableId), TEXT("String table ID '%s' is already in use!"), 
*InTableId.ToString()); +#if DO_CHECK + { + FStringTableConstPtr ExistingStringTable = RegisteredStringTables.FindRef(InTableId); + checkf(!ExistingStringTable.IsValid() || IStringTableEngineBridge::IsStringTableAssetBeingReplaced(ExistingStringTable->GetOwnerAsset()), TEXT("String table ID '%s' is already in use!"), *InTableId.ToString()); + } +#endif RegisteredStringTables.Add(InTableId, MoveTemp(InTable)); } diff --git a/Engine/Source/Runtime/Core/Private/Internationalization/TextLocalizationManager.cpp b/Engine/Source/Runtime/Core/Private/Internationalization/TextLocalizationManager.cpp index a19b302e7744..84ba8fd51f3c 100644 --- a/Engine/Source/Runtime/Core/Private/Internationalization/TextLocalizationManager.cpp +++ b/Engine/Source/Runtime/Core/Private/Internationalization/TextLocalizationManager.cpp @@ -24,7 +24,12 @@ DEFINE_LOG_CATEGORY_STATIC(LogTextLocalizationManager, Log, All); -static FString AccessedStringBeforeLocLoadedErrorMsg = TEXT("Can't access string. Loc System hasn't been initialized yet!"); +const FString& GetAccessedStringBeforeLocLoadedErrorMsg() +{ + // Note: This is in a function to ensure it is initialized before we use it (eg, so that a file-scope static FText isn't being initialized before this string) + static const FString AccessedStringBeforeLocLoadedErrorMsg = TEXT("Can't access string. 
Loc System hasn't been initialized yet!"); + return AccessedStringBeforeLocLoadedErrorMsg; +} bool IsLocalizationLockedByConfig() { @@ -631,7 +636,7 @@ FTextDisplayStringRef FTextLocalizationManager::GetDisplayString(const FString& { if (!bIsInitialized) { - *(UnlocalizedString) = AccessedStringBeforeLocLoadedErrorMsg; + *(UnlocalizedString) = GetAccessedStringBeforeLocLoadedErrorMsg(); } } @@ -956,7 +961,7 @@ void FTextLocalizationManager::UpdateFromNative(const FTextLocalizationResource& } else { - if (!LiveStringEntry.bIsLocalized && *LiveStringEntry.DisplayString == AccessedStringBeforeLocLoadedErrorMsg) + if (!LiveStringEntry.bIsLocalized && LiveStringEntry.DisplayString->Equals(GetAccessedStringBeforeLocLoadedErrorMsg(), ESearchCase::CaseSensitive)) { *LiveStringEntry.DisplayString = FString(); } @@ -1064,7 +1069,7 @@ void FTextLocalizationManager::UpdateFromLocalizations(TArrayViewEquals(GetAccessedStringBeforeLocLoadedErrorMsg(), ESearchCase::CaseSensitive)) { *(LiveStringEntry.DisplayString) = FString(); } diff --git a/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp b/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp index 128ff21b20cb..073e448bf651 100644 --- a/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp +++ b/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp @@ -1843,7 +1843,7 @@ uint32 FMath::ComputeProjectedSphereScissorRect(FIntRect& InOutScissorRect, FVec } } -bool FMath::PlaneAABBIntersection(const FPlane& P, const FBox& AABB) +int32 FMath::PlaneAABBRelativePosition(const FPlane& P, const FBox& AABB) { // find diagonal most closely aligned with normal of plane FVector Vmin, Vmax; @@ -1876,7 +1876,20 @@ bool FMath::PlaneAABBIntersection(const FPlane& P, const FBox& AABB) float dMin = P.PlaneDot(Vmin); // if Max is below plane, or Min is above we know there is no intersection.. 
otherwise there must be one - return (dMax >= 0.f && dMin <= 0.f); + if (dMax < 0.f) + { + return -1; + } + else if (dMin > 0.f) + { + return 1; + } + return 0; +} + +bool FMath::PlaneAABBIntersection(const FPlane& P, const FBox& AABB) +{ + return PlaneAABBRelativePosition(P, AABB) == 0; } bool FMath::SphereConeIntersection(const FVector& SphereCenter, float SphereRadius, const FVector& ConeAxis, float ConeAngleSin, float ConeAngleCos) diff --git a/Engine/Source/Runtime/Core/Private/Misc/DefaultValueHelper.cpp b/Engine/Source/Runtime/Core/Private/Misc/DefaultValueHelper.cpp index 7016eea9a145..3cfef30e7742 100644 --- a/Engine/Source/Runtime/Core/Private/Misc/DefaultValueHelper.cpp +++ b/Engine/Source/Runtime/Core/Private/Misc/DefaultValueHelper.cpp @@ -402,31 +402,6 @@ bool FDefaultValueHelper::StringFromCppString(const FString& Source, const FStri return false; } - if (Source.Find(FString("::"), ESearchCase::CaseSensitive) == Pos) - { - Pos += 2; - - if (!Trim(Pos, Source)) - { - return false; - } - - const FString AllowedFunctionName(TEXT("FromString")); - if (Source.Find(AllowedFunctionName, ESearchCase::CaseSensitive, ESearchDir::FromStart, Pos) == Pos) - { - Pos += AllowedFunctionName.Len(); - } - else - { - return false; - } - - if (!Trim(Pos, Source)) - { - return false; - } - } - if( TS(TEXT("(")) != Source[Pos++] ) { return false; diff --git a/Engine/Source/Runtime/Core/Private/Tests/Internationalization/TextTest.cpp b/Engine/Source/Runtime/Core/Private/Tests/Internationalization/TextTest.cpp index 0e585515ac1b..73a6df0c60fe 100644 --- a/Engine/Source/Runtime/Core/Private/Tests/Internationalization/TextTest.cpp +++ b/Engine/Source/Runtime/Core/Private/Tests/Internationalization/TextTest.cpp @@ -74,8 +74,6 @@ bool FTextTest::RunTest (const FString& Parameters) FText ArgText2 = FText::FromString(TEXT("Arg2")); FText ArgText3 = FText::FromString(TEXT("Arg3")); -#define INVTEXT(x) FText::FromString(TEXT(x)) - #define TEST( Desc, A, B ) if( !A.EqualTo(B) ) 
AddError(FString::Printf(TEXT("%s - A=%s B=%s"),*Desc,*A.ToString(),*B.ToString())) FText TestText; diff --git a/Engine/Source/Runtime/Core/Public/Async/Future.h b/Engine/Source/Runtime/Core/Public/Async/Future.h index 6bd72a4cc225..30f8a6ddb3fe 100644 --- a/Engine/Source/Runtime/Core/Public/Async/Future.h +++ b/Engine/Source/Runtime/Core/Public/Async/Future.h @@ -11,11 +11,7 @@ #include "Misc/DateTime.h" #include "HAL/Event.h" #include "HAL/PlatformProcess.h" - -template class TSharedFuture; - -/* TFutureBase -*****************************************************************************/ +#include "Misc/ScopeLock.h" /** * Base class for the internal state of asynchronous return values (futures). @@ -78,21 +74,50 @@ public: return false; } + /** + * Set a continuation to be called on completion of the promise + * @param Continuation + */ + void SetContinuation(TUniqueFunction&& Continuation) + { + bool bShouldJustRun = IsComplete(); + if (!bShouldJustRun) + { + FScopeLock Lock(&Mutex); + bShouldJustRun = IsComplete(); + if (!bShouldJustRun) + { + CompletionCallback = MoveTemp(Continuation); + } + } + if (bShouldJustRun) + { + Continuation(); + } + } + protected: /** Notifies any waiting threads that the result is available. */ void MarkComplete() { - Complete = true; + TUniqueFunction Continuation; + { + FScopeLock Lock(&Mutex); + Continuation = MoveTemp(CompletionCallback); + Complete = true; + } CompletionEvent->Trigger(); - if (CompletionCallback) + if (Continuation) { - CompletionCallback(); + Continuation(); } } private: + /** Mutex used to allow proper handling of continuations */ + mutable FCriticalSection Mutex; /** An optional callback function that is executed the state is completed. */ TUniqueFunction CompletionCallback; @@ -101,7 +126,7 @@ private: FEvent* CompletionEvent; /** Whether the asynchronous result is available. 
*/ - bool Complete; + TAtomic Complete; }; @@ -180,6 +205,8 @@ private: InternalResultType Result; }; +/* TFuture +*****************************************************************************/ /** * Abstract base template for futures and shared futures. @@ -304,6 +331,40 @@ protected: return State; } + /** + * Set a completion callback that will be called once the future completes + * or immediately if already completed + * + * @param Continuation a continuation taking an argument of type TFuture + * @return nothing at the moment but could return another future to allow future chaining + */ + template + auto Then(Func Continuation); + + /** + * Convenience wrapper for Then that + * sets a completion callback that will be called once the future completes + * or immediately if already completed + * @param Continuation a continuation taking an argument of type InternalResultType + * @return nothing at the moment but could return another future to allow future chaining + */ + template + auto Next(Func Continuation); + + /** + * Reset the future. + * Resetting a future removes any continuation from its shared state and invalidates it. + * Useful for discarding a yet to be completed future cleanly. + */ + void Reset() + { + if (IsValid()) + { + this->State->SetContinuation(nullptr); + this->State.Reset(); + } + } + private: /** Holds the future's state. */ @@ -311,12 +372,8 @@ private: }; -/* TFuture -*****************************************************************************/ - template class TSharedFuture; - /** * Template for unshared futures. */ @@ -375,6 +432,24 @@ public: return TSharedFuture(MoveTemp(*this)); } + /** + * Expose Then functionality + * @see TFutureBase + */ + using BaseType::Then; + + /** + * Expose Next functionality + * @see TFutureBase + */ + using BaseType::Next; + + /** + * Expose Reset functionality + * @see TFutureBase + */ + using BaseType::Reset; + private: /** Hidden copy constructor (futures cannot be copied).
*/ @@ -443,6 +518,24 @@ public: return TSharedFuture(MoveTemp(*this)); } + /** + * Expose Then functionality + * @see TFutureBase + */ + using BaseType::Then; + + /** + * Expose Next functionality + * @see TFutureBase + */ + using BaseType::Next; + + /** + * Expose Reset functionality + * @see TFutureBase + */ + using BaseType::Reset; + private: /** Hidden copy constructor (futures cannot be copied). */ @@ -508,6 +601,24 @@ public: */ TSharedFuture Share(); + /** + * Expose Then functionality + * @see TFutureBase + */ + using BaseType::Then; + + /** + * Expose Next functionality + * @see TFutureBase + */ + using BaseType::Next; + + /** + * Expose Reset functionality + * @see TFutureBase + */ + using BaseType::Reset; + private: /** Hidden copy constructor (futures cannot be copied). */ @@ -517,7 +628,6 @@ private: TFuture& operator=(const TFuture&); }; - /* TSharedFuture *****************************************************************************/ @@ -1082,3 +1192,56 @@ private: /** Whether a future has already been retrieved from this promise. */ bool FutureRetrieved; }; + +/* TFuture::Then +*****************************************************************************/ + +namespace FutureDetail +{ + /** + * Template for setting a promise value from a continuation. 
+ */ + template + inline void SetPromiseValue(TPromise& Promise, Func& Function, TFuture&& Param) + { + Promise.SetValue(Function(MoveTemp(Param))); + } + template + inline void SetPromiseValue(TPromise& Promise, Func& Function, TFuture&& Param) + { + Function(MoveTemp(Param)); + Promise.SetValue(); + } +} + +// Then implementation +template +template +auto TFutureBase::Then(Func Continuation) //-> TFuture())))> +{ + check(IsValid()); + using ReturnValue = decltype(Continuation(MoveTemp(TFuture()))); + + TPromise Promise; + TFuture FutureResult = Promise.GetFuture(); + TUniqueFunction Callback = [PromiseCapture = MoveTemp(Promise), ContinuationCapture = MoveTemp(Continuation), StateCapture = this->State]() mutable + { + FutureDetail::SetPromiseValue(PromiseCapture, ContinuationCapture, TFuture(MoveTemp(StateCapture))); + }; + + // This invalidates this future. + StateType MovedState = MoveTemp(this->State); + MovedState->SetContinuation(MoveTemp(Callback)); + return FutureResult; +} + +// Next implementation +template +template +auto TFutureBase::Next(Func Continuation) //-> TFuture +{ + return this->Then([Continuation = MoveTemp(Continuation)](TFuture Self) mutable + { + return Continuation(Self.Get()); + }); +} \ No newline at end of file diff --git a/Engine/Source/Runtime/Core/Public/Containers/LruCache.h b/Engine/Source/Runtime/Core/Public/Containers/LruCache.h index c48f8404efe2..e745e19a2012 100644 --- a/Engine/Source/Runtime/Core/Public/Containers/LruCache.h +++ b/Engine/Source/Runtime/Core/Public/Containers/LruCache.h @@ -278,7 +278,7 @@ public: */ FORCEINLINE const ValueType* Find(const KeyType& Key) const { - FCacheEntry** EntryPtr = LookupSet.Find(Key); + FCacheEntry*const * EntryPtr = LookupSet.Find(Key); if (EntryPtr != nullptr) { @@ -288,6 +288,39 @@ public: return nullptr; } + /** + * Find the value of the entry with the specified key. + * + * @param Key The key of the entry to get.
+ * @return Reference to the value, or triggers an assertion if the key does not exist. + */ + FORCEINLINE const ValueType& FindChecked(const KeyType& Key) const + { + FCacheEntry*const * EntryPtr = LookupSet.Find(Key); + + check(EntryPtr); + + return (*EntryPtr)->Value; + } + + /** + * Find the value of the entry with the specified key. + * + * @param Key The key of the entry to get. + * @return Copy of the value, or the default value for the ValueType if the key does not exist. + */ + FORCEINLINE ValueType FindRef(const KeyType& Key) const + { + FCacheEntry*const * EntryPtr = LookupSet.Find(Key); + + if (EntryPtr != nullptr) + { + return (*EntryPtr)->Value; + } + + return ValueType(); + } + /** * Find the value of the entry with the specified key and mark it as the most recently used. * @@ -309,6 +342,43 @@ public: return &(*EntryPtr)->Value; } + /** + * Find the value of the entry with the specified key and mark it as the most recently used. + * + * @param Key The key of the entry to get. + * @return Reference to the value, or triggers an assertion if the key does not exist. + */ + const ValueType& FindAndTouchChecked(const KeyType& Key) + { + FCacheEntry** EntryPtr = LookupSet.Find(Key); + + check(EntryPtr); + + MarkAsRecent(**EntryPtr); + + return (*EntryPtr)->Value; + } + + /** + * Find the value of the entry with the specified key and mark it as the most recently used. + * + * @param Key The key of the entry to get. + * @return Copy of the value, or the default value for the ValueType if the key does not exist. + */ + ValueType FindAndTouchRef(const KeyType& Key) + { + FCacheEntry** EntryPtr = LookupSet.Find(Key); + + if (EntryPtr == nullptr) + { + return ValueType(); + } + + MarkAsRecent(**EntryPtr); + + return (*EntryPtr)->Value; + } + /** * Find the value of an entry using a predicate. * @@ -406,6 +476,19 @@ public: return NumRemoved; } + /** + * Remove and return the least recent element from the cache. + * + * @return Copy of removed value.
+ */ + FORCEINLINE ValueType RemoveLeastRecent() + { + check(LeastRecent); + ValueType LeastRecentElement = MoveTemp(LeastRecent->Value); + Remove(LeastRecent); + return LeastRecentElement; + } + public: /** diff --git a/Engine/Source/Runtime/Core/Public/HAL/FeedbackContextAnsi.h b/Engine/Source/Runtime/Core/Public/HAL/FeedbackContextAnsi.h index 1f987451f8df..27b38bcca7b4 100644 --- a/Engine/Source/Runtime/Core/Public/HAL/FeedbackContextAnsi.h +++ b/Engine/Source/Runtime/Core/Public/HAL/FeedbackContextAnsi.h @@ -7,6 +7,7 @@ #include "Containers/StringConv.h" #include "CoreGlobals.h" #include "Misc/OutputDeviceRedirector.h" +#include "Misc/OutputDeviceConsole.h" #include "Misc/App.h" #include "Misc/OutputDeviceHelper.h" #include "Misc/FeedbackContext.h" @@ -50,8 +51,10 @@ public: void Serialize( const TCHAR* V, ELogVerbosity::Type Verbosity, const class FName& Category ) override { // When -stdout is specified then FOutputDeviceStdOutput will be installed and pipe logging to stdout. - // If so don't use LocalPrint or else duplicate messages will be written to stdout - static bool bUsingStdOut = FParse::Param(FCommandLine::Get(), TEXT("stdout")); + // If so don't use LocalPrint or else duplicate messages will be written to stdout. + // A similar issue happens when a Console is shown. 
+ static bool bUsingStdOut = FParse::Param(FCommandLine::Get(), TEXT("stdout")) || + (GLogConsole != nullptr && GLogConsole->IsShown()); if (bUsingStdOut == false && (Verbosity == ELogVerbosity::Error || Verbosity == ELogVerbosity::Warning || Verbosity == ELogVerbosity::Display)) diff --git a/Engine/Source/Runtime/Core/Public/Internationalization/Internationalization.h b/Engine/Source/Runtime/Core/Public/Internationalization/Internationalization.h index 78a9a72c7890..072bfd15b6f4 100644 --- a/Engine/Source/Runtime/Core/Public/Internationalization/Internationalization.h +++ b/Engine/Source/Runtime/Core/Public/Internationalization/Internationalization.h @@ -259,4 +259,9 @@ private: */ #define NSLOCTEXT( InNamespace, InKey, InTextLiteral ) FInternationalization::ForUseOnlyByLocMacroAndGraphNodeTextLiterals_CreateText( TEXT( InTextLiteral ), TEXT( InNamespace ), TEXT( InKey ) ) +/** + * Creates a culture invariant FText from the given string literal. + */ +#define INVTEXT(InTextLiteral) FText::AsCultureInvariant(TEXT(InTextLiteral)) + #undef LOC_DEFINE_REGION diff --git a/Engine/Source/Runtime/Core/Public/Internationalization/StringTableCore.h b/Engine/Source/Runtime/Core/Public/Internationalization/StringTableCore.h index a746606b50b0..17518d62e1f5 100644 --- a/Engine/Source/Runtime/Core/Public/Internationalization/StringTableCore.h +++ b/Engine/Source/Runtime/Core/Public/Internationalization/StringTableCore.h @@ -195,12 +195,19 @@ public: return InstancePtr && InstancePtr->IsStringTableFromAssetImpl(InTableId); } + /** Is this string table asset being replaced due to a hot-reload? 
*/ + static bool IsStringTableAssetBeingReplaced(const UStringTable* InStringTableAsset) + { + return InstancePtr && InStringTableAsset && InstancePtr->IsStringTableAssetBeingReplacedImpl(InStringTableAsset); + } + protected: virtual ~IStringTableEngineBridge() {} virtual void RedirectAndLoadStringTableAssetImpl(FName& InOutTableId, const EStringTableLoadingPolicy InLoadingPolicy) = 0; virtual void CollectStringTableAssetReferencesImpl(const FName InTableId, FStructuredArchive::FSlot Slot) = 0; virtual bool IsStringTableFromAssetImpl(const FName InTableId) = 0; + virtual bool IsStringTableAssetBeingReplacedImpl(const UStringTable* InStringTableAsset) = 0; /** Singleton instance, populated by the derived type */ static IStringTableEngineBridge* InstancePtr; diff --git a/Engine/Source/Runtime/Core/Public/Math/UnrealMathUtility.h b/Engine/Source/Runtime/Core/Public/Math/UnrealMathUtility.h index 147b8059c6f8..03b3214b1f34 100644 --- a/Engine/Source/Runtime/Core/Public/Math/UnrealMathUtility.h +++ b/Engine/Source/Runtime/Core/Public/Math/UnrealMathUtility.h @@ -1029,6 +1029,15 @@ struct FMath : public FPlatformMath */ static CORE_API bool PlaneAABBIntersection(const FPlane& P, const FBox& AABB); + /** + * Determine the position of an AABB relative to a plane: + * completely above (in the direction of the normal of the plane), completely below or intersects it + * @param P - the plane to test + * @param AABB - the axis aligned bounding box to test + * @return -1 if below, 1 if above, 0 if intersects + */ + static CORE_API int32 PlaneAABBRelativePosition(const FPlane& P, const FBox& AABB); + /** * Performs a sphere vs box intersection test using Arvo's algorithm: * diff --git a/Engine/Source/Runtime/Core/Public/Misc/ITransaction.h b/Engine/Source/Runtime/Core/Public/Misc/ITransaction.h index 45084c72ba65..c9345e98a683 100644 --- a/Engine/Source/Runtime/Core/Public/Misc/ITransaction.h +++ b/Engine/Source/Runtime/Core/Public/Misc/ITransaction.h @@ -3,6 +3,7 @@ #pragma once 
#include "CoreTypes.h" +#include "Misc/Guid.h" #include "UObject/UObjectHierarchyFwd.h" #include "Change.h" @@ -12,6 +13,64 @@ typedef void(*STRUCT_AR)( class FArchive& Ar, void* TPtr ); // serialize typedef void(*STRUCT_DTOR)( void* TPtr ); // destruct +/** Different kinds of actions that can trigger a transaction state change */ +enum class ETransactionStateEventType : uint8 +{ + /** A transaction has been started. This will be followed by a TransactionCanceled or TransactionFinalized event. */ + TransactionStarted, + /** A transaction was canceled. */ + TransactionCanceled, + /** A transaction was finalized. */ + TransactionFinalized, + + /** A transaction will be used in an undo/redo operation. This will be followed by a UndoRedoFinalized event. */ + UndoRedoStarted, + /** A transaction has been used in an undo/redo operation. */ + UndoRedoFinalized, +}; + + +/** + * Convenience struct for passing around transaction context. + */ +struct FTransactionContext +{ + FTransactionContext() + : TransactionId() + , OperationId() + , Title() + , Context() + , PrimaryObject(nullptr) + { + } + + FTransactionContext(const FGuid& InTransactionId, const FGuid& InOperationId, const FText& InSessionTitle, const TCHAR* InContext, UObject* InPrimaryObject) + : TransactionId(InTransactionId) + , OperationId(InOperationId) + , Title(InSessionTitle) + , Context(InContext) + , PrimaryObject(InPrimaryObject) + { + } + + bool IsValid() const + { + return TransactionId.IsValid() && OperationId.IsValid(); + } + + /** Unique identifier for the transaction, used to track it during its lifetime */ + FGuid TransactionId; + /** Unique identifier for the active operation on the transaction (if any) */ + FGuid OperationId; + /** Descriptive title of the transaction */ + FText Title; + /** The context that generated the transaction */ + FString Context; + /** The primary UObject for the transaction (if any).
*/ + UObject* PrimaryObject; +}; + + /** * Interface for transaction object annotations. * @@ -20,11 +79,12 @@ typedef void(*STRUCT_DTOR)( void* TPtr ); // destruct * on the UObject that a modification was performed on, but it does not see other changes that may have * to be remembered in order to properly restore the object internals. */ -class ITransactionObjectAnnotation -{ +class ITransactionObjectAnnotation +{ public: - virtual ~ITransactionObjectAnnotation() {} + virtual ~ITransactionObjectAnnotation() = default; virtual void AddReferencedObjects(class FReferenceCollector& Collector) = 0; + virtual void Serialize(class FArchive& Ar) = 0; }; @@ -44,6 +104,19 @@ struct FTransactionObjectDeltaChange return bHasNameChange || bHasOuterChange || bHasPendingKillChange || bHasNonPropertyChanges || ChangedProperties.Num() > 0; } + void Merge(const FTransactionObjectDeltaChange& InOther) + { + bHasNameChange |= InOther.bHasNameChange; + bHasOuterChange |= InOther.bHasOuterChange; + bHasPendingKillChange |= InOther.bHasPendingKillChange; + bHasNonPropertyChanges |= InOther.bHasNonPropertyChanges; + + for (const FName& OtherChangedPropName : InOther.ChangedProperties) + { + ChangedProperties.AddUnique(OtherChangedPropName); + } + } + /** True if the object name has changed */ bool bHasNameChange : 1; /** True of the object outer has changed */ @@ -79,14 +152,32 @@ enum class ETransactionObjectEventType : uint8 class FTransactionObjectEvent { public: - FTransactionObjectEvent(const ETransactionObjectEventType InEventType, const FTransactionObjectDeltaChange& InDeltaChange, const TSharedPtr& InAnnotation, const FName InOriginalObjectName, const FName InOriginalObjectPathName, const FName InOriginalObjectOuterPathName) - : EventType(InEventType) + FTransactionObjectEvent() = default; + + FTransactionObjectEvent(const FGuid& InTransactionId, const FGuid& InOperationId, const ETransactionObjectEventType InEventType, const FTransactionObjectDeltaChange& InDeltaChange, const 
TSharedPtr& InAnnotation, const FName InOriginalObjectName, const FName InOriginalObjectPathName, const FName InOriginalObjectOuterPathName) + : TransactionId(InTransactionId) + , OperationId(InOperationId) + , EventType(InEventType) , DeltaChange(InDeltaChange) , Annotation(InAnnotation) , OriginalObjectName(InOriginalObjectName) , OriginalObjectPathName(InOriginalObjectPathName) , OriginalObjectOuterPathName(InOriginalObjectOuterPathName) { + check(TransactionId.IsValid()); + check(OperationId.IsValid()); + } + + /** The unique identifier of the transaction this event belongs to */ + const FGuid& GetTransactionId() const + { + return TransactionId; + } + + /** The unique identifier for the active operation on the transaction this event belongs to */ + const FGuid& GetOperationId() const + { + return OperationId; } /** What kind of action caused this event? */ @@ -132,9 +223,9 @@ public: } /** Were any non-property changes made to the object? */ - bool HasNonPropertyChanges() const + bool HasNonPropertyChanges(const bool InSerializationOnly = false) const { - return DeltaChange.bHasNameChange || DeltaChange.bHasOuterChange || DeltaChange.bHasPendingKillChange || DeltaChange.bHasNonPropertyChanges; + return (!InSerializationOnly && (DeltaChange.bHasNameChange || DeltaChange.bHasOuterChange || DeltaChange.bHasPendingKillChange)) || DeltaChange.bHasNonPropertyChanges; } /** Were any property changes made to the object? */ @@ -150,12 +241,25 @@ public: } /** Get the annotation object associated with the object being transacted (if any). 
*/ - TSharedPtr GetAnnotation() const + TSharedPtr GetAnnotation() const { return Annotation; } + /** Merge this transaction event with another */ + void Merge(const FTransactionObjectEvent& InOther) + { + if (EventType == ETransactionObjectEventType::Snapshot) + { + EventType = InOther.EventType; + } + + DeltaChange.Merge(InOther.DeltaChange); + } + private: + FGuid TransactionId; + FGuid OperationId; ETransactionObjectEventType EventType; FTransactionObjectDeltaChange DeltaChange; TSharedPtr Annotation; @@ -164,7 +268,6 @@ private: FName OriginalObjectOuterPathName; }; - /** * Interface for transactions. * @@ -175,11 +278,23 @@ class ITransaction { public: + /** BeginOperation should be called when a transaction or undo/redo starts */ + virtual void BeginOperation() = 0; + + /** EndOperation should be called when a transaction is finalized or canceled or undo/redo ends */ + virtual void EndOperation() = 0; + /** Called when this transaction is completed to finalize the transaction */ - virtual void Finalize( ) = 0; + virtual void Finalize() = 0; /** Applies the transaction. */ - virtual void Apply( ) = 0; + virtual void Apply() = 0; + + /** Gets the full context for the transaction */ + virtual FTransactionContext GetContext() const = 0; + + /** @returns if this transaction tracks PIE objects */ + virtual bool ContainsPieObjects() const = 0; /** * Saves an array to the transaction. 
diff --git a/Engine/Source/Runtime/Core/Public/Serialization/MemoryReader.h b/Engine/Source/Runtime/Core/Public/Serialization/MemoryReader.h index 766e691b006e..4e5f85a633ae 100644 --- a/Engine/Source/Runtime/Core/Public/Serialization/MemoryReader.h +++ b/Engine/Source/Runtime/Core/Public/Serialization/MemoryReader.h @@ -11,7 +11,7 @@ /** * Archive for reading arbitrary data from the specified memory location */ -class FMemoryReader final : public FMemoryArchive +class FMemoryReader : public FMemoryArchive { public: /** diff --git a/Engine/Source/Runtime/Core/Public/UObject/EnterpriseObjectVersion.h b/Engine/Source/Runtime/Core/Public/UObject/EnterpriseObjectVersion.h index 111aa9315198..492bd18f3706 100644 --- a/Engine/Source/Runtime/Core/Public/UObject/EnterpriseObjectVersion.h +++ b/Engine/Source/Runtime/Core/Public/UObject/EnterpriseObjectVersion.h @@ -20,6 +20,9 @@ struct CORE_API FEnterpriseObjectVersion // Update FMediaFrameworkCaptureCameraViewportCameraOutputInfo with LazyObjectPtr MediaFrameworkUserDataLazyObject, + + // Live Link timecode synchronization updates + LiveLinkTimeSynchronization, // ------------------------------------------------------ VersionPlusOne, diff --git a/Engine/Source/Runtime/CoreUObject/Private/UObject/Obj.cpp b/Engine/Source/Runtime/CoreUObject/Private/UObject/Obj.cpp index b96b1562f2a5..de5d575f089d 100644 --- a/Engine/Source/Runtime/CoreUObject/Private/UObject/Obj.cpp +++ b/Engine/Source/Runtime/CoreUObject/Private/UObject/Obj.cpp @@ -13,6 +13,7 @@ #include "Misc/ConfigCacheIni.h" #include "Misc/CoreDelegates.h" #include "Misc/App.h" +#include "Misc/ITransaction.h" #include "Modules/ModuleManager.h" #include "UObject/ObjectMacros.h" #include "UObject/UObjectGlobals.h" @@ -542,6 +543,25 @@ void UObject::PostTransacted(const FTransactionObjectEvent& TransactionEvent) FCoreUObjectDelegates::OnObjectTransacted.Broadcast(this, TransactionEvent); } +TSharedPtr UObject::FindOrCreateTransactionAnnotation() const +{ + return
FactoryTransactionAnnotation(ETransactionAnnotationCreationMode::FindOrCreate); +} + +TSharedPtr UObject::CreateAndRestoreTransactionAnnotation(FArchive& Ar) const +{ + TSharedPtr TransactionAnnotation = FactoryTransactionAnnotation(ETransactionAnnotationCreationMode::DefaultInstance); + if (TransactionAnnotation.IsValid()) + { + TransactionAnnotation->Serialize(Ar); + if (Ar.IsError()) + { + TransactionAnnotation.Reset(); + } + } + return TransactionAnnotation; +} + bool UObject::IsSelectedInEditor() const { return !IsPendingKill() && GSelectedObjectAnnotation.Get(this); @@ -1119,9 +1139,10 @@ bool UObject::Modify( bool bAlwaysMarkDirty/*=true*/ ) if (CanModify()) { - // Do not consider PIE world objects or script packages, as they should never end up in the + // Do not consider script packages, as they should never end up in the // transaction buffer and we don't want to mark them dirty here either. - if (GetOutermost()->HasAnyPackageFlags(PKG_PlayInEditor | PKG_ContainsScript | PKG_CompiledIn) == false || GetClass()->HasAnyClassFlags(CLASS_DefaultConfig | CLASS_Config)) + // We do want to consider PIE objects however + if (GetOutermost()->HasAnyPackageFlags(PKG_ContainsScript | PKG_CompiledIn) == false || GetClass()->HasAnyClassFlags(CLASS_DefaultConfig | CLASS_Config)) { // Attempt to mark the package dirty and save a copy of the object to the transaction // buffer. The save will fail if there isn't a valid transactor, the object isn't diff --git a/Engine/Source/Runtime/CoreUObject/Private/UObject/PropertyTag.cpp b/Engine/Source/Runtime/CoreUObject/Private/UObject/PropertyTag.cpp index 2bea9d0a423c..4d92f1e98a54 100644 --- a/Engine/Source/Runtime/CoreUObject/Private/UObject/PropertyTag.cpp +++ b/Engine/Source/Runtime/CoreUObject/Private/UObject/PropertyTag.cpp @@ -14,7 +14,8 @@ FPropertyTag // Constructors. 
FPropertyTag::FPropertyTag() - : Type (NAME_None) + : Prop (nullptr) + , Type (NAME_None) , BoolVal (0) , Name (NAME_None) , StructName(NAME_None) @@ -25,10 +26,12 @@ FPropertyTag::FPropertyTag() , ArrayIndex(INDEX_NONE) , SizeOffset(INDEX_NONE) , HasPropertyGuid(0) -{} +{ +} FPropertyTag::FPropertyTag( FArchive& InSaveAr, UProperty* Property, int32 InIndex, uint8* Value, uint8* Defaults ) - : Type (Property->GetID()) + : Prop (Property) + , Type (Property->GetID()) , BoolVal (0) , Name (Property->GetFName()) , StructName(NAME_None) @@ -105,6 +108,8 @@ void operator<<(FStructuredArchive::FSlot Slot, FPropertyTag& Tag) FStructuredArchive::FRecord Record = Slot.EnterRecord(); int32 Version = UnderlyingArchive.UE4Ver(); + checkf(!UnderlyingArchive.IsSaving() || Tag.Prop, TEXT("FPropertyTag must be constructed with a valid property when used for saving data!")); + // Name. Record << NAMED_ITEM("Name", Tag.Name); if ((Tag.Name == NAME_None) || !Tag.Name.IsValid()) @@ -135,7 +140,15 @@ void operator<<(FStructuredArchive::FSlot Slot, FPropertyTag& Tag) // only need to serialize this for bools else if (Tag.Type == NAME_BoolProperty && !UnderlyingArchive.IsTextFormat()) { - Record << NAMED_ITEM("BoolVal", Tag.BoolVal); + if (UnderlyingArchive.IsSaving()) + { + FSerializedPropertyScope SerializedProperty(UnderlyingArchive, Tag.Prop); + Record << NAMED_ITEM("BoolVal", Tag.BoolVal); + } + else + { + Record << NAMED_ITEM("BoolVal", Tag.BoolVal); + } } // only need to serialize this for bytes/enums else if (Tag.Type == NAME_ByteProperty || Tag.Type == NAME_EnumProperty) diff --git a/Engine/Source/Runtime/CoreUObject/Private/UObject/UObjectGlobals.cpp b/Engine/Source/Runtime/CoreUObject/Private/UObject/UObjectGlobals.cpp index 392d2d450e89..95fe06e5034d 100644 --- a/Engine/Source/Runtime/CoreUObject/Private/UObject/UObjectGlobals.cpp +++ b/Engine/Source/Runtime/CoreUObject/Private/UObject/UObjectGlobals.cpp @@ -2197,12 +2197,13 @@ bool SaveToTransactionBuffer(UObject* Object, 
bool bMarkDirty) { bool bSavedToTransactionBuffer = false; - // Neither PIE world objects nor script packages should end up in the transaction buffer. Additionally, in order + // Script packages should not end up in the transaction buffer. + // PIE objects should go through however. Additionally, in order // to save a copy of the object, we must have a transactor and the object must be transactional. - const bool IsTransactional = Object->HasAnyFlags(RF_Transactional); - const bool IsNotPIEOrContainsScriptObject = (Object->GetOutermost()->HasAnyPackageFlags( PKG_PlayInEditor | PKG_ContainsScript) == false); + const bool bIsTransactional = Object->HasAnyFlags(RF_Transactional); + const bool bIsNotScriptPackage = (Object->GetOutermost()->HasAnyPackageFlags(PKG_ContainsScript) == false); - if ( GUndo && IsTransactional && IsNotPIEOrContainsScriptObject ) + if ( GUndo && bIsTransactional && bIsNotScriptPackage) { // Mark the package dirty, if requested if ( bMarkDirty ) @@ -2227,12 +2228,13 @@ bool SaveToTransactionBuffer(UObject* Object, bool bMarkDirty) */ void SnapshotTransactionBuffer(UObject* Object) { - // Neither PIE world objects nor script packages should end up in the transaction buffer. Additionally, in order + // Script packages should not end up in the transaction buffer. + // PIE objects should go through however. Additionally, in order // to save a copy of the object, we must have a transactor and the object must be transactional. 
- const bool IsTransactional = Object->HasAnyFlags(RF_Transactional); - const bool IsNotPIEOrContainsScriptObject = (Object->GetOutermost()->HasAnyPackageFlags(PKG_PlayInEditor | PKG_ContainsScript) == false); + const bool bIsTransactional = Object->HasAnyFlags(RF_Transactional); + const bool bIsNotScriptPackage = (Object->GetOutermost()->HasAnyPackageFlags(PKG_ContainsScript) == false); - if (GUndo && IsTransactional && IsNotPIEOrContainsScriptObject) + if (GUndo && bIsTransactional && bIsNotScriptPackage) { GUndo->SnapshotObject(Object); } diff --git a/Engine/Source/Runtime/CoreUObject/Public/UObject/GCObjectScopeGuard.h b/Engine/Source/Runtime/CoreUObject/Public/UObject/GCObjectScopeGuard.h index 15c5ceb230e5..f561b4064ca6 100644 --- a/Engine/Source/Runtime/CoreUObject/Public/UObject/GCObjectScopeGuard.h +++ b/Engine/Source/Runtime/CoreUObject/Public/UObject/GCObjectScopeGuard.h @@ -33,3 +33,35 @@ public: private: const UObject* Object; }; + +/** + * Specific implementation of FGCObject that prevents an array of UObject-based pointers from being GC'd while this guard is in scope. 
+ */ +template +class TGCObjectsScopeGuard : public FGCObject +{ + static_assert(TPointerIsConvertibleFromTo::Value, "TGCObjectsScopeGuard: ObjectType must be pointers to a type derived from UObject"); + +public: + explicit TGCObjectsScopeGuard(const TArray& InObjects) + : Objects(InObjects) + { + } + + virtual ~TGCObjectsScopeGuard() + { + } + + /** Non-copyable */ + TGCObjectsScopeGuard(const TGCObjectsScopeGuard&) = delete; + TGCObjectsScopeGuard& operator=(const TGCObjectsScopeGuard&) = delete; + + //~ FGCObject interface + virtual void AddReferencedObjects(FReferenceCollector& Collector) override + { + Collector.AddReferencedObjects(Objects); + } + +private: + TArray Objects; +}; diff --git a/Engine/Source/Runtime/CoreUObject/Public/UObject/Object.h b/Engine/Source/Runtime/CoreUObject/Public/UObject/Object.h index 137dc126ebbe..777141d0b793 100644 --- a/Engine/Source/Runtime/CoreUObject/Public/UObject/Object.h +++ b/Engine/Source/Runtime/CoreUObject/Public/UObject/Object.h @@ -314,9 +314,6 @@ public: */ virtual void PostEditChangeChainProperty( struct FPropertyChangedChainEvent& PropertyChangedEvent ); - /** Gathers external data required for applying an undo transaction */ - virtual TSharedPtr GetTransactionAnnotation() const { return NULL; } - /** Called before applying a transaction to the object. Default implementation simply calls PreEditChange. 
*/ virtual void PreEditUndo(); @@ -333,6 +330,17 @@ public: */ virtual void PostTransacted(const FTransactionObjectEvent& TransactionEvent); + /** Find or create and populate an annotation object with any external data required for applying a transaction */ + TSharedPtr FindOrCreateTransactionAnnotation() const; + + /** Create and restore a previously serialized annotation object with any external data required for applying a transaction */ + TSharedPtr CreateAndRestoreTransactionAnnotation(FArchive& Ar) const; + +protected: + /** Factory a new annotation object and optionally populate it with data */ + enum class ETransactionAnnotationCreationMode : uint8 { DefaultInstance, FindOrCreate }; + virtual TSharedPtr FactoryTransactionAnnotation(const ETransactionAnnotationCreationMode InCreationMode) const { return nullptr; } + private: /** * Test the selection state of a UObject diff --git a/Engine/Source/Runtime/CoreUObject/Public/UObject/PropertyTag.h b/Engine/Source/Runtime/CoreUObject/Public/UObject/PropertyTag.h index c6b2fb793a15..13dd03c2f9b4 100644 --- a/Engine/Source/Runtime/CoreUObject/Public/UObject/PropertyTag.h +++ b/Engine/Source/Runtime/CoreUObject/Public/UObject/PropertyTag.h @@ -14,6 +14,9 @@ */ struct FPropertyTag { + // Transient. + UProperty* Prop; + // Variables. FName Type; // Type of property uint8 BoolVal; // a boolean property's value (never need to serialize data for bool properties except here) diff --git a/Engine/Source/Runtime/CoreUObject/Public/UObject/StructOnScope.h b/Engine/Source/Runtime/CoreUObject/Public/UObject/StructOnScope.h index a1d023fc928b..1525b34c3798 100644 --- a/Engine/Source/Runtime/CoreUObject/Public/UObject/StructOnScope.h +++ b/Engine/Source/Runtime/CoreUObject/Public/UObject/StructOnScope.h @@ -17,24 +17,24 @@ protected: /** Whether the struct memory is owned by this instance. 
*/ bool OwnsMemory; - FStructOnScope() - : SampleStructMemory(nullptr) - , OwnsMemory(false) - { - } - virtual void Initialize() { - if (ScriptStruct.IsValid()) + if (const UStruct* ScriptStructPtr = ScriptStruct.Get()) { - SampleStructMemory = (uint8*)FMemory::Malloc(ScriptStruct->GetStructureSize() ? ScriptStruct->GetStructureSize() : 1); - ScriptStruct.Get()->InitializeStruct(SampleStructMemory); + SampleStructMemory = (uint8*)FMemory::Malloc(ScriptStructPtr->GetStructureSize() ? ScriptStructPtr->GetStructureSize() : 1); + ScriptStructPtr->InitializeStruct(SampleStructMemory); OwnsMemory = true; } } public: + FStructOnScope() + : SampleStructMemory(nullptr) + , OwnsMemory(false) + { + } + FStructOnScope(const UStruct* InScriptStruct) : ScriptStruct(InScriptStruct) , SampleStructMemory(nullptr) @@ -50,6 +50,40 @@ public: { } + FStructOnScope(FStructOnScope&& InOther) + { + ScriptStruct = InOther.ScriptStruct; + SampleStructMemory = InOther.SampleStructMemory; + OwnsMemory = InOther.OwnsMemory; + + InOther.OwnsMemory = false; + InOther.Reset(); + } + + FStructOnScope& operator=(FStructOnScope&& InOther) + { + if (this != &InOther) + { + Reset(); + + ScriptStruct = InOther.ScriptStruct; + SampleStructMemory = InOther.SampleStructMemory; + OwnsMemory = InOther.OwnsMemory; + + InOther.OwnsMemory = false; + InOther.Reset(); + } + return *this; + } + + FStructOnScope(const FStructOnScope&) = delete; + FStructOnScope& operator=(const FStructOnScope&) = delete; + + virtual bool OwnsStructMemory() const + { + return OwnsMemory; + } + virtual uint8* GetStructMemory() { return SampleStructMemory; @@ -87,19 +121,31 @@ public: return; } - if (ScriptStruct.IsValid() && SampleStructMemory) + if (const UStruct* ScriptStructPtr = ScriptStruct.Get()) { - ScriptStruct.Get()->DestroyStruct(SampleStructMemory); - ScriptStruct = NULL; + if (SampleStructMemory) + { + ScriptStructPtr->DestroyStruct(SampleStructMemory); + } + ScriptStruct = nullptr; } if (SampleStructMemory) { 
FMemory::Free(SampleStructMemory); - SampleStructMemory = NULL; + SampleStructMemory = nullptr; } } + virtual void Reset() + { + Destroy(); + + ScriptStruct = nullptr; + SampleStructMemory = nullptr; + OwnsMemory = false; + } + virtual ~FStructOnScope() { Destroy(); @@ -108,12 +154,8 @@ public: /** Re-initializes the scope with a specified UStruct */ void Initialize(TWeakObjectPtr InScriptStruct) { + Destroy(); ScriptStruct = InScriptStruct; Initialize(); } - -private: - - FStructOnScope(const FStructOnScope&); - FStructOnScope& operator=(const FStructOnScope&); }; diff --git a/Engine/Source/Runtime/Engine/Classes/Components/SceneComponent.h b/Engine/Source/Runtime/Engine/Classes/Components/SceneComponent.h index 8f44f7402c22..f36daad7a395 100644 --- a/Engine/Source/Runtime/Engine/Classes/Components/SceneComponent.h +++ b/Engine/Source/Runtime/Engine/Classes/Components/SceneComponent.h @@ -1107,7 +1107,7 @@ public: #if WITH_EDITOR /** Called when this component is moved in the editor */ - virtual void PostEditComponentMove(bool bFinished) {} + virtual void PostEditComponentMove(bool bFinished); virtual bool CanEditChange( const UProperty* Property ) const override; virtual const int32 GetNumUncachedStaticLightingInteractions() const; diff --git a/Engine/Source/Runtime/Engine/Classes/EditorFramework/AssetImportData.h b/Engine/Source/Runtime/Engine/Classes/EditorFramework/AssetImportData.h index 6b8efa1cb3a6..47967d40f945 100644 --- a/Engine/Source/Runtime/Engine/Classes/EditorFramework/AssetImportData.h +++ b/Engine/Source/Runtime/Engine/Classes/EditorFramework/AssetImportData.h @@ -125,6 +125,7 @@ public: /** Convert an absolute import path so that it's relative to either this object's package, BaseDir() or leave it absolute */ static FString SanitizeImportFilename(const FString& InPath, const UPackage* Outermost); + static FString SanitizeImportFilename(const FString& InPath, const FString& PackagePath); virtual void PostLoad() override; diff --git 
a/Engine/Source/Runtime/Engine/Classes/Engine/Engine.h b/Engine/Source/Runtime/Engine/Classes/Engine/Engine.h index eccc3dcd5558..082532c5b918 100644 --- a/Engine/Source/Runtime/Engine/Classes/Engine/Engine.h +++ b/Engine/Source/Runtime/Engine/Classes/Engine/Engine.h @@ -1284,7 +1284,11 @@ public: private: /** Control how the Engine process the Framerate/Timestep */ UPROPERTY(transient) - UEngineCustomTimeStep* CustomTimeStep; + UEngineCustomTimeStep* DefaultCustomTimeStep; + + /** Control how the Engine process the Framerate/Timestep */ + UPROPERTY(transient) + UEngineCustomTimeStep* CurrentCustomTimeStep; public: /** @@ -1296,20 +1300,39 @@ public: FSoftClassPath CustomTimeStepClassName; private: - /** Provide a timecode to the Engine */ + /** + * Default timecode provider that will be used when no custom provider is set. + * This is expected to be valid throughout the entire life of the application. + */ UPROPERTY(transient) - UTimecodeProvider* TimecodeProvider; + UTimecodeProvider* DefaultTimecodeProvider; + + UPROPERTY(transient) + UTimecodeProvider* CustomTimecodeProvider; public: - /** Provide a timecode to the Engine */ + + /** + * Allows UEngine subclasses a chance to override the DefaultTimecodeProvider class. + * This must be set before InitializeObjectReferences is called. + * This is intentionally protected and not exposed to config. + */ + UPROPERTY(config, EditAnywhere, Category=Timecode, meta=(MetaClass="TimecodeProvider", DisplayName="DefaultTimecodeProvider", ConfigRestartRequired=true)) + FSoftClassPath DefaultTimecodeProviderClassName; + + /** + * Override the CustomTimecodeProvider when the engine is started. + * When set, this does not change the DefaultTImecodeProvider class. + * Instead, it will create an instance and set it as the CustomTimecodeProvider. 
+ */ UPROPERTY(config, EditAnywhere, Category=Timecode, meta=(MetaClass="TimecodeProvider", DisplayName="TimecodeProvider", ConfigRestartRequired=true)) FSoftClassPath TimecodeProviderClassName; - + /** * Frame rate used to generated the engine Timecode's frame number when no TimecodeProvider are specified. * It doesn't control the Engine frame rate. The Engine can run faster or slower that the specified TimecodeFrameRate. */ - UPROPERTY(config, EditAnywhere, Category=Timecode) + UPROPERTY(config, EditAnywhere, Category=Timecode, Meta=(ConfigRestartRequired=true)) FFrameRate DefaultTimecodeFrameRate; public: @@ -1903,9 +1926,6 @@ public: virtual void FinishDestroy() override; virtual void Serialize(FArchive& Ar) override; static void AddReferencedObjects(UObject* InThis, FReferenceCollector& Collector); -#if WITH_EDITOR - virtual bool CanEditChange(const UProperty* InProperty) const override; -#endif // #if WITH_EDITOR //~ End UObject Interface. /** Initialize the game engine. */ @@ -2024,6 +2044,9 @@ public: */ void UpdateTimeAndHandleMaxTickRate(); + /** Causes the current CustomTimeStep to be shut down and then reinitialized. */ + void ReinitializeCustomTimeStep(); + /** * Set the CustomTimeStep that will control the Engine Framerate/Timestep * @@ -2032,7 +2055,13 @@ public: bool SetCustomTimeStep(UEngineCustomTimeStep* InCustomTimeStep); /** Get the CustomTimeStep that control the Engine Framerate/Timestep */ - UEngineCustomTimeStep* GetCustomTimeStep() const { return CustomTimeStep; }; + UEngineCustomTimeStep* GetCustomTimeStep() const { return CurrentCustomTimeStep; }; + + /** + * Get the DefaultCustomTimeStep created at the engine initialization. + * This may be null if no CustomTimeStep was defined in the project settings and may not be currently active. 
+ */ + UEngineCustomTimeStep* GetDefaultCustomTimeStep() const { return DefaultCustomTimeStep; } /** Executes the deferred commands */ void TickDeferredCommands(); @@ -2055,15 +2084,36 @@ public: /** Update FApp::Timecode. */ void UpdateTimecode(); + /** Causes the current TimecodeProvider to be shut down and then reinitialized. */ + void ReinitializeTimecodeProvider(); + /** - * Get the TimecodeProvider that control the FApp::Timecode + * Sets the CustomTimecodeProvider for the engine, shutting down the the default provider if necessary. + * Passing nullptr will clear the current CustomTimecodeProvider, and re-initialize the default. * - * @return true if the TimecodeProvider was properly initialized + * @return True if the provider was set (and initialized successfully when non-null). */ bool SetTimecodeProvider(UTimecodeProvider* InTimecodeProvider); - /** Get the TimecodeProvider that control the Engine's Timecode */ - UTimecodeProvider* GetTimecodeProvider() const { return TimecodeProvider; }; + /** + * Get the TimecodeProvider that control the Engine's Timecode + * The return value should always be non-null. + */ + const UTimecodeProvider* GetTimecodeProvider() const { return CustomTimecodeProvider ? CustomTimecodeProvider : GetDefaultTimecodeProvider(); } + + /** + * Get the DefaultTimecodeProvider. + * This should be valid throughout the lifetime of the Engine (although, it may not always be active). + */ + const UTimecodeProvider* GetDefaultTimecodeProvider() const { check(DefaultTimecodeProvider != nullptr); return DefaultTimecodeProvider; } + +protected: + + /** Provide mutable access to the current TimecodeProvider to Engine. This is needed to Initialize / Shutdown providers. 
*/ + UTimecodeProvider* GetTimecodeProviderProtected() { return const_cast(const_cast(this)->GetTimecodeProvider()); } + UTimecodeProvider* GetDefaultTimecodeProviderProtected() { return const_cast(const_cast(this)->GetDefaultTimecodeProvider()); } + +public: /** * Pauses / un-pauses the game-play when focus of the game's window gets lost / gained. @@ -3106,6 +3156,14 @@ private: /** Allows subclasses to pass the failure to a UGameInstance if possible (mainly for blueprints) */ virtual void HandleTravelFailure_NotifyGameInstance(UWorld* World, ETravelFailure::Type FailureType); +public: +#if WITH_EDITOR + //~ Begin Transaction Interfaces. + virtual int32 BeginTransaction(const TCHAR* TransactionContext, const FText& Description, UObject* PrimaryObject) { return INDEX_NONE; } + virtual int32 EndTransaction() { return INDEX_NONE; } + virtual void CancelTransaction(int32 Index) { } +#endif + public: /** * Delegate we fire every time a new stat has been registered. diff --git a/Engine/Source/Runtime/Engine/Classes/Engine/GameViewportClient.h b/Engine/Source/Runtime/Engine/Classes/Engine/GameViewportClient.h index 8e1ac09637ec..d3f41f325110 100644 --- a/Engine/Source/Runtime/Engine/Classes/Engine/GameViewportClient.h +++ b/Engine/Source/Runtime/Engine/Classes/Engine/GameViewportClient.h @@ -775,6 +775,12 @@ public: void SetVirtualCursorWidget(EMouseCursor::Type Cursor, class UUserWidget* Widget); + /** Adds a cursor to the set based on the enum and the class reference to it. */ + void AddSoftwareCursor(EMouseCursor::Type Cursor, const FSoftClassPath& CursorClass); + + /** Does the viewport client have a software cursor set up for the given enum? */ + bool HasSoftwareCursor(EMouseCursor::Type Cursor) const; + private: /** Resets the platform type shape to nullptr, to restore it to the OS default. 
*/ void ResetHardwareCursorStates(); @@ -824,9 +830,6 @@ private: /** Delegate handler for when a window DPI changes and we might need to adjust the scenes resolution */ void HandleWindowDPIScaleChanged(TSharedRef InWindow); - /** Adds a cursor to the set based on the enum and the class reference to it. */ - void AddSoftwareCursor(EMouseCursor::Type Cursor, const FSoftClassPath& CursorClass); - private: /** Slate window associated with this viewport client. The same window may host more than one viewport client. */ TWeakPtr Window; diff --git a/Engine/Source/Runtime/Engine/Classes/Engine/MeshMerging.h b/Engine/Source/Runtime/Engine/Classes/Engine/MeshMerging.h index bfd5176e42ab..c682c759f1bb 100644 --- a/Engine/Source/Runtime/Engine/Classes/Engine/MeshMerging.h +++ b/Engine/Source/Runtime/Engine/Classes/Engine/MeshMerging.h @@ -179,7 +179,7 @@ namespace EProxyNormalComputationMethod } -USTRUCT() +USTRUCT(Blueprintable) struct FMeshProxySettings { GENERATED_USTRUCT_BODY() @@ -233,7 +233,7 @@ struct FMeshProxySettings UPROPERTY(EditAnywhere, Category = MaxRayCastDist, meta = (InlineEditConditionToggle)) bool bOverrideTransferDistance; - /** Override search distance used when discovering texture values for simplified geometry. Useful when non-zero Merge Distance setting generates new geometry in concave corners.*/ + /** Override search distance used when discovering texture values for simplified geometry. Useful when non-zero Merge Distance setting generates new geometry in concave corners.*/ UPROPERTY(EditAnywhere, Category = ProxySettings, meta = (EditCondition = "bOverrideTransferDistance", DisplayName = "Transfer Distance Override", ClampMin = 0)) float MaxRayCastDist; @@ -276,7 +276,7 @@ struct FMeshProxySettings UPROPERTY(EditAnywhere, Category = ProxySettings) bool bAllowAdjacency; - /** Whether to allow distance field to be computed for this mesh. Disable this to save memory if you mesh will only rendered in the distance. 
*/ + /** Whether to allow distance field to be computed for this mesh. Disable this to save memory if the merged mesh will only be rendered in the distance. */ UPROPERTY(EditAnywhere, Category = ProxySettings) bool bAllowDistanceField; @@ -284,7 +284,7 @@ struct FMeshProxySettings UPROPERTY(EditAnywhere, Category = ProxySettings) bool bReuseMeshLightmapUVs; - /** Whether to generate collision for the proxy mesh */ + /** Whether to generate collision for the merged mesh */ UPROPERTY(EditAnywhere, Category = ProxySettings) bool bCreateCollision; @@ -475,7 +475,7 @@ struct FMeshMergingSettings UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = MeshSettings) bool bIncludeImposters; - /** Whether to allow distance field to be computed for this mesh. Disable this to save memory if you mesh will only rendered in the distance. */ + /** Whether to allow distance field to be computed for this mesh. Disable this to save memory if the merged mesh will only be rendered in the distance. */ UPROPERTY(EditAnywhere, Category = MeshSettings) bool bAllowDistanceField; diff --git a/Engine/Source/Runtime/Engine/Classes/Engine/StaticMesh.h b/Engine/Source/Runtime/Engine/Classes/Engine/StaticMesh.h index 23f7757ec653..8f590c81fa26 100644 --- a/Engine/Source/Runtime/Engine/Classes/Engine/StaticMesh.h +++ b/Engine/Source/Runtime/Engine/Classes/Engine/StaticMesh.h @@ -790,6 +790,16 @@ public: * @return true if the UV channel was removed. */ ENGINE_API bool RemoveUVChannel(int32 LODIndex, int32 UVChannelIndex); + + /** + * Sets the texture coordinates at the specified UV channel index on the given LOD of a StaticMesh. + * @param LODIndex Index of the StaticMesh LOD. + * @param UVChannelIndex Index where to remove the UV channel. + * @param TexCoords The texture coordinates to set on the UV channel. + * @return true if the UV channel could be set. 
+ */ + ENGINE_API bool SetUVChannel(int32 LODIndex, int32 UVChannelIndex, const TArray& TexCoords); + #endif /** diff --git a/Engine/Source/Runtime/Engine/Classes/Engine/SystemTimeTimecodeProvider.h b/Engine/Source/Runtime/Engine/Classes/Engine/SystemTimeTimecodeProvider.h new file mode 100644 index 000000000000..fc8ec36cbe59 --- /dev/null +++ b/Engine/Source/Runtime/Engine/Classes/Engine/SystemTimeTimecodeProvider.h @@ -0,0 +1,63 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "TimecodeProvider.h" +#include "Misc/FrameRate.h" +#include "UObject/UnrealType.h" +#include "SystemTimeTimecodeProvider.generated.h" + +/** + * Converts the current system time to timecode, relative to a provided frame rate. + */ +UCLASS() +class ENGINE_API USystemTimeTimecodeProvider : public UTimecodeProvider +{ + GENERATED_BODY() + +private: + + UPROPERTY(EditAnywhere, Category = Timecode) + FFrameRate FrameRate; + + ETimecodeProviderSynchronizationState State; + +public: + + USystemTimeTimecodeProvider(): + FrameRate(60, 1), + State(ETimecodeProviderSynchronizationState::Closed) + { + } + + //~ Begin UTimecodeProvider Interface + virtual FTimecode GetTimecode() const override; + + virtual FFrameRate GetFrameRate() const override + { + return FrameRate; + } + + virtual ETimecodeProviderSynchronizationState GetSynchronizationState() const override + { + return State; + } + + virtual bool Initialize(class UEngine* InEngine) override + { + State = ETimecodeProviderSynchronizationState::Synchronized; + return true; + } + + virtual void Shutdown(class UEngine* InEngine) override + { + State = ETimecodeProviderSynchronizationState::Closed; + } + //~ End UTimecodeProvider Interface + + UFUNCTION() + void SetFrameRate(const FFrameRate& InFrameRate) + { + FrameRate = InFrameRate; + } +}; diff --git a/Engine/Source/Runtime/Engine/Classes/Engine/TimecodeProvider.h b/Engine/Source/Runtime/Engine/Classes/Engine/TimecodeProvider.h index 
e14d74bd1399..e72b34fe5e4c 100644 --- a/Engine/Source/Runtime/Engine/Classes/Engine/TimecodeProvider.h +++ b/Engine/Source/Runtime/Engine/Classes/Engine/TimecodeProvider.h @@ -31,8 +31,10 @@ enum class ETimecodeProviderSynchronizationState }; /** - * A class responsible of fetching a timecode from a source. - */ + * A class responsible of fetching a timecode from a source. + * Note, FApp::GetTimecode and FApp::GetTimecodeFramerate should be used to retrieve + * the current system Timecode and Framerate. + */ UCLASS(abstract) class ENGINE_API UTimecodeProvider : public UObject { @@ -63,13 +65,4 @@ public: /** This Provider stopped being the Engine's Provider. */ virtual void Shutdown(class UEngine* InEngine) PURE_VIRTUAL(UTimecodeProvider::Shutdown, ); - -public: - /** - * Calculate a timecode from the system clock. - * The frame number will depends on the FrameRate. - * Will take into account DropFrame if the FrameRate support it. - * The frame number will be clamp to a max of 60. - */ - static FTimecode GetSystemTimeTimecode(const FFrameRate& InForFrameRate); }; diff --git a/Engine/Source/Runtime/Engine/Classes/GameFramework/Actor.h b/Engine/Source/Runtime/Engine/Classes/GameFramework/Actor.h index 947144079e8b..35d69bb8d1bc 100644 --- a/Engine/Source/Runtime/Engine/Classes/GameFramework/Actor.h +++ b/Engine/Source/Runtime/Engine/Classes/GameFramework/Actor.h @@ -1519,6 +1519,8 @@ public: FName AttachParentName; FName SocketName; FTransform RelativeTransform; + + friend FArchive& operator<<(FArchive& Ar, FAttachedActorInfo& ActorInfo); }; // The RootComponent's transform @@ -1532,28 +1534,45 @@ public: // Actors that are attached to this RootComponent TArray AttachedToInfo; + + friend FArchive& operator<<(FArchive& Ar, FActorRootComponentReconstructionData& RootComponentData); }; class FActorTransactionAnnotation : public ITransactionObjectAnnotation { public: - FActorTransactionAnnotation(const AActor* Actor, const bool bCacheRootComponentData = true); + /** 
Create an empty instance */ + static TSharedRef Create(); + /** Create an instance from the given actor, optionally caching root component data */ + static TSharedRef Create(const AActor* InActor, const bool InCacheRootComponentData = true); + + /** Create an instance from the given actor if required (UActorTransactionAnnotation::HasInstanceData would return true), optionally caching root component data */ + static TSharedPtr CreateIfRequired(const AActor* InActor, const bool InCacheRootComponentData = true); + + //~ ITransactionObjectAnnotation interface virtual void AddReferencedObjects(FReferenceCollector& Collector) override; + virtual void Serialize(FArchive& Ar) override; bool HasInstanceData() const; + // Actor and component data + TWeakObjectPtr Actor; FComponentInstanceDataCache ComponentInstanceData; // Root component reconstruction data bool bRootComponentDataCached; FActorRootComponentReconstructionData RootComponentData; + + private: + FActorTransactionAnnotation(); + FActorTransactionAnnotation(const AActor* InActor, FComponentInstanceDataCache&& InComponentInstanceData, const bool InCacheRootComponentData = true); }; /** Cached pointer to the transaction annotation data from PostEditUndo to be used in the next RerunConstructionScript */ TSharedPtr CurrentTransactionAnnotation; - virtual TSharedPtr GetTransactionAnnotation() const override; + virtual TSharedPtr FactoryTransactionAnnotation(const ETransactionAnnotationCreationMode InCreationMode) const override; virtual void PostEditUndo(TSharedPtr TransactionAnnotation) override; /** @return true if the component is allowed to re-register its components when modified. False for CDOs or PIE instances. 
*/ @@ -1726,7 +1745,7 @@ public: virtual bool IsSelectable() const { return true; } /** @return Returns true if this actor should be shown in the scene outliner */ - bool IsListedInSceneOutliner() const; + virtual bool IsListedInSceneOutliner() const; /** @return Returns true if this actor is allowed to be attached to the given actor */ virtual bool EditorCanAttachTo(const AActor* InParent, FText& OutReason) const; @@ -2238,7 +2257,10 @@ public: /** Invalidate lighting cache with default options. */ void InvalidateLightingCache() { - InvalidateLightingCacheDetailed(false); + if (GIsEditor && !GIsDemoMode) + { + InvalidateLightingCacheDetailed(false); + } } /** Invalidates anything produced by the last lighting build. */ diff --git a/Engine/Source/Runtime/Engine/Classes/Kismet/KismetSystemLibrary.h b/Engine/Source/Runtime/Engine/Classes/Kismet/KismetSystemLibrary.h index fdfed7a30fc0..7c8024a5c9be 100644 --- a/Engine/Source/Runtime/Engine/Classes/Kismet/KismetSystemLibrary.h +++ b/Engine/Source/Runtime/Engine/Classes/Kismet/KismetSystemLibrary.h @@ -349,7 +349,7 @@ class ENGINE_API UKismetSystemLibrary : public UBlueprintFunctionLibrary * @param Duration The display duration (if Print to Screen is True). Using negative number will result in loading the duration time from the config. 
*/ UFUNCTION(BlueprintCallable, meta=(WorldContext="WorldContextObject", CallableWithoutWorldContext, Keywords = "log", AdvancedDisplay = "2", DevelopmentOnly), Category="Utilities|Text") - static void PrintText(UObject* WorldContextObject, const FText InText = FText::FromString(TEXT("Hello")), bool bPrintToScreen = true, bool bPrintToLog = true, FLinearColor TextColor = FLinearColor(0.0, 0.66, 1.0), float Duration = 2.f); + static void PrintText(UObject* WorldContextObject, const FText InText = INVTEXT("Hello"), bool bPrintToScreen = true, bool bPrintToLog = true, FLinearColor TextColor = FLinearColor(0.0, 0.66, 1.0), float Duration = 2.f); /** * Prints a warning string to the log and the screen. Meant to be used as a way to inform the user that they misused the node. @@ -1660,6 +1660,51 @@ class ENGINE_API UKismetSystemLibrary : public UBlueprintFunctionLibrary UFUNCTION(BlueprintCallable, Category="Utilities") static FString GetCommandLine(); + // --- Transactions ------------------------------ + + /** + * Begin a new undo transaction. An undo transaction is defined as all actions which take place when the user selects "undo" a single time. + * @note If there is already an active transaction in progress, then this increments that transaction's action counter instead of beginning a new transaction. + * @note You must call TransactObject before modifying each object that should be included in this undo transaction. + * @note Only available in the editor. + * + * @param Context The context for the undo session. Typically the tool/editor that caused the undo operation. + * @param Description The description for the undo session. This is the text that will appear in the "Edit" menu next to the Undo item. + * @param PrimaryObject The primary object that the undo session operators on (can be null, and mostly is). 
+ * + * @return The number of active actions when BeginTransaction was called (values greater than 0 indicate that there was already an existing undo transaction in progress), or -1 on failure. + */ + UFUNCTION(BlueprintCallable, Category = "Transactions") + static int32 BeginTransaction(const FString& Context, FText Description, UObject* PrimaryObject); + + /** + * Attempt to end the current undo transaction. Only successful if the transaction's action counter is 1. + * @note Only available in the editor. + * + * @return The number of active actions when EndTransaction was called (a value of 1 indicates that the transaction was successfully closed), or -1 on failure. + */ + UFUNCTION(BlueprintCallable, Category = "Transactions") + static int32 EndTransaction(); + + /** + * Cancel the current transaction, and no longer capture actions to be placed in the undo buffer. + * @note Only available in the editor. + * + * @param Index The action counter to cancel transactions from (as returned by a call to BeginTransaction). + */ + UFUNCTION(BlueprintCallable, Category = "Transactions") + static void CancelTransaction(const int32 Index); + + /** + * Notify the current transaction (if any) that this object is about to be modified and should be placed into the undo buffer. + * @note Internally this calls Modify on the given object, so will also mark the owner package dirty. + * @note Only available in the editor. + * + * @param Object The object that is about to be modified. 
+ */ + UFUNCTION(BlueprintCallable, Category = "Transactions") + static void TransactObject(UObject* Object); + // --- Asset Manager ------------------------------ /** Returns the Object associated with a Primary Asset Id, this will only return a valid object if it is in memory, it will not load it */ diff --git a/Engine/Source/Runtime/Engine/Classes/Sound/DialogueWave.h b/Engine/Source/Runtime/Engine/Classes/Sound/DialogueWave.h index fb4bd6b046d9..aafe5e04e8f2 100644 --- a/Engine/Source/Runtime/Engine/Classes/Sound/DialogueWave.h +++ b/Engine/Source/Runtime/Engine/Classes/Sound/DialogueWave.h @@ -131,6 +131,12 @@ public: ENGINE_API bool SupportsContext(const FDialogueContext& Context) const; ENGINE_API USoundBase* GetWaveFromContext(const FDialogueContext& Context) const; ENGINE_API USoundBase* GetWaveFromContext(const FDialogueContextMapping& ContextMapping) const; + ENGINE_API FText GetLocalizedSpokenText() const; + ENGINE_API FText GetLocalizedSpokenText(const FDialogueContext& Context) const; + ENGINE_API FText GetLocalizedSpokenText(const FDialogueContextMapping& ContextMapping) const; + ENGINE_API FText GetLocalizedSubtitle() const; + ENGINE_API FText GetLocalizedSubtitle(const FDialogueContext& Context) const; + ENGINE_API FText GetLocalizedSubtitle(const FDialogueContextMapping& ContextMapping) const; ENGINE_API FString GetContextLocalizationKey(const FDialogueContext& Context) const; ENGINE_API FString GetContextLocalizationKey(const FDialogueContextMapping& ContextMapping) const; ENGINE_API FString GetContextRecordedAudioFilename(const FDialogueContext& Context) const; diff --git a/Engine/Source/Runtime/Engine/Private/Actor.cpp b/Engine/Source/Runtime/Engine/Private/Actor.cpp index 558026d59a1a..15d14ae631e6 100644 --- a/Engine/Source/Runtime/Engine/Private/Actor.cpp +++ b/Engine/Source/Runtime/Engine/Private/Actor.cpp @@ -4344,11 +4344,14 @@ void AActor::DrawDebugComponents(FColor const& BaseColor) const void 
AActor::InvalidateLightingCacheDetailed(bool bTranslationOnly) { - for (UActorComponent* Component : GetComponents()) + if(GIsEditor && !GIsDemoMode) { - if(Component && Component->IsRegistered()) + for (UActorComponent* Component : GetComponents()) { - Component->InvalidateLightingCacheDetailed(true, bTranslationOnly); + if (Component && Component->IsRegistered()) + { + Component->InvalidateLightingCacheDetailed(true, bTranslationOnly); + } } } } diff --git a/Engine/Source/Runtime/Engine/Private/ActorConstruction.cpp b/Engine/Source/Runtime/Engine/Private/ActorConstruction.cpp index ba2043def832..39cad8017f85 100644 --- a/Engine/Source/Runtime/Engine/Private/ActorConstruction.cpp +++ b/Engine/Source/Runtime/Engine/Private/ActorConstruction.cpp @@ -273,7 +273,7 @@ void AActor::RerunConstructionScripts() #if WITH_EDITOR if (!CurrentTransactionAnnotation.IsValid()) { - CurrentTransactionAnnotation = MakeShareable(new FActorTransactionAnnotation(this, false)); + CurrentTransactionAnnotation = FActorTransactionAnnotation::Create(this, false); } FActorTransactionAnnotation* ActorTransactionAnnotation = CurrentTransactionAnnotation.Get(); InstanceDataCache = &ActorTransactionAnnotation->ComponentInstanceData; diff --git a/Engine/Source/Runtime/Engine/Private/ActorEditor.cpp b/Engine/Source/Runtime/Engine/Private/ActorEditor.cpp index 20626ed56162..e744b006da81 100644 --- a/Engine/Source/Runtime/Engine/Private/ActorEditor.cpp +++ b/Engine/Source/Runtime/Engine/Private/ActorEditor.cpp @@ -174,6 +174,13 @@ void AActor::PostEditMove(bool bFinished) { FNavigationSystem::OnPostEditActorMove(*this); } + + if (!bFinished) + { + // Snapshot the transaction buffer for this actor if we've not finished moving yet + // This allows listeners to be notified of intermediate changes of state + SnapshotTransactionBuffer(this); + } } bool AActor::ReregisterComponentsWhenModified() const @@ -268,11 +275,112 @@ void AActor::DebugShowOneComponentHierarchy( USceneComponent* SceneComp, int32& } 
} -AActor::FActorTransactionAnnotation::FActorTransactionAnnotation(const AActor* Actor, const bool bCacheRootComponentData) - : ComponentInstanceData(Actor) +FArchive& operator<<(FArchive& Ar, AActor::FActorRootComponentReconstructionData::FAttachedActorInfo& ActorInfo) { - USceneComponent* ActorRootComponent = Actor->GetRootComponent(); - if (bCacheRootComponentData && ActorRootComponent && ActorRootComponent->IsCreatedByConstructionScript()) + enum class EVersion : uint8 + { + InitialVersion = 0, + // ------------------------------------------------------ + VersionPlusOne, + LatestVersion = VersionPlusOne - 1 + }; + + EVersion Version = EVersion::LatestVersion; + Ar << Version; + + if (Version > EVersion::LatestVersion) + { + Ar.SetError(); + return Ar; + } + + Ar << ActorInfo.Actor; + Ar << ActorInfo.AttachParent; + Ar << ActorInfo.AttachParentName; + Ar << ActorInfo.SocketName; + Ar << ActorInfo.RelativeTransform; + + return Ar; +} + +FArchive& operator<<(FArchive& Ar, AActor::FActorRootComponentReconstructionData& RootComponentData) +{ + enum class EVersion : uint8 + { + InitialVersion = 0, + // ------------------------------------------------------ + VersionPlusOne, + LatestVersion = VersionPlusOne - 1 + }; + + EVersion Version = EVersion::LatestVersion; + Ar << Version; + + if (Version > EVersion::LatestVersion) + { + Ar.SetError(); + return Ar; + } + + Ar << RootComponentData.Transform; + + if (Ar.IsSaving()) + { + FQuat TransformRotationQuat = RootComponentData.TransformRotationCache.GetCachedQuat(); + Ar << TransformRotationQuat; + } + else if (Ar.IsLoading()) + { + FQuat TransformRotationQuat; + Ar << TransformRotationQuat; + RootComponentData.TransformRotationCache.NormalizedQuatToRotator(TransformRotationQuat); + } + + Ar << RootComponentData.AttachedParentInfo; + + Ar << RootComponentData.AttachedToInfo; + + return Ar; +} + +TSharedRef AActor::FActorTransactionAnnotation::Create() +{ + return MakeShareable(new FActorTransactionAnnotation()); +} + 
+TSharedRef AActor::FActorTransactionAnnotation::Create(const AActor* InActor, const bool InCacheRootComponentData) +{ + return MakeShareable(new FActorTransactionAnnotation(InActor, FComponentInstanceDataCache(InActor), InCacheRootComponentData)); +} + +TSharedPtr AActor::FActorTransactionAnnotation::CreateIfRequired(const AActor* InActor, const bool InCacheRootComponentData) +{ + // Don't create a transaction annotation for something that has no instance data, or a root component that's created by a construction script + FComponentInstanceDataCache TempComponentInstanceData(InActor); + if (!TempComponentInstanceData.HasInstanceData()) + { + USceneComponent* ActorRootComponent = InActor->GetRootComponent(); + if (!InCacheRootComponentData || !ActorRootComponent || !ActorRootComponent->IsCreatedByConstructionScript()) + { + return nullptr; + } + } + + return MakeShareable(new FActorTransactionAnnotation(InActor, MoveTemp(TempComponentInstanceData), InCacheRootComponentData)); +} + +AActor::FActorTransactionAnnotation::FActorTransactionAnnotation() + : bRootComponentDataCached(false) +{ +} + +AActor::FActorTransactionAnnotation::FActorTransactionAnnotation(const AActor* InActor, FComponentInstanceDataCache&& InComponentInstanceData, const bool InCacheRootComponentData) + : ComponentInstanceData(MoveTemp(InComponentInstanceData)) +{ + Actor = InActor; + + USceneComponent* ActorRootComponent = InActor->GetRootComponent(); + if (InCacheRootComponentData && ActorRootComponent && ActorRootComponent->IsCreatedByConstructionScript()) { bRootComponentDataCached = true; RootComponentData.Transform = ActorRootComponent->GetComponentTransform(); @@ -291,7 +399,7 @@ AActor::FActorTransactionAnnotation::FActorTransactionAnnotation(const AActor* A for (USceneComponent* AttachChild : ActorRootComponent->GetAttachChildren()) { AActor* ChildOwner = (AttachChild ? 
AttachChild->GetOwner() : NULL); - if (ChildOwner && ChildOwner != Actor) + if (ChildOwner && ChildOwner != InActor) { // Save info about actor to reattach FActorRootComponentReconstructionData::FAttachedActorInfo Info; @@ -313,27 +421,57 @@ void AActor::FActorTransactionAnnotation::AddReferencedObjects(FReferenceCollect ComponentInstanceData.AddReferencedObjects(Collector); } +void AActor::FActorTransactionAnnotation::Serialize(FArchive& Ar) +{ + enum class EVersion : uint8 + { + InitialVersion = 0, + // ------------------------------------------------------ + VersionPlusOne, + LatestVersion = VersionPlusOne - 1 + }; + + EVersion Version = EVersion::LatestVersion; + Ar << Version; + + if (Version > EVersion::LatestVersion) + { + Ar.SetError(); + return; + } + + Ar << Actor; + if (Ar.IsLoading()) + { + ComponentInstanceData = FComponentInstanceDataCache(Actor.Get()); + } + + Ar << bRootComponentDataCached; + + if (bRootComponentDataCached) + { + Ar << RootComponentData; + } +} + bool AActor::FActorTransactionAnnotation::HasInstanceData() const { return (bRootComponentDataCached || ComponentInstanceData.HasInstanceData()); } -TSharedPtr AActor::GetTransactionAnnotation() const +TSharedPtr AActor::FactoryTransactionAnnotation(const ETransactionAnnotationCreationMode InCreationMode) const { + if (InCreationMode == ETransactionAnnotationCreationMode::DefaultInstance) + { + return FActorTransactionAnnotation::Create(); + } + if (CurrentTransactionAnnotation.IsValid()) { return CurrentTransactionAnnotation; } - TSharedPtr TransactionAnnotation = MakeShareable(new FActorTransactionAnnotation(this)); - - if (!TransactionAnnotation->HasInstanceData()) - { - // If there is nothing in the annotation don't bother storing it. 
- TransactionAnnotation = nullptr; - } - - return TransactionAnnotation; + return FActorTransactionAnnotation::CreateIfRequired(this); } void AActor::PreEditUndo() diff --git a/Engine/Source/Runtime/Engine/Private/Components/SceneComponent.cpp b/Engine/Source/Runtime/Engine/Private/Components/SceneComponent.cpp index 9b382806c3de..68e4ab0607ea 100644 --- a/Engine/Source/Runtime/Engine/Private/Components/SceneComponent.cpp +++ b/Engine/Source/Runtime/Engine/Private/Components/SceneComponent.cpp @@ -511,7 +511,10 @@ void USceneComponent::PostEditChangeProperty(FPropertyChangedEvent& PropertyChan if (bLocationChanged || (PropertyName == RotationName || MemberPropertyName == RotationName) || (PropertyName == ScaleName || MemberPropertyName == ScaleName)) { FNavigationSystem::UpdateComponentData(*this); - InvalidateLightingCacheDetailed(true, bLocationChanged); + if (!GIsDemoMode) + { + InvalidateLightingCacheDetailed(true, bLocationChanged); + } } } @@ -3217,6 +3220,16 @@ void USceneComponent::GetLifetimeReplicatedProps(TArray< FLifetimeProperty > & O } #if WITH_EDITOR +void USceneComponent::PostEditComponentMove(bool bFinished) +{ + if (!bFinished) + { + // Snapshot the transaction buffer for this component if we've not finished moving yet + // This allows listeners to be notified of intermediate changes of state + SnapshotTransactionBuffer(this); + } +} + bool USceneComponent::CanEditChange( const UProperty* Property ) const { bool bIsEditable = Super::CanEditChange( Property ); diff --git a/Engine/Source/Runtime/Engine/Private/DialogueWave.cpp b/Engine/Source/Runtime/Engine/Private/DialogueWave.cpp index d2aa16aeeb74..0d45dc79dc65 100644 --- a/Engine/Source/Runtime/Engine/Private/DialogueWave.cpp +++ b/Engine/Source/Runtime/Engine/Private/DialogueWave.cpp @@ -629,6 +629,95 @@ USoundBase* UDialogueWave::GetWaveFromContext(const FDialogueContextMapping& Con return ContextMapping.Proxy; } +FText UDialogueWave::GetLocalizedSpokenText() const +{ + // Try and find a 
general dialogue wave localization + FText LocalizedSpokenText; + if (!FText::FindText(FDialogueConstants::DialogueNamespace, LocalizationGUID.ToString(), /*OUT*/LocalizedSpokenText, &SpokenText)) + { + LocalizedSpokenText = FText::AsCultureInvariant(SpokenText); + } + return LocalizedSpokenText; +} + +FText UDialogueWave::GetLocalizedSpokenText(const FDialogueContext& Context) const +{ + for (const FDialogueContextMapping& ContextMapping : ContextMappings) + { + if (ContextMapping.Context == Context) + { + return GetLocalizedSpokenText(ContextMapping); + } + } + return FText::GetEmpty(); +} + +FText UDialogueWave::GetLocalizedSpokenText(const FDialogueContextMapping& ContextMapping) const +{ + // First try and find a context specific localization + FText LocalizedSpokenText; + if (!FText::FindText(FDialogueConstants::DialogueNamespace, GetContextLocalizationKey(ContextMapping), /*OUT*/LocalizedSpokenText, &SpokenText)) + { + // Failing that, try and find a general dialogue wave localization + LocalizedSpokenText = GetLocalizedSpokenText(); + } + return LocalizedSpokenText; +} + +FText UDialogueWave::GetLocalizedSubtitle() const +{ + const FString* SubtitleSourceString = &SpokenText; + + // Try and find a general dialogue wave localization + FString Key = LocalizationGUID.ToString(); + if (bOverride_SubtitleOverride) + { + SubtitleSourceString = &SubtitleOverride; + Key += FDialogueConstants::SubtitleKeySuffix; + } + + FText LocalizedSubtitle; + if (!FText::FindText(FDialogueConstants::DialogueNamespace, Key, /*OUT*/LocalizedSubtitle, SubtitleSourceString)) + { + LocalizedSubtitle = bOverride_SubtitleOverride ? 
FText::AsCultureInvariant(SubtitleOverride) : FText::AsCultureInvariant(SpokenText); + } + return LocalizedSubtitle; +} + +FText UDialogueWave::GetLocalizedSubtitle(const FDialogueContext& Context) const +{ + for (const FDialogueContextMapping& ContextMapping : ContextMappings) + { + if (ContextMapping.Context == Context) + { + return GetLocalizedSubtitle(ContextMapping); + } + } + return FText::GetEmpty(); +} + +FText UDialogueWave::GetLocalizedSubtitle(const FDialogueContextMapping& ContextMapping) const +{ + const FString* SubtitleSourceString = &SpokenText; + + // Do we have a subtitle override? + FString Key = GetContextLocalizationKey(ContextMapping); + if (bOverride_SubtitleOverride) + { + SubtitleSourceString = &SubtitleOverride; + Key += FDialogueConstants::SubtitleKeySuffix; + } + + // First try and find a context specific localization + FText LocalizedSubtitle; + if (!FText::FindText(FDialogueConstants::DialogueNamespace, Key, /*OUT*/LocalizedSubtitle, SubtitleSourceString)) + { + // Failing that, try and find a general dialogue wave localization + LocalizedSubtitle = GetLocalizedSubtitle(); + } + return LocalizedSubtitle; +} + FString UDialogueWave::GetContextLocalizationKey(const FDialogueContext& Context) const { for (const FDialogueContextMapping& ContextMapping : ContextMappings) @@ -721,32 +810,9 @@ void UDialogueWave::UpdateMappingProxy(FDialogueContextMapping& ContextMapping) UEngine::CopyPropertiesForUnrelatedObjects(ContextMapping.SoundWave, ContextMapping.Proxy); FSubtitleCue NewSubtitleCue; - - // Do we have a subtitle override? 
- const FString* NewSubtitleCueSourceString = &SpokenText; - FString Key = GetContextLocalizationKey(ContextMapping); - if (bOverride_SubtitleOverride) - { - NewSubtitleCueSourceString = &SubtitleOverride; - Key += FDialogueConstants::SubtitleKeySuffix; - } - - // First try and find a context specific localization - if (!FText::FindText(FDialogueConstants::DialogueNamespace, Key, /*OUT*/NewSubtitleCue.Text, NewSubtitleCueSourceString)) - { - // Failing that, try and find a general dialogue wave localization - Key = LocalizationGUID.ToString(); - if (bOverride_SubtitleOverride) - { - Key += FDialogueConstants::SubtitleKeySuffix; - } - - if (!FText::FindText(FDialogueConstants::DialogueNamespace, Key, /*OUT*/NewSubtitleCue.Text, NewSubtitleCueSourceString)) - { - NewSubtitleCue.Text = bOverride_SubtitleOverride ? FText::AsCultureInvariant(SubtitleOverride) : FText::AsCultureInvariant(SpokenText); - } - } + NewSubtitleCue.Text = GetLocalizedSubtitle(ContextMapping); NewSubtitleCue.Time = 0.0f; + ContextMapping.Proxy->Subtitles.Empty(); ContextMapping.Proxy->Subtitles.Add(NewSubtitleCue); } diff --git a/Engine/Source/Runtime/Engine/Private/EditorFramework/AssetImportData.cpp b/Engine/Source/Runtime/Engine/Private/EditorFramework/AssetImportData.cpp index b9e4b75285ee..525c5d2a9da5 100644 --- a/Engine/Source/Runtime/Engine/Private/EditorFramework/AssetImportData.cpp +++ b/Engine/Source/Runtime/Engine/Private/EditorFramework/AssetImportData.cpp @@ -186,10 +186,14 @@ FString UAssetImportData::SanitizeImportFilename(const FString& InPath) const FString UAssetImportData::SanitizeImportFilename(const FString& InPath, const UPackage* Outermost) { - if (Outermost) + return SanitizeImportFilename(InPath, Outermost ? 
Outermost->GetPathName() : FString()); +} + +FString UAssetImportData::SanitizeImportFilename(const FString& InPath, const FString& PackagePath) +{ + if (!PackagePath.IsEmpty()) { const bool bIncludeDot = true; - const FString PackagePath = Outermost->GetPathName(); const FName MountPoint = FPackageName::GetPackageMountPoint(PackagePath); const FString PackageFilename = FPackageName::LongPackageNameToFilename(PackagePath, FPaths::GetExtension(InPath, bIncludeDot)); const FString AbsolutePath = FPaths::ConvertRelativePathToFull(InPath); @@ -231,8 +235,6 @@ FString UAssetImportData::SanitizeImportFilename(const FString& InPath, const UP FString UAssetImportData::ResolveImportFilename(const FString& InRelativePath, const UPackage* Outermost) { - FString RelativePath = InRelativePath; - if (Outermost) { // Relative to the package filename? @@ -271,7 +273,7 @@ FString UAssetImportData::ResolveImportFilename(const FString& InRelativePath, c #endif // Convert relative paths - return FPaths::ConvertRelativePathToFull(RelativePath); + return FPaths::ConvertRelativePathToFull(InRelativePath); } FString UAssetImportData::ResolveImportFilename(const FString& InRelativePath) const diff --git a/Engine/Source/Runtime/Engine/Private/Engine/SystemTimeTimecodeProvider.cpp b/Engine/Source/Runtime/Engine/Private/Engine/SystemTimeTimecodeProvider.cpp new file mode 100644 index 000000000000..eaac8db4dc63 --- /dev/null +++ b/Engine/Source/Runtime/Engine/Private/Engine/SystemTimeTimecodeProvider.cpp @@ -0,0 +1,17 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Engine/SystemTimeTimecodeProvider.h" + +#include "Misc/CoreMisc.h" +#include "Misc/DateTime.h" + + +FTimecode USystemTimeTimecodeProvider::GetTimecode() const +{ + const FDateTime DateTime = FDateTime::Now(); + const FTimespan Timespan = DateTime.GetTimeOfDay(); + const double TotalSeconds = Timespan.GetTotalSeconds(); + FFrameNumber FrameNumber = FrameRate.AsFrameNumber(TotalSeconds); + + return FTimecode::FromFrameNumber(FrameNumber, FrameRate, FTimecode::IsDropFormatTimecodeSupported(FrameRate)); +} diff --git a/Engine/Source/Runtime/Engine/Private/Engine/TimecodeProvider.cpp b/Engine/Source/Runtime/Engine/Private/Engine/TimecodeProvider.cpp deleted file mode 100644 index eb597a2c6514..000000000000 --- a/Engine/Source/Runtime/Engine/Private/Engine/TimecodeProvider.cpp +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. - -#include "Engine/TimecodeProvider.h" - -#include "Misc/CoreMisc.h" -#include "Misc/DateTime.h" - - -FTimecode UTimecodeProvider::GetSystemTimeTimecode(const FFrameRate& InForFrameRate) -{ - const FDateTime DateTime = FDateTime::Now(); - const FTimespan Timespan = DateTime.GetTimeOfDay(); - const double TotalSeconds = Timespan.GetTotalSeconds(); - FFrameNumber FrameNumber = InForFrameRate.AsFrameNumber(TotalSeconds); - - return FTimecode::FromFrameNumber(FrameNumber, InForFrameRate, FTimecode::IsDropFormatTimecodeSupported(InForFrameRate)); -} diff --git a/Engine/Source/Runtime/Engine/Private/GameEngine.cpp b/Engine/Source/Runtime/Engine/Private/GameEngine.cpp index 27efa2b0c066..56e78014b141 100644 --- a/Engine/Source/Runtime/Engine/Private/GameEngine.cpp +++ b/Engine/Source/Runtime/Engine/Private/GameEngine.cpp @@ -545,7 +545,15 @@ TSharedRef UGameEngine::CreateGameWindow() Window->SetWindowMode(WindowMode); } - Window->ShowWindow(); + // No need to show window in off-screen rendering mode as it does not render to screen + if (FSlateApplication::Get().IsRenderingOffScreen()) + { + 
FSlateApplicationBase::Get().GetRenderer()->CreateViewport(Window); + } + else + { + Window->ShowWindow(); + } // Tick now to force a redraw of the window and ensure correct fullscreen application FSlateApplication::Get().Tick(); @@ -1413,7 +1421,11 @@ void UGameEngine::Tick( float DeltaSeconds, bool bIdleMode ) FPlatformSplash::Hide(); if ( GameViewportWindow.IsValid() ) { - GameViewportWindow.Pin()->ShowWindow(); + // Don't show window in off-screen rendering mode as it doesn't render to screen + if (!FSlateApplication::Get().IsRenderingOffScreen()) + { + GameViewportWindow.Pin()->ShowWindow(); + } FSlateApplication::Get().RegisterGameViewport( GameViewportWidget.ToSharedRef() ); } } diff --git a/Engine/Source/Runtime/Engine/Private/GameViewportClient.cpp b/Engine/Source/Runtime/Engine/Private/GameViewportClient.cpp index f790a6d09b3a..e71481eb9e3d 100644 --- a/Engine/Source/Runtime/Engine/Private/GameViewportClient.cpp +++ b/Engine/Source/Runtime/Engine/Private/GameViewportClient.cpp @@ -368,7 +368,29 @@ void UGameViewportClient::Init(struct FWorldContext& WorldContext, UGameInstance // Set the projects default viewport mouse capture mode MouseCaptureMode = GetDefault()->DefaultViewportMouseCaptureMode; + FString DefaultViewportMouseCaptureMode; + if (FParse::Value(FCommandLine::Get(), TEXT("DefaultViewportMouseCaptureMode="), DefaultViewportMouseCaptureMode)) + { + const UEnum* EnumPtr = FindObject(ANY_PACKAGE, TEXT("EMouseCaptureMode")); + checkf(EnumPtr, TEXT("Unable to find EMouseCaptureMode enum")); + if (EnumPtr) + { + int64 EnumValue = EnumPtr->GetValueByName(FName(*DefaultViewportMouseCaptureMode)); + if (EnumValue != INDEX_NONE) + { + MouseCaptureMode = static_cast(EnumValue); + } + else + { + UE_LOG(LogInit, Warning, TEXT("Unknown DefaultViewportMouseCaptureMode %s. 
Command line setting will be ignored."), *DefaultViewportMouseCaptureMode); + } + } + } MouseLockMode = GetDefault()->DefaultViewportMouseLockMode; + // In off-screen rendering mode don't lock mouse to the viewport, as we don't want mouse to lock to an invisible window + if (FSlateApplication::Get().IsRenderingOffScreen()) { + MouseLockMode = EMouseLockMode::DoNotLock; + } // Create the cursor Widgets UUserInterfaceSettings* UISettings = GetMutableDefault(UUserInterfaceSettings::StaticClass()); @@ -847,6 +869,11 @@ void UGameViewportClient::AddSoftwareCursor(EMouseCursor::Type Cursor, const FSo } } +bool UGameViewportClient::HasSoftwareCursor(EMouseCursor::Type Cursor) const +{ + return CursorWidgets.Contains(Cursor); +} + void UGameViewportClient::AddCursorWidget(EMouseCursor::Type Cursor, class UUserWidget* CursorWidget) { if (ensure(CursorWidget)) diff --git a/Engine/Source/Runtime/Engine/Private/Internationalization/StringTable.cpp b/Engine/Source/Runtime/Engine/Private/Internationalization/StringTable.cpp index 3e06f9eb003d..93e3a2117512 100644 --- a/Engine/Source/Runtime/Engine/Private/Internationalization/StringTable.cpp +++ b/Engine/Source/Runtime/Engine/Private/Internationalization/StringTable.cpp @@ -79,7 +79,7 @@ public: } private: - //~ IStringTableEngineInterop interface + //~ IStringTableEngineBridge interface virtual void RedirectAndLoadStringTableAssetImpl(FName& InOutTableId, const EStringTableLoadingPolicy InLoadingPolicy) override { const FSoftObjectPath StringTableAssetReference = GetAssetReference(InOutTableId); @@ -117,6 +117,11 @@ private: return StringTableAssetReference.IsValid(); } + virtual bool IsStringTableAssetBeingReplacedImpl(const UStringTable* InStringTableAsset) override + { + return InStringTableAsset && InStringTableAsset->HasAnyFlags(RF_NewerVersionExists); + } + static FSoftObjectPath GetAssetReference(const FName InTableId) { const FString StringTableAssetName = InTableId.ToString(); diff --git 
a/Engine/Source/Runtime/Engine/Private/KismetSystemLibrary.cpp b/Engine/Source/Runtime/Engine/Private/KismetSystemLibrary.cpp index 5734672d6260..895c441499a3 100644 --- a/Engine/Source/Runtime/Engine/Private/KismetSystemLibrary.cpp +++ b/Engine/Source/Runtime/Engine/Private/KismetSystemLibrary.cpp @@ -2553,6 +2553,44 @@ FString UKismetSystemLibrary::GetCommandLine() return FString(FCommandLine::Get()); } +int32 UKismetSystemLibrary::BeginTransaction(const FString& Context, FText Description, UObject* PrimaryObject) +{ +#if WITH_EDITOR + return GEngine->BeginTransaction(*Context, Description, PrimaryObject); +#else + return INDEX_NONE; +#endif +} + +int32 UKismetSystemLibrary::EndTransaction() +{ +#if WITH_EDITOR + return GEngine->EndTransaction(); +#else + return INDEX_NONE; +#endif +} + +void UKismetSystemLibrary::CancelTransaction(const int32 Index) +{ +#if WITH_EDITOR + if (Index >= 0) + { + GEngine->CancelTransaction(Index); + } +#endif +} + +void UKismetSystemLibrary::TransactObject(UObject* Object) +{ +#if WITH_EDITOR + if (Object) + { + Object->Modify(); + } +#endif +} + UObject* UKismetSystemLibrary::GetObjectFromPrimaryAssetId(FPrimaryAssetId PrimaryAssetId) { if (UAssetManager* Manager = UAssetManager::GetIfValid()) diff --git a/Engine/Source/Runtime/Engine/Private/PlayerCameraManager.cpp b/Engine/Source/Runtime/Engine/Private/PlayerCameraManager.cpp index 7845d19db508..cf4f97b51926 100644 --- a/Engine/Source/Runtime/Engine/Private/PlayerCameraManager.cpp +++ b/Engine/Source/Runtime/Engine/Private/PlayerCameraManager.cpp @@ -381,7 +381,7 @@ UCameraAnimInst* APlayerCameraManager::PlayCameraAnim(UCameraAnim* Anim, float R UCameraAnimInst* const Inst = AllocCameraAnimInst(); if (Inst) { - if (!Anim->bRelativeToInitialFOV) + if (Anim != nullptr && !Anim->bRelativeToInitialFOV) { Inst->InitialFOV = ViewTarget.POV.FOV; } diff --git a/Engine/Source/Runtime/Engine/Private/PrimitiveSceneProxy.cpp b/Engine/Source/Runtime/Engine/Private/PrimitiveSceneProxy.cpp 
index 4e197fc98811..b45de0033c71 100644 --- a/Engine/Source/Runtime/Engine/Private/PrimitiveSceneProxy.cpp +++ b/Engine/Source/Runtime/Engine/Private/PrimitiveSceneProxy.cpp @@ -550,7 +550,8 @@ void FPrimitiveSceneProxy::SetCollisionEnabled_RenderThread(const bool bNewEnabl bool FPrimitiveSceneProxy::IsShown(const FSceneView* View) const { #if WITH_EDITOR - if (!View->Family->EngineShowFlags.VREditing) + // Don't draw editor specific actors during game mode + if (View->Family->EngineShowFlags.Game) { if (DrawInVREditMode) { @@ -558,7 +559,7 @@ bool FPrimitiveSceneProxy::IsShown(const FSceneView* View) const } } - // After checking for VR Edit mode specific actors, check for Editor vs. Game + // After checking for VR/Desktop Edit mode specific actors, check for Editor vs. Game if(View->Family->EngineShowFlags.Editor) { if(!DrawInEditor) diff --git a/Engine/Source/Runtime/Engine/Private/Slate/SceneViewport.cpp b/Engine/Source/Runtime/Engine/Private/Slate/SceneViewport.cpp index 10164356f0d8..40d89632856a 100644 --- a/Engine/Source/Runtime/Engine/Private/Slate/SceneViewport.cpp +++ b/Engine/Source/Runtime/Engine/Private/Slate/SceneViewport.cpp @@ -1205,91 +1205,100 @@ void FSceneViewport::ResizeFrame(uint32 NewWindowSizeX, uint32 NewWindowSizeY, E TOptional NewWindowPos; FVector2D NewWindowSize(NewWindowSizeX, NewWindowSizeY); - const FSlateRect BestWorkArea = FSlateApplication::Get().GetWorkArea(FSlateRect::FromPointAndExtent(OldWindowPos, OldWindowSize)); - - // A switch to window mode should position the window to be in the center of the work-area (we don't do this if we were already in window mode to allow the user to move the window) - // Fullscreen modes should position the window to the top-left of the monitor. - // If we're going into windowed fullscreen mode, we always want the window to fill the entire screen. 
- // When we calculate the scene view, we'll check the fullscreen mode and configure the screen percentage - // scaling so we actual render to the resolution we've been asked for. - if (NewWindowMode == EWindowMode::Windowed) + // Only adjust window size if not in off-screen rendering mode, because off-screen rendering skips rendering to screen and uses custom size. + if (!FSlateApplication::Get().IsRenderingOffScreen()) { - if (OldWindowMode == EWindowMode::Windowed && NewWindowSize == OldWindowSize) + const FSlateRect BestWorkArea = FSlateApplication::Get().GetWorkArea(FSlateRect::FromPointAndExtent(OldWindowPos, OldWindowSize)); + + // A switch to window mode should position the window to be in the center of the work-area (we don't do this if we were already in window mode to allow the user to move the window) + // Fullscreen modes should position the window to the top-left of the monitor. + // If we're going into windowed fullscreen mode, we always want the window to fill the entire screen. + // When we calculate the scene view, we'll check the fullscreen mode and configure the screen percentage + // scaling so we actual render to the resolution we've been asked for. + if (NewWindowMode == EWindowMode::Windowed) { - // Leave the window position alone! - NewWindowPos.Reset(); + if (OldWindowMode == EWindowMode::Windowed && NewWindowSize == OldWindowSize) + { + // Leave the window position alone! 
+ NewWindowPos.Reset(); + } + else + { + const FVector2D BestWorkAreaTopLeft = BestWorkArea.GetTopLeft(); + const FVector2D BestWorkAreaSize = BestWorkArea.GetSize(); + + FVector2D CenteredWindowPos = BestWorkAreaTopLeft; + + if (NewWindowSize.X < BestWorkAreaSize.X) + { + CenteredWindowPos.X += FMath::Max(0.0f, (BestWorkAreaSize.X - NewWindowSize.X) * 0.5f); + } + + if (NewWindowSize.Y < BestWorkAreaSize.Y) + { + CenteredWindowPos.Y += FMath::Max(0.0f, (BestWorkAreaSize.Y - NewWindowSize.Y) * 0.5f); + } + + NewWindowPos = CenteredWindowPos; + } } else { - const FVector2D BestWorkAreaTopLeft = BestWorkArea.GetTopLeft(); - const FVector2D BestWorkAreaSize = BestWorkArea.GetSize(); + FDisplayMetrics DisplayMetrics; + FSlateApplication::Get().GetInitialDisplayMetrics(DisplayMetrics); - FVector2D CenteredWindowPos = BestWorkAreaTopLeft; - - if (NewWindowSize.X < BestWorkAreaSize.X) + if (DisplayMetrics.MonitorInfo.Num() > 0) { - CenteredWindowPos.X += FMath::Max(0.0f, (BestWorkAreaSize.X - NewWindowSize.X) * 0.5f); - } + // Try to find the monitor that the viewport belongs to based on BestWorkArea. + // For widowed fullscreen and fullscreen modes it should be top left position of one of monitors. 
+ FPlatformRect DisplayRect = DisplayMetrics.MonitorInfo[0].DisplayRect; + for (int32 Index = 1; Index < DisplayMetrics.MonitorInfo.Num(); ++Index) + { + const FMonitorInfo& MonitorInfo = DisplayMetrics.MonitorInfo[Index]; + if (BestWorkArea.GetTopLeft() == FVector2D(MonitorInfo.WorkArea.Left, MonitorInfo.WorkArea.Top)) + { + DisplayRect = DisplayMetrics.MonitorInfo[Index].DisplayRect; + } + } - if (NewWindowSize.Y < BestWorkAreaSize.Y) + NewWindowPos = FVector2D(DisplayRect.Left, DisplayRect.Top); + + if (NewWindowMode == EWindowMode::WindowedFullscreen) + { + NewWindowSize.X = DisplayRect.Right - DisplayRect.Left; + NewWindowSize.Y = DisplayRect.Bottom - DisplayRect.Top; + } + } + else { - CenteredWindowPos.Y += FMath::Max(0.0f, (BestWorkAreaSize.Y - NewWindowSize.Y) * 0.5f); - } + NewWindowPos = FVector2D(0.0f, 0.0f); - NewWindowPos = CenteredWindowPos; + if (NewWindowMode == EWindowMode::WindowedFullscreen) + { + NewWindowSize.X = DisplayMetrics.PrimaryDisplayWidth; + NewWindowSize.Y = DisplayMetrics.PrimaryDisplayHeight; + } + } } + +#if !PLATFORM_MAC + IHeadMountedDisplay::MonitorInfo MonitorInfo; + if (GEngine->XRSystem.IsValid() && GEngine->XRSystem->GetHMDDevice() && GEngine->XRSystem->GetHMDDevice()->GetHMDMonitorInfo(MonitorInfo)) + { + if (MonitorInfo.DesktopX > 0 || MonitorInfo.DesktopY > 0) + { + NewWindowSize.X = MonitorInfo.ResolutionX; + NewWindowSize.Y = MonitorInfo.ResolutionY; + NewWindowPos = FVector2D(MonitorInfo.DesktopX, MonitorInfo.DesktopY); + } + } +#endif } else { - FDisplayMetrics DisplayMetrics; - FSlateApplication::Get().GetInitialDisplayMetrics(DisplayMetrics); - - if (DisplayMetrics.MonitorInfo.Num() > 0) - { - // Try to find the monitor that the viewport belongs to based on BestWorkArea. - // For widowed fullscreen and fullscreen modes it should be top left position of one of monitors. 
- FPlatformRect DisplayRect = DisplayMetrics.MonitorInfo[0].DisplayRect; - for (int32 Index = 1; Index < DisplayMetrics.MonitorInfo.Num(); ++Index) - { - const FMonitorInfo& MonitorInfo = DisplayMetrics.MonitorInfo[Index]; - if (BestWorkArea.GetTopLeft() == FVector2D(MonitorInfo.WorkArea.Left, MonitorInfo.WorkArea.Top)) - { - DisplayRect = DisplayMetrics.MonitorInfo[Index].DisplayRect; - } - } - - NewWindowPos = FVector2D(DisplayRect.Left, DisplayRect.Top); - - if (NewWindowMode == EWindowMode::WindowedFullscreen) - { - NewWindowSize.X = DisplayRect.Right - DisplayRect.Left; - NewWindowSize.Y = DisplayRect.Bottom - DisplayRect.Top; - } - } - else - { - NewWindowPos = FVector2D(0.0f, 0.0f); - - if (NewWindowMode == EWindowMode::WindowedFullscreen) - { - NewWindowSize.X = DisplayMetrics.PrimaryDisplayWidth; - NewWindowSize.Y = DisplayMetrics.PrimaryDisplayHeight; - } - } + NewWindowPos = FVector2D(0.0f, 0.0f); } -#if !PLATFORM_MAC - IHeadMountedDisplay::MonitorInfo MonitorInfo; - if (GEngine->XRSystem.IsValid() && GEngine->XRSystem->GetHMDDevice() && GEngine->XRSystem->GetHMDDevice()->GetHMDMonitorInfo(MonitorInfo)) - { - if (MonitorInfo.DesktopX > 0 || MonitorInfo.DesktopY > 0) - { - NewWindowSize.X = MonitorInfo.ResolutionX; - NewWindowSize.Y = MonitorInfo.ResolutionY; - NewWindowPos = FVector2D(MonitorInfo.DesktopX, MonitorInfo.DesktopY); - } - } -#endif // Resize window const bool bSizeChanged = NewWindowSize != OldWindowSize; const bool bPositionChanged = NewWindowPos.IsSet() && NewWindowPos != OldWindowPos; diff --git a/Engine/Source/Runtime/Engine/Private/StaticMesh.cpp b/Engine/Source/Runtime/Engine/Private/StaticMesh.cpp index 4b5ad3d29296..1f423fb3818d 100644 --- a/Engine/Source/Runtime/Engine/Private/StaticMesh.cpp +++ b/Engine/Source/Runtime/Engine/Private/StaticMesh.cpp @@ -3160,6 +3160,34 @@ bool UStaticMesh::RemoveUVChannel(int32 LODIndex, int32 UVChannelIndex) return false; } +bool UStaticMesh::SetUVChannel(int32 LODIndex, int32 UVChannelIndex, const 
TArray& TexCoords) +{ + FMeshDescription* MeshDescription = GetOriginalMeshDescription(LODIndex); + if (!MeshDescription) + { + return false; + } + + if (TexCoords.Num() < MeshDescription->VertexInstances().Num()) + { + return false; + } + + Modify(); + + int32 TextureCoordIndex = 0; + TMeshAttributesRef UVs = MeshDescription->VertexInstanceAttributes().GetAttributesRef(MeshAttribute::VertexInstance::TextureCoordinate); + for (const FVertexInstanceID& VertexInstanceID : MeshDescription->VertexInstances().GetElementIDs()) + { + UVs.Set(VertexInstanceID, UVChannelIndex, TexCoords[TextureCoordIndex++]); + } + + CommitOriginalMeshDescription(LODIndex); + PostEditChange(); + + return true; +} + #endif int32 UStaticMesh::GetNumUVChannels(int32 LODIndex) diff --git a/Engine/Source/Runtime/Engine/Private/UnrealEngine.cpp b/Engine/Source/Runtime/Engine/Private/UnrealEngine.cpp index cd3bfed27b52..e451ca40dbe4 100644 --- a/Engine/Source/Runtime/Engine/Private/UnrealEngine.cpp +++ b/Engine/Source/Runtime/Engine/Private/UnrealEngine.cpp @@ -89,6 +89,7 @@ UnrealEngine.cpp: Implements the UEngine class and helpers. #include "Engine/ObjectReferencer.h" #include "Engine/TextureLODSettings.h" #include "Engine/TimecodeProvider.h" +#include "Engine/SystemTimeTimecodeProvider.h" #include "Misc/NetworkVersion.h" #include "Net/OnlineEngineInterface.h" #include "Engine/Console.h" @@ -1571,7 +1572,13 @@ void UEngine::PreExit() delete ScreenSaverInhibitorRunnable; - SetTimecodeProvider(nullptr); + GetTimecodeProviderProtected()->Shutdown(this); + CustomTimecodeProvider = nullptr; + + // Don't clear the pointer to DefaultTimecodeProvider, as other systems shutting down may try to reference it + // for validation. 
+ DefaultTimecodeProvider->RemoveFromRoot(); + SetCustomTimeStep(nullptr); ShutdownHMD(); @@ -1708,9 +1715,9 @@ void UEngine::UpdateTimeAndHandleMaxTickRate() FTimedMemReport::Get().PumpTimedMemoryReports(); #endif - if (CustomTimeStep) + if (CurrentCustomTimeStep) { - bool bRunEngineCode = CustomTimeStep->UpdateTimeStep(this); + bool bRunEngineCode = CurrentCustomTimeStep->UpdateTimeStep(this); if (!bRunEngineCode) { UpdateTimecode(); @@ -1921,25 +1928,40 @@ void UEngine::UpdateTimeAndHandleMaxTickRate() UpdateTimecode(); } +void UEngine::ReinitializeCustomTimeStep() +{ + UEngineCustomTimeStep* CustomTimeStep = GetCustomTimeStep(); + if (CustomTimeStep) + { + CustomTimeStep->Shutdown(this); + if (!CustomTimeStep->Initialize(this)) + { + UE_LOG(LogEngine, Error, TEXT("Failed reinitializing CustomTimeStep %s"), *GetPathName(CustomTimeStep)); + } + } +} + bool UEngine::SetCustomTimeStep(UEngineCustomTimeStep* InCustomTimeStep) { bool bResult = true; - if (InCustomTimeStep != CustomTimeStep) + UEngineCustomTimeStep* Previous = GetCustomTimeStep(); + if (InCustomTimeStep != Previous) { - if (CustomTimeStep) + if (Previous) { - CustomTimeStep->Shutdown(this); + Previous->Shutdown(this); } - CustomTimeStep = InCustomTimeStep; + CurrentCustomTimeStep = InCustomTimeStep; - if (CustomTimeStep) + if (CurrentCustomTimeStep) { - bResult = CustomTimeStep->Initialize(this); + bResult = CurrentCustomTimeStep->Initialize(this); if (!bResult) { - CustomTimeStep = nullptr; + UE_LOG(LogEngine, Error, TEXT("SetCustomTimeStep - Failed to intialize CustomTimeStep %s"), *GetPathName(CurrentCustomTimeStep)); + CurrentCustomTimeStep = nullptr; } } } @@ -1947,48 +1969,74 @@ bool UEngine::SetCustomTimeStep(UEngineCustomTimeStep* InCustomTimeStep) return bResult; } +void UEngine::ReinitializeTimecodeProvider() +{ + UTimecodeProvider* Provider = GetTimecodeProviderProtected(); + Provider->Shutdown(this); + if (!Provider->Initialize(this)) + { + UE_LOG(LogEngine, Error, TEXT("Failed 
reinitializing TimecodeProvider %s"), *GetPathName(Provider)); + } +} + bool UEngine::SetTimecodeProvider(UTimecodeProvider* InTimecodeProvider) { bool bResult = true; - if (InTimecodeProvider != TimecodeProvider) + if (InTimecodeProvider != CustomTimecodeProvider) { - if (TimecodeProvider) + const bool bCurrentlyUsingDefault = !CustomTimecodeProvider; + if (bCurrentlyUsingDefault) { - TimecodeProvider->Shutdown(this); + // If we're already using the default, and we're resetting to the default, we don't need to do anything. + if (InTimecodeProvider == DefaultTimecodeProvider || InTimecodeProvider == nullptr) + { + return bResult; + } + else + { + DefaultTimecodeProvider->Shutdown(this); + } + } + else + { + CustomTimecodeProvider->Shutdown(this); + CustomTimecodeProvider = nullptr; } - TimecodeProvider = InTimecodeProvider; - - if (TimecodeProvider) + if (InTimecodeProvider != nullptr) { - bResult = TimecodeProvider->Initialize(this); - if (!bResult) + bResult = InTimecodeProvider->Initialize(this); + if (bResult) { - TimecodeProvider = nullptr; + CustomTimecodeProvider = InTimecodeProvider; + } + } + + // If the new provider failed to initialized (or was null), then + // re-initialize the default provider. 
+ if (!CustomTimecodeProvider) + { + if (!ensure(DefaultTimecodeProvider->Initialize(this))) + { + UE_LOG(LogEngine, Error, TEXT("SetTimecodeProvider - Failed to intialize DefaultTimecodeProvider %s"), *GetPathName(DefaultTimecodeProvider)); } } } - + return bResult; } void UEngine::UpdateTimecode() { - if (TimecodeProvider) + const UTimecodeProvider* Provider = GetTimecodeProvider(); + if (Provider->GetSynchronizationState() == ETimecodeProviderSynchronizationState::Synchronized) { - if (TimecodeProvider->GetSynchronizationState() == ETimecodeProviderSynchronizationState::Synchronized) - { - FApp::SetTimecodeAndFrameRate(TimecodeProvider->GetTimecode(), TimecodeProvider->GetFrameRate()); - } - else - { - FApp::SetTimecodeAndFrameRate(FTimecode(), FFrameRate()); - } + FApp::SetTimecodeAndFrameRate(Provider->GetTimecode(), Provider->GetFrameRate()); } else { - FApp::SetTimecodeAndFrameRate(UTimecodeProvider::GetSystemTimeTimecode(DefaultTimecodeFrameRate), DefaultTimecodeFrameRate); + FApp::SetTimecodeAndFrameRate(FTimecode(), FFrameRate()); } } @@ -2095,44 +2143,23 @@ void LoadEngineClass(FSoftClassPath& ClassName, TSubclassOf& EngineCl } } -void InitializeTimecodeProvider(UEngine* InEngine, FSoftClassPath InTimecodeFrameRateClassName) -{ - if (InEngine->GetTimecodeProvider() == nullptr && InTimecodeFrameRateClassName.IsValid()) - { - UClass* TimecodeProviderClass = LoadClass(nullptr, *InTimecodeFrameRateClassName.ToString()); - if (TimecodeProviderClass) - { - UTimecodeProvider* NewTimecodeProvider = NewObject(InEngine, TimecodeProviderClass); - if (!InEngine->SetTimecodeProvider(NewTimecodeProvider)) - { - UE_LOG(LogEngine, Error, TEXT("Engine config TimecodeProviderClassName '%s' could not be initialized."), *InTimecodeFrameRateClassName.ToString()); - } - } - else - { - UE_LOG(LogEngine, Error, TEXT("Engine config value TimecodeProviderClassName '%s' is not a valid class name."), *InTimecodeFrameRateClassName.ToString()); - } - } -} - -void 
InitializeCustomTimeStep(UEngine* InEngine, FSoftClassPath InCustomTimeStepClassName) +UEngineCustomTimeStep* InitializeCustomTimeStep(UEngine* InEngine, FSoftClassPath InCustomTimeStepClassName) { + UEngineCustomTimeStep* NewCustomTimeStep = nullptr; if (InEngine->GetCustomTimeStep() == nullptr && InCustomTimeStepClassName.IsValid()) { UClass* CustomTimeStepClass = LoadClass(nullptr, *InCustomTimeStepClassName.ToString()); if (CustomTimeStepClass) { - UEngineCustomTimeStep* NewCustomTimeStep = NewObject(InEngine, CustomTimeStepClass); - if (!InEngine->SetCustomTimeStep(NewCustomTimeStep)) - { - UE_LOG(LogEngine, Error, TEXT("Engine config CustomTimeStepClassName '%s' could not be initialized."), *InCustomTimeStepClassName.ToString()); - } + NewCustomTimeStep = NewObject(InEngine, CustomTimeStepClass); + InEngine->SetCustomTimeStep(NewCustomTimeStep); } else { UE_LOG(LogEngine, Error, TEXT("Engine config value CustomTimeStepClassName '%s' is not a valid class name."), *InCustomTimeStepClassName.ToString()); } } + return NewCustomTimeStep; } /** @@ -2319,8 +2346,43 @@ void UEngine::InitializeObjectReferences() } } - InitializeCustomTimeStep(this, CustomTimeStepClassName); - InitializeTimecodeProvider(this, TimecodeProviderClassName); + DefaultCustomTimeStep = InitializeCustomTimeStep(this, CustomTimeStepClassName); + + // Setup the timecode providers. 
+ { + if (DefaultTimecodeProvider == nullptr) + { + UClass* DefaultTimecodeProviderClass = DefaultTimecodeProviderClassName.TryLoadClass(); + if (DefaultTimecodeProviderClass == nullptr) + { + DefaultTimecodeProviderClass = USystemTimeTimecodeProvider::StaticClass(); + } + + DefaultTimecodeProvider = NewObject(this, DefaultTimecodeProviderClass); + if (!ensure(DefaultTimecodeProvider->Initialize(this))) + { + UE_LOG(LogEngine, Error, TEXT("InitializeObjectReferences - Failed to intialize DefaultTimecodeProvider %s"), *GetPathName(DefaultTimecodeProvider)); + } + + DefaultTimecodeProvider->AddToRoot(); + if (USystemTimeTimecodeProvider* LocalSystemTimeProvider = Cast(DefaultTimecodeProvider)) + { + LocalSystemTimeProvider->SetFrameRate(DefaultTimecodeFrameRate); + } + } + + if (CustomTimecodeProvider == nullptr && TimecodeProviderClassName.IsValid()) + { + if (UClass* TimecodeProviderClass = TimecodeProviderClassName.TryLoadClass()) + { + SetTimecodeProvider(NewObject(this, TimecodeProviderClass)); + } + else + { + UE_LOG(LogEngine, Error, TEXT("Engine config value TimecodeProviderClassName '%s' is not a valid class name."), *TimecodeProviderClassName.ToString()); + } + } + } if (GameSingleton == nullptr && GameSingletonClassName.ToString().Len() > 0) { @@ -2476,22 +2538,6 @@ void UEngine::AddReferencedObjects(UObject* InThis, FReferenceCollector& Collect Super::AddReferencedObjects(This, Collector); } -#if WITH_EDITOR -bool UEngine::CanEditChange(const UProperty* InProperty) const -{ - if (!Super::CanEditChange(InProperty)) - { - return false; - } - - if (InProperty->GetFName() == GET_MEMBER_NAME_CHECKED(UEngine, DefaultTimecodeFrameRate)) - { - return !TimecodeProviderClassName.IsValid(); - } - - return true; -} -#endif // #if WITH_EDITOR void UEngine::CleanupGameViewport() { @@ -13992,6 +14038,7 @@ int32 UEngine::RenderStatFPS(UWorld* World, FViewport* Viewport, FCanvas* Canvas // Start drawing the various counters. 
const int32 RowHeight = FMath::TruncToInt(Font->GetMaxCharHeight() * 1.1f); + UEngineCustomTimeStep* CustomTimeStep = GetCustomTimeStep(); if (CustomTimeStep) { ECustomTimeStepSynchronizationState State = CustomTimeStep->GetSynchronizationState(); @@ -15744,34 +15791,31 @@ int32 UEngine::RenderStatTimecode(UWorld* World, FViewport* Viewport, FCanvas* C UFont* Font = FPlatformProperties::SupportsWindowedMode() ? GetSmallFont() : GetMediumFont(); const int32 RowHeight = FMath::TruncToInt(Font->GetMaxCharHeight() * 1.1f); - UTimecodeProvider* Provider = GetTimecodeProvider(); - if (Provider) + const UTimecodeProvider* Provider = GetTimecodeProvider(); + ETimecodeProviderSynchronizationState State = Provider->GetSynchronizationState(); + FString ProviderName = Provider->GetName(); + float CharWidth, CharHeight; + Font->GetCharSize(TEXT(' '), CharWidth, CharHeight); + int32 NewX = X - Font->GetStringSize(*ProviderName) - (int32)CharWidth; + switch (State) { - ETimecodeProviderSynchronizationState State = Provider->GetSynchronizationState(); - FString ProviderName = Provider->GetName(); - float CharWidth, CharHeight; - Font->GetCharSize(TEXT(' '), CharWidth, CharHeight); - int32 NewX = X - Font->GetStringSize(*ProviderName) - (int32)CharWidth; - switch(State) - { - case ETimecodeProviderSynchronizationState::Closed: - Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Closed"), *ProviderName), Font, FColor::Red); - break; - case ETimecodeProviderSynchronizationState::Error: - Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Error"), *ProviderName), Font, FColor::Red); - break; - case ETimecodeProviderSynchronizationState::Synchronized: - Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Synchronized"), *ProviderName), Font, FColor::Green); - break; - case ETimecodeProviderSynchronizationState::Synchronizing: - Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Synchronizing"), *ProviderName), Font, 
FColor::Yellow); - break; - default: - check(false); - break; - } - Y += RowHeight; + case ETimecodeProviderSynchronizationState::Closed: + Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Closed"), *ProviderName), Font, FColor::Red); + break; + case ETimecodeProviderSynchronizationState::Error: + Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Error"), *ProviderName), Font, FColor::Red); + break; + case ETimecodeProviderSynchronizationState::Synchronized: + Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Synchronized"), *ProviderName), Font, FColor::Green); + break; + case ETimecodeProviderSynchronizationState::Synchronizing: + Canvas->DrawShadowedString(NewX, Y, *FString::Printf(TEXT("%s TC: Synchronizing"), *ProviderName), Font, FColor::Yellow); + break; + default: + check(false); + break; } + Y += RowHeight; Canvas->DrawShadowedString(X, Y, *FString::Printf(TEXT("TC: %s"), *FApp::GetTimecode().ToString()), Font, FColor::Green); Y += RowHeight; diff --git a/Engine/Source/Runtime/Engine/Public/ComponentInstanceDataCache.h b/Engine/Source/Runtime/Engine/Public/ComponentInstanceDataCache.h index 5f852f9176a2..349c9d66070f 100644 --- a/Engine/Source/Runtime/Engine/Public/ComponentInstanceDataCache.h +++ b/Engine/Source/Runtime/Engine/Public/ComponentInstanceDataCache.h @@ -73,6 +73,14 @@ public: ~FComponentInstanceDataCache(); + /** Non-copyable */ + FComponentInstanceDataCache(const FComponentInstanceDataCache&) = delete; + FComponentInstanceDataCache& operator=(const FComponentInstanceDataCache&) = delete; + + /** Movable */ + FComponentInstanceDataCache(FComponentInstanceDataCache&&) = default; + FComponentInstanceDataCache& operator=(FComponentInstanceDataCache&&) = default; + /** Iterates over an Actor's components and applies the stored component instance data to each */ void ApplyToActor(AActor* Actor, const ECacheApplyPhase CacheApplyPhase) const; diff --git 
a/Engine/Source/Runtime/LiveLinkInterface/Private/LiveLinkSourceSettings.cpp b/Engine/Source/Runtime/LiveLinkInterface/Private/LiveLinkSourceSettings.cpp new file mode 100644 index 000000000000..274357e4f174 --- /dev/null +++ b/Engine/Source/Runtime/LiveLinkInterface/Private/LiveLinkSourceSettings.cpp @@ -0,0 +1,20 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "LiveLinkSourceSettings.h" +#include "UObject/EnterpriseObjectVersion.h" + +PRAGMA_DISABLE_DEPRECATION_WARNINGS +void ULiveLinkSourceSettings::Serialize(FArchive& Ar) +{ + Super::Serialize(Ar); + + // LiveLinkSourceSettings aren't persistently stored by the engine, + // but they could have been elsewhere. + + Ar.UsingCustomVersion(FEnterpriseObjectVersion::GUID); + if (Ar.IsLoading() && FEnterpriseObjectVersion::LiveLinkTimeSynchronization > Ar.CustomVer(FEnterpriseObjectVersion::GUID)) + { + Mode = InterpolationSettings.bUseInterpolation ? ELiveLinkSourceMode::Interpolated : ELiveLinkSourceMode::Default; + } +} +PRAGMA_ENABLE_DEPRECATION_WARNINGS \ No newline at end of file diff --git a/Engine/Source/Runtime/LiveLinkInterface/Public/ILiveLinkClient.h b/Engine/Source/Runtime/LiveLinkInterface/Public/ILiveLinkClient.h index 7f7a62f0cb43..d3fb384693da 100644 --- a/Engine/Source/Runtime/LiveLinkInterface/Public/ILiveLinkClient.h +++ b/Engine/Source/Runtime/LiveLinkInterface/Public/ILiveLinkClient.h @@ -8,7 +8,7 @@ #include "Misc/Guid.h" class ILiveLinkSource; -struct FQualifiedFrametime; +struct FTimecode; class LIVELINKINTERFACE_API ILiveLinkClient : public IModularFeature { @@ -26,23 +26,28 @@ public: virtual void PushSubjectSkeleton(FGuid SourceGuid, FName SubjectName, const FLiveLinkRefSkeleton& RefSkeleton) = 0; virtual void PushSubjectData(FGuid SourceGuid, FName SubjectName, const FLiveLinkFrameData& FrameData) = 0; virtual void ClearSubject(FName SubjectName) = 0; + // Populates an array with in-use subject names virtual void GetSubjectNames(TArray& SubjectNames) = 0; //Get 
Whether or not we are saving each frame or not or not virtual bool GetSaveFrames() const = 0; + //Set Whether or not we are saving each frame //Returns whether or not we were previously saving. //If we were saving frames and now don't we remove the previously saved frames virtual bool SetSaveFrames(bool InSave) = 0; + //Clear the stored frames associated with this subject virtual void ClearSubjectsFrames(FName SubjectName) = 0; + //Clear All Subjects Frames virtual void ClearAllSubjectsFrames() = 0; virtual const FLiveLinkSubjectFrame* GetSubjectData(FName SubjectName) = 0; virtual const FLiveLinkSubjectFrame* GetSubjectDataAtWorldTime(FName SubjectName, double WorldTime) = 0; + virtual const FLiveLinkSubjectFrame* GetSubjectDataAtSceneTime(FName SubjectName, const FTimecode& SceneTime) = 0; //Efficiently get data - virtual const TArray* GetSubjectRawFrames(FName SubjectName) = 0; + virtual const TArray* GetSubjectRawFrames(FName SubjectName) = 0; }; diff --git a/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkSourceSettings.h b/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkSourceSettings.h index 271c93d0062f..81fdeb47edcb 100644 --- a/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkSourceSettings.h +++ b/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkSourceSettings.h @@ -5,18 +5,54 @@ #include "CoreMinimal.h" #include "UObject/ObjectMacros.h" #include "UObject/Object.h" +#include "Misc/FrameRate.h" #include "LiveLinkSourceSettings.generated.h" +UENUM() +enum class ELiveLinkSourceMode : uint8 +{ + Default, //! The source will be run in default mode. + //! This mode will not attempt any type of interpolation, time synchronization, + //! or other processing. + + Interpolated, //! The source will be run in interpolated mode. + //! This mode will use FLiveLinkInterpolationSettings and is most useful + //! when smooth animation is desired. + + TimeSynchronized, //! The source will be run in time synchronized mode. + //! 
This mode will use FLiveLinkTimeSynchronizationSettings and is most useful + //! when sources need to be synchronized with multiple other external inputs + //! (such as video or other time synchronized sources). + //! Don't use if the engine isn't setup with a Timecode provider. +}; + +USTRUCT() +struct FLiveLinkTimeSynchronizationSettings +{ + GENERATED_BODY() + + FLiveLinkTimeSynchronizationSettings() : FrameRate(60, 1) {} + + // The frame rate of the source. + // This should be the frame rate the source is "stamped" at, not necessarily the frame rate the source is sending. + // The source should supply this whenever possible. + UPROPERTY(EditAnywhere, Category = Settings) + FFrameRate FrameRate; +}; + +PRAGMA_DISABLE_DEPRECATION_WARNINGS + USTRUCT() struct FLiveLinkInterpolationSettings { - GENERATED_USTRUCT_BODY() + GENERATED_BODY() - FLiveLinkInterpolationSettings() : bUseInterpolation(false), InterpolationOffset(0.5f) {} + FLiveLinkInterpolationSettings() : InterpolationOffset(0.5f) {} - // Should this connection use interpolation - UPROPERTY(EditAnywhere, Category = Settings) + // Unused + DEPRECATED(4.21, "Please use ULiveLinkSourceSettings::Mode to specify how the source will behave.") + UPROPERTY() bool bUseInterpolation; // When interpolating: how far back from current time should we read the buffer (in seconds) @@ -24,6 +60,8 @@ struct FLiveLinkInterpolationSettings float InterpolationOffset; }; +PRAGMA_ENABLE_DEPRECATION_WARNINGS + // Base class for live link source settings (can be replaced by sources themselves) UCLASS() class LIVELINKINTERFACE_API ULiveLinkSourceSettings : public UObject @@ -31,7 +69,16 @@ class LIVELINKINTERFACE_API ULiveLinkSourceSettings : public UObject public: GENERATED_BODY() + UPROPERTY(EditAnywhere, Category = "Mode") + ELiveLinkSourceMode Mode = ELiveLinkSourceMode::Default; + + // Only used when Mode is set to Interpolated. 
UPROPERTY(EditAnywhere, Category = "Interpolation Settings") FLiveLinkInterpolationSettings InterpolationSettings; + // Only used when Mode is set to TimeSynchronized. + UPROPERTY(EditAnywhere, Category = "Time Synchronization Settings") + FLiveLinkTimeSynchronizationSettings TimeSynchronizationSettings; + + virtual void Serialize(FArchive& Ar) override; }; diff --git a/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkTypes.h b/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkTypes.h index e429947df80f..3feffbc3906b 100644 --- a/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkTypes.h +++ b/Engine/Source/Runtime/LiveLinkInterface/Public/LiveLinkTypes.h @@ -1,4 +1,4 @@ -// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. #pragma once @@ -168,9 +168,7 @@ public: UPROPERTY() TMap StringMetaData; - DEPRECATED(4.20, "SceneTime will become an FQualifiedFrameTime from TimeManagement in 4.21. FLiveLinkTimeCode will implicitly allow conversion to FQualifiedFrameTime so please update your code in preparation.") - UPROPERTY() - FLiveLinkTimeCode SceneTime; + FQualifiedFrameTime SceneTime; }; USTRUCT() @@ -268,10 +266,10 @@ struct FLiveLinkSubjectFrame struct FLiveLinkFrame { public: - TArray Transforms; - TArray Curves; + TArray Transforms; + TArray Curves; - FLiveLinkMetaData MetaData; + FLiveLinkMetaData MetaData; FLiveLinkWorldTime WorldTime; diff --git a/Engine/Source/Runtime/Media/Public/IMediaAudioSample.h b/Engine/Source/Runtime/Media/Public/IMediaAudioSample.h index fcac52f07c58..f4251e28b80a 100644 --- a/Engine/Source/Runtime/Media/Public/IMediaAudioSample.h +++ b/Engine/Source/Runtime/Media/Public/IMediaAudioSample.h @@ -3,6 +3,8 @@ #pragma once #include "CoreTypes.h" +#include "Misc/Optional.h" +#include "Misc/Timecode.h" #include "Misc/Timespan.h" @@ -103,6 +105,14 @@ public: */ virtual FTimespan GetTime() const = 0; + /** + * Get the sample timecode if available. 
+ * + * @return Sample timecode. + * @see GetTime + */ + virtual TOptional GetTimecode() const { return TOptional(); } + public: /** Virtual destructor. */ diff --git a/Engine/Source/Runtime/Media/Public/IMediaBinarySample.h b/Engine/Source/Runtime/Media/Public/IMediaBinarySample.h index d56021c3f68f..1d11e65a9a0c 100644 --- a/Engine/Source/Runtime/Media/Public/IMediaBinarySample.h +++ b/Engine/Source/Runtime/Media/Public/IMediaBinarySample.h @@ -3,6 +3,8 @@ #pragma once #include "CoreTypes.h" +#include "Misc/Optional.h" +#include "Misc/Timecode.h" #include "Misc/Timespan.h" @@ -49,6 +51,14 @@ public: */ virtual FTimespan GetTime() const = 0; + /** + * Get the sample timecode if available. + * + * @return Sample timecode. + * @see GetTime + */ + virtual TOptional GetTimecode() const { return TOptional(); } + public: /** Virtual destructor. */ diff --git a/Engine/Source/Runtime/Media/Public/IMediaOverlaySample.h b/Engine/Source/Runtime/Media/Public/IMediaOverlaySample.h index f0a8e92004b3..e2f01a14ab28 100644 --- a/Engine/Source/Runtime/Media/Public/IMediaOverlaySample.h +++ b/Engine/Source/Runtime/Media/Public/IMediaOverlaySample.h @@ -5,6 +5,7 @@ #include "Internationalization/Text.h" #include "Math/Vector2D.h" #include "Misc/Optional.h" +#include "Misc/Timecode.h" #include "Misc/Timespan.h" @@ -35,7 +36,6 @@ public: * Get the amount of time for which the sample should be displayed. * * @return Sample duration. - * @see GetTimecode */ virtual FTimespan GetDuration() const = 0; @@ -61,10 +61,17 @@ public: * This value is used primarily for debugging purposes. * * @return Sample time. - * @see GetTimecode */ virtual FTimespan GetTime() const = 0; + /** + * Get the sample timecode if available. + * + * @return Sample timecode. + * @see GetTime + */ + virtual TOptional GetTimecode() const { return TOptional(); } + /** * Get the sample type. 
* diff --git a/Engine/Source/Runtime/Media/Public/IMediaTextureSample.h b/Engine/Source/Runtime/Media/Public/IMediaTextureSample.h index cb93ce5898da..11a3126af57f 100644 --- a/Engine/Source/Runtime/Media/Public/IMediaTextureSample.h +++ b/Engine/Source/Runtime/Media/Public/IMediaTextureSample.h @@ -5,6 +5,8 @@ #include "CoreTypes.h" #include "Math/Color.h" #include "Math/IntPoint.h" +#include "Misc/Optional.h" +#include "Misc/Timecode.h" #include "Misc/Timespan.h" #include "Templates/SharedPointer.h" @@ -160,6 +162,14 @@ public: */ virtual FTimespan GetTime() const = 0; + /** + * Get the sample timecode if available. + * + * @return Sample timecode. + * @see GetTime + */ + virtual TOptional GetTimecode() const { return TOptional(); } + /** * Whether the sample can be held in a cache. * diff --git a/Engine/Source/Runtime/MediaAssets/Public/TimeSynchronizableMediaSource.h b/Engine/Source/Runtime/MediaAssets/Public/TimeSynchronizableMediaSource.h index e7f140f61354..4ced2686c541 100644 --- a/Engine/Source/Runtime/MediaAssets/Public/TimeSynchronizableMediaSource.h +++ b/Engine/Source/Runtime/MediaAssets/Public/TimeSynchronizableMediaSource.h @@ -27,7 +27,11 @@ public: public: - /** Synchronize the media with the engine's timecode. */ + /** + * Synchronize the media with the engine's timecode. + * The media player has to be able to read timecode. + * The media player will try to play the corresponding frame, based on the frame's timecode value.
+ */ UPROPERTY(BlueprintReadWrite, EditAnywhere, Category=Synchronization, meta=(DisplayName="Synchronize with Engine's Timecode")) bool bUseTimeSynchronization; diff --git a/Engine/Source/Runtime/MediaIOCore/Private/MediaCapture.cpp b/Engine/Source/Runtime/MediaIOCore/Private/MediaCapture.cpp index dee79f595ef1..7ae4499c261c 100644 --- a/Engine/Source/Runtime/MediaIOCore/Private/MediaCapture.cpp +++ b/Engine/Source/Runtime/MediaIOCore/Private/MediaCapture.cpp @@ -13,6 +13,7 @@ #include "RendererInterface.h" #include "RenderUtils.h" #include "Slate/SceneViewport.h" +#include "Misc/ScopeLock.h" #if WITH_EDITOR #include "Editor.h" @@ -27,6 +28,13 @@ namespace MediaCaptureDetails { bool FindSceneViewportAndLevel(TSharedPtr& OutSceneViewport); + + //Validation for the source of a capture + bool ValidateSceneViewport(const TSharedPtr& SceneViewport, const FIntPoint& DesiredSize, const EPixelFormat DesiredPixelFormat, const bool bCurrentlyCapturing); + bool ValidateTextureRenderTarget2D(const UTextureRenderTarget2D* RenderTarget, const FIntPoint& DesiredSize, const EPixelFormat DesiredPixelFormat, const bool bCurrentlyCapturing); + + //Validation that there is a capture + bool ValidateIsCapturing(const UMediaCapture& CaptureToBeValidated); } /* UMediaCapture @@ -63,6 +71,7 @@ FString UMediaCapture::GetDesc() bool UMediaCapture::CaptureActiveSceneViewport() { StopCapture(false); + check(IsInGameThread()); TSharedPtr FoundSceneViewport; @@ -78,13 +87,8 @@ bool UMediaCapture::CaptureActiveSceneViewport() bool UMediaCapture::CaptureSceneViewport(TSharedPtr& InSceneViewport) { StopCapture(false); - check(IsInGameThread()); - if (!InSceneViewport.IsValid()) - { - UE_LOG(LogMediaIOCore, Error, TEXT("Can not start the capture. 
The Scene Viewport is invalid.")); - return false; - } + check(IsInGameThread()); if (!ValidateMediaOutput()) { @@ -93,23 +97,10 @@ bool UMediaCapture::CaptureSceneViewport(TSharedPtr& InSceneView DesiredSize = MediaOutput->GetRequestedSize(); DesiredPixelFormat = MediaOutput->GetRequestedPixelFormat(); - - FIntPoint SceneViewportSize = InSceneViewport->GetRenderTargetTextureSizeXY(); - if (DesiredSize.X != SceneViewportSize.X || DesiredSize.Y != SceneViewportSize.Y) - { - UE_LOG(LogMediaIOCore, Error, TEXT("Can not start the capture. The Render Target size doesn't match with the requested size. SceneViewport: %d,%d MediaOutput: %d,%d") - , SceneViewportSize.X, SceneViewportSize.Y - , DesiredSize.X, DesiredSize.Y); - return false; - } - static const auto CVarDefaultBackBufferPixelFormat = IConsoleManager::Get().FindTConsoleVariableDataInt(TEXT("r.DefaultBackBufferPixelFormat")); - EPixelFormat SceneTargetFormat = EDefaultBackBufferPixelFormat::Convert2PixelFormat(EDefaultBackBufferPixelFormat::FromInt(CVarDefaultBackBufferPixelFormat->GetValueOnGameThread())); - if (DesiredPixelFormat != SceneTargetFormat) + const bool bCurrentlyCapturing = false; + if (!MediaCaptureDetails::ValidateSceneViewport(InSceneViewport, DesiredSize, DesiredPixelFormat, bCurrentlyCapturing)) { - UE_LOG(LogMediaIOCore, Error, TEXT("Can not start the capture. The Render Target pixel format doesn't match with the requested pixel format. 
SceneViewport: %s MediaOutput: %s") - , GetPixelFormatString(SceneTargetFormat) - , GetPixelFormatString(DesiredPixelFormat)); return false; } @@ -120,7 +111,9 @@ bool UMediaCapture::CaptureSceneViewport(TSharedPtr& InSceneView return false; } + //no lock required the command on the render thread is not active CapturingSceneViewport = InSceneViewport; + InitializeResolveTarget(MediaOutput->NumberOfTextureBuffers); CurrentResolvedTargetIndex = 0; FCoreDelegates::OnEndFrame.AddUObject(this, &UMediaCapture::OnEndFrame_GameThread); @@ -133,33 +126,13 @@ bool UMediaCapture::CaptureTextureRenderTarget2D(UTextureRenderTarget2D* InRende StopCapture(false); check(IsInGameThread()); - if (InRenderTarget2D == nullptr) - { - UE_LOG(LogMediaIOCore, Error, TEXT("Couldn't start the capture. The Render Target is invalid.")); - return false; - } - - if (!ValidateMediaOutput()) - { - return false; - } DesiredSize = MediaOutput->GetRequestedSize(); DesiredPixelFormat = MediaOutput->GetRequestedPixelFormat(); - if (DesiredSize.X != InRenderTarget2D->SizeX || DesiredSize.Y != InRenderTarget2D->SizeY) + const bool bCurrentlyCapturing = false; + if (!MediaCaptureDetails::ValidateTextureRenderTarget2D(InRenderTarget2D, DesiredSize, DesiredPixelFormat, bCurrentlyCapturing)) { - UE_LOG(LogMediaIOCore, Error, TEXT("Can not start the capture. The Render Target size doesn't match with the requested size. RenderTarget: %d,%d MediaOutput: %d,%d") - , InRenderTarget2D->SizeX, InRenderTarget2D->SizeY - , DesiredSize.X, DesiredSize.Y); - return false; - } - - if (DesiredPixelFormat != InRenderTarget2D->GetFormat()) - { - UE_LOG(LogMediaIOCore, Error, TEXT("Can not start the capture. The Render Target pixel format doesn't match with the requested pixel format. 
RenderTarget: %s MediaOutput: %s") - , GetPixelFormatString(InRenderTarget2D->GetFormat()) - , GetPixelFormatString(DesiredPixelFormat)); return false; } @@ -168,7 +141,9 @@ bool UMediaCapture::CaptureTextureRenderTarget2D(UTextureRenderTarget2D* InRende return false; } + //no lock required the command on the render thread is not active yet CapturingRenderTarget = InRenderTarget2D; + InitializeResolveTarget(MediaOutput->NumberOfTextureBuffers); CurrentResolvedTargetIndex = 0; FCoreDelegates::OnEndFrame.AddUObject(this, &UMediaCapture::OnEndFrame_GameThread); @@ -177,6 +152,71 @@ bool UMediaCapture::CaptureTextureRenderTarget2D(UTextureRenderTarget2D* InRende return true; } +bool UMediaCapture::UpdateSceneViewport(TSharedPtr& InSceneViewport) +{ + if (!MediaCaptureDetails::ValidateIsCapturing(*this)) + { + StopCapture(false); + return false; + } + + check(IsInGameThread()); + + const bool bCurrentlyCapturing = true; + + if (!MediaCaptureDetails::ValidateSceneViewport(InSceneViewport, DesiredSize, DesiredPixelFormat, bCurrentlyCapturing)) + { + StopCapture(false); + return false; + } + + if (!UpdateSceneViewportImpl(InSceneViewport)) + { + StopCapture(false); + return false; + } + + { + FScopeLock Lock(&AccessingCapturingSource); + CapturingSceneViewport = InSceneViewport; + CapturingRenderTarget = nullptr; + } + + return true; +} + +bool UMediaCapture::UpdateTextureRenderTarget2D(UTextureRenderTarget2D * InRenderTarget2D) +{ + if (!MediaCaptureDetails::ValidateIsCapturing(*this)) + { + StopCapture(false); + return false; + } + + check(IsInGameThread()); + + const bool bCurrentlyCapturing = true; + if (!MediaCaptureDetails::ValidateTextureRenderTarget2D(InRenderTarget2D, DesiredSize, DesiredPixelFormat, bCurrentlyCapturing)) + { + StopCapture(false); + return false; + } + + if (!UpdateRenderTargetImpl(InRenderTarget2D)) + { + StopCapture(false); + return false; + } + + { + FScopeLock Lock(&AccessingCapturingSource); + CapturingRenderTarget = InRenderTarget2D; + 
CapturingSceneViewport.Reset(); + } + + return true; +} + void UMediaCapture::StopCapture(bool bAllowPendingFrameToBeProcess) { check(IsInGameThread()); @@ -195,7 +235,7 @@ void UMediaCapture::StopCapture(bool bAllowPendingFrameToBeProcess) FCoreDelegates::OnEndFrame.RemoveAll(this); - if (bWaitingForResolveCommandExecution || !bResolvedTargetInitialized) + while (bWaitingForResolveCommandExecution || !bResolvedTargetInitialized) { FlushRenderingCommands(); } @@ -244,7 +284,7 @@ void UMediaCapture::InitializeResolveTarget(int32 InNumberOfBuffers) 1, TexCreate_CPUReadback, CreateInfo - ); + ); } bResolvedTargetInitialized = true; }; @@ -297,8 +337,8 @@ void UMediaCapture::OnEndFrame_GameThread() return; } - CurrentResolvedTargetIndex = (CurrentResolvedTargetIndex+1) % NumberOfCaptureFrame; - int32 ReadyFrameIndex = (CurrentResolvedTargetIndex+1) % NumberOfCaptureFrame; // Next one in the buffer queue + CurrentResolvedTargetIndex = (CurrentResolvedTargetIndex + 1) % NumberOfCaptureFrame; + int32 ReadyFrameIndex = (CurrentResolvedTargetIndex + 1) % NumberOfCaptureFrame; // Next one in the buffer queue FCaptureFrame* ReadyFrame = (CaptureFrames[ReadyFrameIndex].bResolvedTargetRequested) ? &CaptureFrames[ReadyFrameIndex] : nullptr; FCaptureFrame* CapturingFrame = (GetState() != EMediaCaptureState::StopRequested) ? 
&CaptureFrames[CurrentResolvedTargetIndex] : nullptr; @@ -324,9 +364,11 @@ void UMediaCapture::OnEndFrame_GameThread() { FTexture2DRHIRef SourceTexture; { - UTextureRenderTarget2D* InCapturingRenderTarget = nullptr; - FPlatformAtomics::InterlockedExchangePtr((void**)(&InCapturingRenderTarget), CapturingRenderTarget); + FScopeLock Lock(&AccessingCapturingSource); + + UTextureRenderTarget2D* InCapturingRenderTarget = CapturingRenderTarget; TSharedPtr InSceneViewportPtr = CapturingSceneViewport.Pin(); + if (InSceneViewportPtr.IsValid()) { SourceTexture = InSceneViewportPtr->GetRenderTargetTexture(); @@ -459,4 +501,80 @@ namespace MediaCaptureDetails return true; } } + + bool ValidateSceneViewport(const TSharedPtr& SceneViewport, const FIntPoint& DesiredSize, const EPixelFormat DesiredPixelFormat, const bool bCurrentlyCapturing) + { + + if (!SceneViewport.IsValid()) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not %s the capture. The Scene Viewport is invalid.") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start")); + return false; + } + + FIntPoint SceneViewportSize = SceneViewport->GetRenderTargetTextureSizeXY(); + if (DesiredSize.X != SceneViewportSize.X || DesiredSize.Y != SceneViewportSize.Y) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not %s the capture. The Render Target size doesn't match with the requested size. SceneViewport: %d,%d MediaOutput: %d,%d") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start") + , SceneViewportSize.X, SceneViewportSize.Y + , DesiredSize.X, DesiredSize.Y); + return false; + } + + static const auto CVarDefaultBackBufferPixelFormat = IConsoleManager::Get().FindTConsoleVariableDataInt(TEXT("r.DefaultBackBufferPixelFormat")); + EPixelFormat SceneTargetFormat = EDefaultBackBufferPixelFormat::Convert2PixelFormat(EDefaultBackBufferPixelFormat::FromInt(CVarDefaultBackBufferPixelFormat->GetValueOnGameThread())); + if (DesiredPixelFormat != SceneTargetFormat) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not %s the capture. 
The Render Target pixel format doesn't match with the requested pixel format. SceneViewport: %s MediaOutput: %s") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start") + , GetPixelFormatString(SceneTargetFormat) + , GetPixelFormatString(DesiredPixelFormat)); + return false; + } + + return true; + } + + bool ValidateTextureRenderTarget2D(const UTextureRenderTarget2D* InRenderTarget2D, const FIntPoint& DesiredSize, const EPixelFormat DesiredPixelFormat, const bool bCurrentlyCapturing) + { + if (InRenderTarget2D == nullptr) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Couldn't %s the capture. The Render Target is invalid.") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start")); + return false; + } + + if (DesiredSize.X != InRenderTarget2D->SizeX || DesiredSize.Y != InRenderTarget2D->SizeY) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not %s the capture. The Render Target size doesn't match with the requested size. RenderTarget: %d,%d MediaOutput: %d,%d") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start") + , InRenderTarget2D->SizeX, InRenderTarget2D->SizeY + , DesiredSize.X, DesiredSize.Y); + return false; + } + + if (DesiredPixelFormat != InRenderTarget2D->GetFormat()) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not %s the capture. The Render Target pixel format doesn't match with the requested pixel format. RenderTarget: %s MediaOutput: %s") + , bCurrentlyCapturing ? TEXT("continue") : TEXT("start") + , GetPixelFormatString(InRenderTarget2D->GetFormat()) + , GetPixelFormatString(DesiredPixelFormat)); + return false; + } + + return true; + } + + bool ValidateIsCapturing(const UMediaCapture& CaptureToBeValidated) + { + if (CaptureToBeValidated.GetState() != EMediaCaptureState::Capturing && CaptureToBeValidated.GetState() != EMediaCaptureState::Preparing) + { + UE_LOG(LogMediaIOCore, Error, TEXT("Can not update the capture. 
There is no capture currently.\ + Only use UpdateSceneViewport or UpdateTextureRenderTarget2D when the state is Capturing or Preparing")); + return false; + } + + return true; + } } diff --git a/Engine/Source/Runtime/MediaIOCore/Private/Player/MediaIOCorePlayerBase.cpp b/Engine/Source/Runtime/MediaIOCore/Private/Player/MediaIOCorePlayerBase.cpp index 14bbed6f2f0c..d269370da6fe 100644 --- a/Engine/Source/Runtime/MediaIOCore/Private/Player/MediaIOCorePlayerBase.cpp +++ b/Engine/Source/Runtime/MediaIOCore/Private/Player/MediaIOCorePlayerBase.cpp @@ -14,21 +14,26 @@ #define LOCTEXT_NAMESPACE "MediaIOCorePlayerBase" +/* FMediaIOCoreMediaOption structors + *****************************************************************************/ +const FName FMediaIOCoreMediaOption::FrameRateNumerator("FrameRateNumerator"); +const FName FMediaIOCoreMediaOption::FrameRateDenominator("FrameRateDenominator"); +const FName FMediaIOCoreMediaOption::ResolutionWidth("ResolutionWidth"); +const FName FMediaIOCoreMediaOption::ResolutionHeight("ResolutionHeight"); +const FName FMediaIOCoreMediaOption::VideoStandard("VideoStandard"); + /* FMediaIOCorePlayerBase structors *****************************************************************************/ FMediaIOCorePlayerBase::FMediaIOCorePlayerBase(IMediaEventSink& InEventSink) - :bIsTimecodeLogEnable(false) + : bIsTimecodeLogEnable(false) , CurrentState(EMediaState::Closed) , CurrentTime(FTimespan::Zero()) , EventSink(InEventSink) - , LastAudioChannels(0) - , LastAudioSampleRate(0) , LastVideoDim(FIntPoint::ZeroValue) , VideoFrameRate(30, 1) , LastFrameDropCount(0) , Samples(new FMediaIOCoreSamples) - , bUseFrameTimecode(false) , bUseTimeSynchronization(false) , VideoSampleFormat(EMediaTextureSampleFormat::CharBGRA) , PreviousFrameTimespan(FTimespan::Zero()) @@ -49,6 +54,10 @@ void FMediaIOCorePlayerBase::Close() CurrentState = EMediaState::Closed; CurrentTime = FTimespan::Zero(); LastVideoDim = FIntPoint::ZeroValue; + AudioTrackFormat.NumChannels 
= 0; + AudioTrackFormat.SampleRate = 0; + + Samples->FlushSamples(); EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged); EventSink.ReceiveMediaEvent(EMediaEvent::MediaClosed); } @@ -57,12 +66,12 @@ FString FMediaIOCorePlayerBase::GetInfo() const { FString Info; - if (LastAudioChannels > 0) + if (AudioTrackFormat.NumChannels > 0) { Info += FString::Printf(TEXT("Stream\n")); Info += FString::Printf(TEXT(" Type: Audio\n")); - Info += FString::Printf(TEXT(" Channels: %i\n"), LastAudioChannels); - Info += FString::Printf(TEXT(" Sample Rate: %i Hz\n"), LastAudioSampleRate); + Info += FString::Printf(TEXT(" Channels: %i\n"), AudioTrackFormat.NumChannels); + Info += FString::Printf(TEXT(" Sample Rate: %i Hz\n"), AudioTrackFormat.SampleRate); Info += FString::Printf(TEXT(" Bits Per Sample: 32\n")); } @@ -95,6 +104,11 @@ IMediaSamples& FMediaIOCorePlayerBase::GetSamples() return *Samples; } +const FMediaIOCoreSamples& FMediaIOCorePlayerBase::GetSamples() const +{ + return *Samples; +} + FString FMediaIOCorePlayerBase::GetStats() const { return FString(); @@ -105,14 +119,22 @@ IMediaTracks& FMediaIOCorePlayerBase::GetTracks() return *this; } +FString FMediaIOCorePlayerBase::GetUrl() const +{ + return OpenUrl; +} + IMediaView& FMediaIOCorePlayerBase::GetView() { return *this; } -bool FMediaIOCorePlayerBase::Open(const FString& /*Url*/, const IMediaOptions* /*Options*/) +bool FMediaIOCorePlayerBase::Open(const FString& Url, const IMediaOptions* Options) { - return false; + Close(); + + OpenUrl = Url; + return ReadMediaOptions(Options); } bool FMediaIOCorePlayerBase::Open(const TSharedRef& /*Archive*/, const FString& /*OriginalUrl*/, const IMediaOptions* /*Options*/) @@ -120,33 +142,28 @@ bool FMediaIOCorePlayerBase::Open(const TSharedRefGetTimecodeProvider()) { - if (Provider->GetSynchronizationState() == ETimecodeProviderSynchronizationState::Synchronized) - { - FrameRate = Provider->GetFrameRate(); - bUseDefaultTime = false; - } - } - else - { - FrameRate = 
GEngine->DefaultTimecodeFrameRate; - bUseDefaultTime = false; - } - - if (!bUseDefaultTime) - { - CurrentTime = FTimespan(0, Timecode.Hours, Timecode.Minutes, Timecode.Seconds, static_cast((ETimespan::TicksPerSecond * Timecode.Frames) / FrameRate.AsDecimal()) * ETimespan::NanosecondsPerTick); + bUseTimecode = (Provider->GetSynchronizationState() == ETimecodeProviderSynchronizationState::Synchronized); } } - return bUseDefaultTime; + + if (bUseTimecode) + { + FTimecode Timecode = FApp::GetTimecode(); + FFrameRate FrameRate = FApp::GetTimecodeFrameRate(); + CurrentTime = FTimespan(0, Timecode.Hours, Timecode.Minutes, Timecode.Seconds, static_cast((ETimespan::TicksPerSecond * Timecode.Frames) / FrameRate.AsDecimal()) * ETimespan::NanosecondsPerTick); + } + else + { + // As default, use the App time + CurrentTime = FTimespan::FromSeconds(FApp::GetCurrentTime()); + } } /* IMediaCache interface @@ -370,6 +387,19 @@ bool FMediaIOCorePlayerBase::SetTrackFormat(EMediaTrackType TrackType, int32 Tra bool FMediaIOCorePlayerBase::ReadMediaOptions(const IMediaOptions* Options) { bUseTimeSynchronization = Options->GetMediaOption(TimeSynchronizableMedia::UseTimeSynchronizatioOption, false); + { + int32 Numerator = Options->GetMediaOption(FMediaIOCoreMediaOption::FrameRateNumerator, (int64)30); + int32 Denominator = Options->GetMediaOption(FMediaIOCoreMediaOption::FrameRateDenominator, (int64)1); + VideoFrameRate = FFrameRate(Numerator, Denominator); + } + { + int32 ResolutionX = Options->GetMediaOption(FMediaIOCoreMediaOption::ResolutionWidth, (int64)1920); + int32 ResolutionY = Options->GetMediaOption(FMediaIOCoreMediaOption::ResolutionHeight, (int64)1080); + VideoTrackFormat.Dim = FIntPoint(ResolutionX, ResolutionY); + VideoTrackFormat.FrameRates = TRange(VideoFrameRate.AsDecimal()); + VideoTrackFormat.FrameRate = VideoFrameRate.AsDecimal(); + VideoTrackFormat.TypeName = Options->GetMediaOption(FMediaIOCoreMediaOption::VideoStandard, FString(TEXT("1080p30fps"))); + } return 
true; } diff --git a/Engine/Source/Runtime/MediaIOCore/Public/MediaCapture.h b/Engine/Source/Runtime/MediaIOCore/Public/MediaCapture.h index d968c09f7895..198a072b0501 100644 --- a/Engine/Source/Runtime/MediaIOCore/Public/MediaCapture.h +++ b/Engine/Source/Runtime/MediaIOCore/Public/MediaCapture.h @@ -10,6 +10,7 @@ #include "PixelFormat.h" #include "RHI.h" #include "RHIResources.h" +#include "HAL/CriticalSection.h" #include "MediaCapture.generated.h" @@ -20,7 +21,7 @@ class UTextureRenderTarget2D; /** * Possible states of media capture. */ - UENUM() +UENUM() enum class EMediaCaptureState { /** Unrecoverable error occurred during capture. */ @@ -51,46 +52,71 @@ class FMediaCaptureUserData * MediaCapture capture the texture of the Render target or the SceneViewport and sends it to an external media device. * MediaCapture should be created by a MediaOutput. */ -UCLASS(Abstract, editinlinenew, BlueprintType, hidecategories=(Object)) +UCLASS(Abstract, editinlinenew, BlueprintType, hidecategories = (Object)) class MEDIAIOCORE_API UMediaCapture : public UObject { GENERATED_UCLASS_BODY() public: /** - * Stop the previous capture and start the capture of a SceneViewport. + * Stop the actual capture if there is one. + * Then start the capture of a SceneViewport. * If the SceneViewport is destroyed, the capture will stop. * The SceneViewport needs to be of the same size and have the same pixel format as requested by the media output. * @note make sure the size of the SceneViewport doesn't change during capture. + * @return True if the capture was successfully started */ bool CaptureSceneViewport(TSharedPtr& SceneViewport); /** - * Find and capture every frame from active SceneViewport. + * Stop the current capture if there is one. + * Then find and capture every frame from active SceneViewport. * It can only find a SceneViewport when you play in Standalone or in "New Editor Window PIE". * If the active SceneViewport is destroyed, the capture will stop. 
* The SceneViewport needs to be of the same size and have the same pixel format as requested by the media output. + * @return True if the capture was successfully started */ UFUNCTION(BlueprintCallable, Category = "Media|Output") bool CaptureActiveSceneViewport(); /** - * Capture every frame for a TextureRenderTarget2D. + * Stop the actual capture if there is one. + * Then capture every frame for a TextureRenderTarget2D. * The TextureRenderTarget2D needs to be of the same size and have the same pixel format as requested by the media output. + * @return True if the capture was successfully started */ UFUNCTION(BlueprintCallable, Category = "Media|Output") bool CaptureTextureRenderTarget2D(UTextureRenderTarget2D* RenderTarget); + /** + * Update the current capture with a SceneViewport. + * If the SceneViewport is destroyed, the capture will stop. + * The SceneViewport needs to be of the same size and have the same pixel format as requested by the media output. + * @note make sure the size of the SceneViewport doesn't change during capture. + * @return Return true if the capture was successfully updated. If false is returned, the capture was stopped. + */ + bool UpdateSceneViewport(TSharedPtr& SceneViewport); + + + /** + * Update the current capture with every frame for a TextureRenderTarget2D. + * The TextureRenderTarget2D needs to be of the same size and have the same pixel format as requested by the media output. + * @return Return true if the capture was successfully updated. If false is returned, the capture was stopped. + */ + UFUNCTION(BlueprintCallable, Category = "Media|Output") + bool UpdateTextureRenderTarget2D(UTextureRenderTarget2D* RenderTarget); + + /** * Stop the previous requested capture. - * @param bAllowPendingFrameToBeProcess Keep copying the pending frames asynchronously or stop immediately without copying the pending frames. 
+ * @param bAllowPendingFrameToBeProcess Keep copying the pending frames asynchronously or stop immediately without copying the pending frames. */ UFUNCTION(BlueprintCallable, Category = "Media|Output") void StopCapture(bool bAllowPendingFrameToBeProcess); /** Get the current state of the capture. */ UFUNCTION(BlueprintCallable, Category = "Media|Output") - virtual EMediaCaptureState GetState() { return MediaState; } + virtual EMediaCaptureState GetState() const { return MediaState; } /** Set the media output. Can only be set when the capture is stopped. */ UFUNCTION(BlueprintCallable, Category = "Media|Output") @@ -98,7 +124,7 @@ public: /** Get the desired size of the current capture. */ UFUNCTION(BlueprintCallable, Category = "Media|Output") - FIntPoint GetDesiredSize() const { return DesiredSize;} + FIntPoint GetDesiredSize() const { return DesiredSize; } /** Get the desired pixel format of the current capture. */ UFUNCTION(BlueprintCallable, Category = "Media|Output") @@ -117,6 +143,8 @@ protected: virtual bool ValidateMediaOutput() const; virtual bool CaptureSceneViewportImpl(TSharedPtr& InSceneViewport) { return true; } virtual bool CaptureRenderTargetImpl(UTextureRenderTarget2D* InRenderTarget) { return true; } + virtual bool UpdateSceneViewportImpl(TSharedPtr& InSceneViewport) { return true; } + virtual bool UpdateRenderTargetImpl(UTextureRenderTarget2D* InRenderTarget) { return true; } virtual void StopCaptureImpl(bool bAllowPendingFrameToBeProcess) { } @@ -158,6 +186,8 @@ private: UPROPERTY(Transient) UTextureRenderTarget2D* CapturingRenderTarget; TWeakPtr CapturingSceneViewport; + FCriticalSection AccessingCapturingSource; + FIntPoint DesiredSize; EPixelFormat DesiredPixelFormat; diff --git a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreAudioSampleBase.h b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreAudioSampleBase.h index 997b4debb067..10b908228047 100644 --- a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreAudioSampleBase.h +++ 
b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreAudioSampleBase.h @@ -22,6 +22,62 @@ public: , Time(FTimespan::MinValue()) { } + /** + * Initialize the sample. + * + * @param InAudioBuffer The audio frame data. + * @param InBufferSize The size of the audio buffer. + * @param InNumberOfChannels The number of channel of the audio buffer. + * @param InSampleRate The sample rate of the audio buffer. + * @param InTime The sample time (in the player's own clock). + * @param InTimecode The sample timecode if available. + */ + bool Initialize(const int32* InAudioBuffer, uint32 InBufferSize, uint32 InNumberOfChannels, uint32 InSampleRate, FTimespan InTime, const TOptional& InTimecode) + { + if (InAudioBuffer == nullptr || InNumberOfChannels * InSampleRate <= 0) + { + FreeSample(); + return false; + } + + Buffer.Reset(InBufferSize); + Buffer.Append(InAudioBuffer, InBufferSize); + Time = InTime; + Timecode = InTimecode; + Channels = InNumberOfChannels; + SampleRate = InSampleRate; + Duration = (InBufferSize * ETimespan::TicksPerSecond) / (Channels * SampleRate); + + return true; + } + + /** + * Initialize the sample. + * + * @param InBinaryBuffer The metadata frame data. + * @param InNumberOfChannels The number of channel of the audio buffer. + * @param InSampleRate The sample rate of the audio buffer. + * @param InTime The sample time (in the player's own clock). + * @param InTimecode The sample timecode if available. 
+ */ + bool Initialize(TArray InAudioBuffer, uint32 InNumberOfChannels, uint32 InSampleRate, FTimespan InTime, const TOptional& InTimecode) + { + if (InAudioBuffer.Num() == 0 || InNumberOfChannels * InSampleRate <= 0) + { + FreeSample(); + return false; + } + + Buffer = MoveTemp(InAudioBuffer); + Time = InTime; + Timecode = InTimecode; + Channels = InNumberOfChannels; + SampleRate = InSampleRate; + Duration = (InAudioBuffer.Num() * ETimespan::TicksPerSecond) / (Channels * SampleRate); + + return true; + } + public: //~ IMediaAudioSample interface @@ -61,6 +117,11 @@ public: return Time; } + virtual TOptional GetTimecode() const override + { + return Timecode; + } + public: //~ IMediaPoolable interface @@ -91,4 +152,7 @@ protected: /** Sample time. */ FTimespan Time; + + /** Sample timecode. */ + TOptional Timecode; }; diff --git a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreBinarySampleBase.h b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreBinarySampleBase.h index 7e3e2cbe124f..4226d507f286 100644 --- a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreBinarySampleBase.h +++ b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreBinarySampleBase.h @@ -24,9 +24,10 @@ public: * * @param InBinaryBuffer The metadata frame data. * @param InBufferSize The size of the InBinaryBuffer. + * @param InTimecode The sample timecode if available. * @param InTime The sample time (in the player's own clock). */ - bool Initialize(const uint8* InBinaryBuffer, uint32 InBufferSize, FTimespan InTime) + bool Initialize(const uint8* InBinaryBuffer, uint32 InBufferSize, FTimespan InTime, const TOptional& InTimecode) { if (InBinaryBuffer == nullptr) { @@ -37,6 +38,7 @@ public: Buffer.Reset(InBufferSize); Buffer.Append(InBinaryBuffer, InBufferSize); Time = InTime; + Timecode = InTimecode; return true; } @@ -45,12 +47,14 @@ public: * Initialize the sample. * * @param InBinaryBuffer The metadata frame data. + * @param InTimecode The sample timecode if available. 
* @param InTime The sample time (in the player's own clock). */ - bool Initialize(TArray InBinaryBuffer, FTimespan InTime) + bool Initialize(TArray InBinaryBuffer, FTimespan InTime, const TOptional& InTimecode) { Buffer = MoveTemp(InBinaryBuffer); Time = InTime; + Timecode = InTimecode; return true; } @@ -79,6 +83,11 @@ public: return Time; } + virtual TOptional GetTimecode() const override + { + return Timecode; + } + public: //~ IMediaPoolable interface @@ -102,4 +111,7 @@ protected: /** Sample time. */ FTimespan Time; + + /** Sample timecode. */ + TOptional Timecode; }; diff --git a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCorePlayerBase.h b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCorePlayerBase.h index 868599a8cdd4..60a550148f93 100644 --- a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCorePlayerBase.h +++ b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCorePlayerBase.h @@ -20,6 +20,16 @@ class IMediaEventSink; enum class EMediaTextureSampleFormat; +struct MEDIAIOCORE_API FMediaIOCoreMediaOption +{ + static const FName FrameRateNumerator; + static const FName FrameRateDenominator; + static const FName ResolutionWidth; + static const FName ResolutionHeight; + static const FName VideoStandard; +}; + + /** * Implements a base player for hardware IO cards. 
* @@ -63,10 +73,12 @@ public: virtual IMediaCache& GetCache() override; virtual IMediaControls& GetControls() override; virtual IMediaSamples& GetSamples() override; + const FMediaIOCoreSamples& GetSamples() const; virtual FString GetStats() const override; virtual IMediaTracks& GetTracks() override; + virtual FString GetUrl() const override; virtual IMediaView& GetView() override; - virtual bool TickTimeManagement(); + virtual void TickTimeManagement(); public: //~ IMediaCache interface @@ -126,6 +138,9 @@ protected: /** Enable timecode logging */ bool bIsTimecodeLogEnable; + /** Url used to open the media player */ + FString OpenUrl; + /** format of the video */ FMediaVideoTrackFormat VideoTrackFormat; @@ -141,12 +156,6 @@ protected: /** The media event handler. */ IMediaEventSink& EventSink; - /** Number of audio channels in the last received sample. */ - int32 LastAudioChannels; - - /** Audio sample rate in the last received sample. */ - int32 LastAudioSampleRate; - /** Video dimensions in the last received sample. */ FIntPoint LastVideoDim; @@ -159,9 +168,6 @@ protected: /** The media sample cache. */ FMediaIOCoreSamples* Samples; - /** Whether to use the timecode embedded in a frame. */ - bool bUseFrameTimecode; - /** Whether to use the Synchronization Time module as time source. */ bool bUseTimeSynchronization; diff --git a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreTextureSampleBase.h b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreTextureSampleBase.h index 961007be285e..4c49c98e3717 100644 --- a/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreTextureSampleBase.h +++ b/Engine/Source/Runtime/MediaIOCore/Public/MediaIOCoreTextureSampleBase.h @@ -18,14 +18,78 @@ protected: : Duration(FTimespan::Zero()) , SampleFormat(EMediaTextureSampleFormat::Undefined) , Time(FTimespan::Zero()) - , Stride(0) , Width(0) , Height(0) - , PixelBuffer(nullptr) { } + /** + * Initialize the sample. + * + * @param InVideoBuffer The audio frame data. 
+ * @param InBufferSize The size of the video buffer. + * @param InStride The number of channel of the video buffer. + * @param InWidth The sample rate of the video buffer. + * @param InHeight The sample rate of the video buffer. + * @param InSampleFormat The sample format of the video buffer. + * @param InTime The sample time (in the player's own clock). + * @param InTimecode The sample timecode if available. + */ + bool Initialize(void* InVideoBuffer, uint32 InBufferSize, uint32 InStride, uint32 InWidth, int32 InHeight, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime, const TOptional& InTimecode) + { + FreeSample(); + + if ((InVideoBuffer == nullptr) || (InSampleFormat == EMediaTextureSampleFormat::Undefined)) + { + return false; + } + + Buffer.Reset(InBufferSize); + Buffer.Append(reinterpret_cast(InVideoBuffer), InBufferSize); + PixelBuffer = Buffer.GetData(); //@TODO: Temp for Blackmagic. + Stride = InStride; + Width = InWidth; + Height = InHeight; + SampleFormat = InSampleFormat; + Time = InTime; + Timecode = InTimecode; + + return true; + } + + /** + * Initialize the sample. + * + * @param InVideoBuffer The audio frame data. + * @param InStride The number of channel of the video buffer. + * @param InWidth The sample rate of the video buffer. + * @param InHeight The sample rate of the video buffer. + * @param InSampleFormat The sample format of the video buffer. + * @param InTime The sample time (in the player's own clock). + * @param InTimecode The sample timecode if available. + */ + bool Initialize(TArray InVideoBuffer, uint32 InStride, uint32 InWidth, int32 InHeight, EMediaTextureSampleFormat InSampleFormat, FTimespan InTime, const TOptional& InTimecode) + { + FreeSample(); + + if ((InVideoBuffer.Num() == 0) || (InSampleFormat == EMediaTextureSampleFormat::Undefined)) + { + return false; + } + + Buffer = MoveTemp(InVideoBuffer); + PixelBuffer = Buffer.GetData(); //@TODO: Temp for Blackmagic. 
+ Stride = InStride; + Width = InWidth; + Height = InHeight; + SampleFormat = InSampleFormat; + Time = InTime; + Timecode = InTimecode; + + return true; + } + public: //~ IMediaTextureSample interface @@ -71,6 +135,11 @@ public: return Time; } + virtual TOptional GetTimecode() const override + { + return Timecode; + } + virtual bool IsCacheable() const override { return true; @@ -90,7 +159,10 @@ public: } protected: - virtual void FreeSample() = 0; + virtual void FreeSample() + { + Buffer.Reset(); + } protected: /** Duration for which the sample is valid. */ @@ -102,12 +174,16 @@ protected: /** Sample time. */ FTimespan Time; + /** Sample timecode. */ + TOptional Timecode; + /** Image dimensions */ uint32_t Stride; uint32_t Width; uint32_t Height; /** Pointer to raw pixels */ - void* PixelBuffer; + TArray Buffer; + void* PixelBuffer; //@TODO: Temp for Blackmagic. }; diff --git a/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.cpp b/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.cpp index da3203eb47b8..1217cbd08108 100644 --- a/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.cpp +++ b/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.cpp @@ -92,6 +92,7 @@ void FMessageBus::Publish( Publisher->GetSenderAddress(), TArray(), Scope, + EMessageFlags::None, FDateTime::UtcNow() + Delay, Expiration, FTaskGraphInterface::Get().GetCurrentThreadIfKnown() @@ -108,6 +109,7 @@ void FMessageBus::Register(const FMessageAddress& Address, const TSharedRef& Attachment, const TArray& Recipients, const FTimespan& Delay, @@ -122,6 +124,7 @@ void FMessageBus::Send( Sender->GetSenderAddress(), Recipients, EMessageScope::Network, + Flags, FDateTime::UtcNow() + Delay, Expiration, FTaskGraphInterface::Get().GetCurrentThreadIfKnown() diff --git a/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.h b/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.h index bb3bf0ed91fd..54370dbc3e2e 100644 --- a/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.h +++ 
b/Engine/Source/Runtime/Messaging/Private/Bus/MessageBus.h @@ -44,7 +44,7 @@ public: virtual FOnMessageBusShutdown& OnShutdown() override; virtual void Publish(void* Message, UScriptStruct* TypeInfo, EMessageScope Scope, const FTimespan& Delay, const FDateTime& Expiration, const TSharedRef& Publisher) override; virtual void Register(const FMessageAddress& Address, const TSharedRef& Recipient) override; - virtual void Send(void* Message, UScriptStruct* TypeInfo, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration, const TSharedRef& Sender) override; + virtual void Send(void* Message, UScriptStruct* TypeInfo, EMessageFlags Flags, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration, const TSharedRef& Sender) override; virtual void Shutdown() override; virtual TSharedPtr Subscribe(const TSharedRef& Subscriber, const FName& MessageType, const FMessageScopeRange& ScopeRange) override; virtual void Unintercept(const TSharedRef& Interceptor, const FName& MessageType) override; diff --git a/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.cpp b/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.cpp index 76ab55ce29f9..e2cf52e78e8d 100644 --- a/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.cpp +++ b/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.cpp @@ -94,6 +94,15 @@ EMessageScope FMessageContext::GetScope() const return Scope; } +EMessageFlags FMessageContext::GetFlags() const +{ + if (OriginalContext.IsValid()) + { + return OriginalContext->GetFlags(); + } + + return Flags; +} const FMessageAddress& FMessageContext::GetSender() const { diff --git a/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.h b/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.h index 8d6403ce4c25..3e1b91747565 100644 --- a/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.h +++ 
b/Engine/Source/Runtime/Messaging/Private/Bus/MessageContext.h @@ -35,6 +35,7 @@ public: * @param InSender The sender's address. * @param InRecipients The message recipients. * @param InScope The message scope. + * @param InFlags The message flags. * @param InTimeSent The time at which the message was sent. * @param InExpiration The message's expiration time. * @param InSenderThread The name of the thread from which the message was sent. @@ -46,6 +47,7 @@ public: const FMessageAddress& InSender, const TArray& InRecipients, EMessageScope InScope, + EMessageFlags InFlags, const FDateTime& InTimeSent, const FDateTime& InExpiration, ENamedThreads::Type InSenderThread @@ -55,6 +57,7 @@ public: , Message(InMessage) , Recipients(InRecipients) , Scope(InScope) + , Flags(InFlags) , Sender(InSender) , SenderThread(InSenderThread) , TimeSent(InTimeSent) @@ -85,6 +88,7 @@ public: , OriginalContext(InContext) , Recipients(NewRecipients) , Scope(NewScope) + , Flags(EMessageFlags::None) , Sender(InForwarder) , SenderThread(InForwarderThread) , TimeSent(InTimeForwarded) @@ -105,6 +109,7 @@ public: virtual TSharedPtr GetOriginalContext() const override; virtual const TArray& GetRecipients() const override; virtual EMessageScope GetScope() const override; + virtual EMessageFlags GetFlags() const override; virtual const FMessageAddress& GetSender() const override; virtual ENamedThreads::Type GetSenderThread() const override; virtual const FDateTime& GetTimeForwarded() const override; @@ -133,6 +138,9 @@ private: /** Holds the message's scope. */ EMessageScope Scope; + /** Holds the message's scope. */ + EMessageFlags Flags; + /** Holds the sender's identifier. 
*/ FMessageAddress Sender; diff --git a/Engine/Source/Runtime/Messaging/Public/IMessageBus.h b/Engine/Source/Runtime/Messaging/Public/IMessageBus.h index 116161cd3aa8..c4419040bc4a 100644 --- a/Engine/Source/Runtime/Messaging/Public/IMessageBus.h +++ b/Engine/Source/Runtime/Messaging/Public/IMessageBus.h @@ -17,6 +17,7 @@ class IMessageTracer; class UScriptStruct; enum class EMessageScope : uint8; +enum class EMessageFlags : uint32; struct FDateTime; struct FMessageAddress; @@ -172,6 +173,7 @@ public: * * @param Message The message to send. * @param TypeInfo The message's type information. + * @param Flags The message flags. * @param Attachment The binary data to attach to the message. * @param Recipients The list of message recipients. * @param Delay The delay after which to send the message. @@ -179,7 +181,7 @@ public: * @param Sender The message sender. * @see Forward, Publish */ - virtual void Send(void* Message, UScriptStruct* TypeInfo, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration, const TSharedRef& Sender) = 0; + virtual void Send(void* Message, UScriptStruct* TypeInfo, EMessageFlags Flags, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration, const TSharedRef& Sender) = 0; /** * Shuts down the message bus. diff --git a/Engine/Source/Runtime/Messaging/Public/IMessageContext.h b/Engine/Source/Runtime/Messaging/Public/IMessageContext.h index 27f05675f63a..7a867c661d98 100644 --- a/Engine/Source/Runtime/Messaging/Public/IMessageContext.h +++ b/Engine/Source/Runtime/Messaging/Public/IMessageContext.h @@ -182,6 +182,15 @@ enum class EMessageScope : uint8 All }; +enum class EMessageFlags : uint32 +{ + /** No special flags */ + None = 0, + /** Guarantee that this message is delivered */ + Reliable = 1 << 0, +}; +ENUM_CLASS_FLAGS(EMessageFlags); + /** Type definition for message scope ranges. 
*/ typedef TRange FMessageScopeRange; @@ -271,6 +280,13 @@ public: */ virtual EMessageScope GetScope() const = 0; + /** + * Gets the scope to which the message was sent. + * + * @return The message scope. + */ + virtual EMessageFlags GetFlags() const = 0; + /** * Gets the sender's address. * diff --git a/Engine/Source/Runtime/MessagingCommon/Public/MessageEndpoint.h b/Engine/Source/Runtime/MessagingCommon/Public/MessageEndpoint.h index faa10897fc95..9d2674d70602 100644 --- a/Engine/Source/Runtime/MessagingCommon/Public/MessageEndpoint.h +++ b/Engine/Source/Runtime/MessagingCommon/Public/MessageEndpoint.h @@ -241,13 +241,31 @@ public: * @param Delay The delay after which to send the message. * @param Expiration The time at which the message expires. */ + DEPRECATED(4.21, "FMessageEndpoint::Send with 6 params is deprecated. Please use FMessageEndpoint::Send that takes additionnal EMessageFlags instead!") void Send(void* Message, UScriptStruct* TypeInfo, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration) + { + Send(Message, TypeInfo, EMessageFlags::None, Attachment, Recipients, Delay, Expiration); + } + + /** + * Sends a message to the specified list of recipients. + * Allows to specify message flags + * + * @param Message The message to send. + * @param TypeInfo The message's type information. + * @param Flags The message's type information. + * @param Attachment An optional binary data attachment. + * @param Recipients The message recipients. + * @param Delay The delay after which to send the message. + * @param Expiration The time at which the message expires. 
+ */ + void Send(void* Message, UScriptStruct* TypeInfo, EMessageFlags Flags, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration) { TSharedPtr Bus = GetBusIfEnabled(); if (Bus.IsValid()) { - Bus->Send(Message, TypeInfo, Attachment, Recipients, Delay, Expiration, AsShared()); + Bus->Send(Message, TypeInfo, Flags, Attachment, Recipients, Delay, Expiration, AsShared()); } } @@ -539,7 +557,7 @@ public: template void Send(MessageType* Message, const FMessageAddress& Recipient) { - Send(Message, MessageType::StaticStruct(), nullptr, TArrayBuilder().Add(Recipient), FTimespan::Zero(), FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, nullptr, TArrayBuilder().Add(Recipient), FTimespan::Zero(), FDateTime::MaxValue()); } /** @@ -553,7 +571,7 @@ public: template void Send(MessageType* Message, const FMessageAddress& Recipient, const FTimespan& Delay) { - Send(Message, MessageType::StaticStruct(), nullptr, TArrayBuilder().Add(Recipient), Delay, FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, nullptr, TArrayBuilder().Add(Recipient), Delay, FDateTime::MaxValue()); } /** @@ -568,7 +586,7 @@ public: template void Send(MessageType* Message, const FMessageAddress& Recipient, const FTimespan& Delay, const FDateTime& Expiration) { - Send(Message, MessageType::StaticStruct(), nullptr, TArrayBuilder().Add(Recipient), Delay, Expiration); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, nullptr, TArrayBuilder().Add(Recipient), Delay, Expiration); } /** @@ -582,7 +600,7 @@ public: template void Send(MessageType* Message, const TSharedPtr& Attachment, const FMessageAddress& Recipient) { - Send(Message, MessageType::StaticStruct(), Attachment, TArrayBuilder().Add(Recipient), FTimespan::Zero(), FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, Attachment, TArrayBuilder().Add(Recipient), 
FTimespan::Zero(), FDateTime::MaxValue()); } /** @@ -598,7 +616,7 @@ public: template void Send(MessageType* Message, const TSharedPtr& Attachment, const FMessageAddress& Recipient, const FDateTime& Expiration, const FTimespan& Delay) { - Send(Message, MessageType::StaticStruct(), Attachment, TArrayBuilder().Add(Recipient), Delay, Expiration); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, Attachment, TArrayBuilder().Add(Recipient), Delay, Expiration); } /** @@ -611,7 +629,7 @@ public: template void Send(MessageType* Message, const TArray& Recipients) { - Send(Message, MessageType::StaticStruct(), nullptr, Recipients, FTimespan::Zero(), FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, nullptr, Recipients, FTimespan::Zero(), FDateTime::MaxValue()); } /** @@ -625,7 +643,7 @@ public: template void Send(MessageType* Message, const TArray& Recipients, const FTimespan& Delay) { - Send(Message, MessageType::StaticStruct(), nullptr, Recipients, Delay, FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, nullptr, Recipients, Delay, FDateTime::MaxValue()); } /** @@ -640,7 +658,7 @@ public: template void Send(MessageType* Message, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay) { - Send(Message, MessageType::StaticStruct(), Attachment, Recipients, Delay, FDateTime::MaxValue()); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, Attachment, Recipients, Delay, FDateTime::MaxValue()); } /** @@ -656,7 +674,7 @@ public: template void Send(MessageType* Message, const TSharedPtr& Attachment, const TArray& Recipients, const FTimespan& Delay, const FDateTime& Expiration) { - Send(Message, MessageType::StaticStruct(), Attachment, Recipients, Delay, Expiration); + Send(Message, MessageType::StaticStruct(), EMessageFlags::None, Attachment, Recipients, Delay, Expiration); } /** diff --git 
a/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcClient.cpp b/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcClient.cpp index 8f5fe7196c23..785746b483c1 100644 --- a/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcClient.cpp +++ b/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcClient.cpp @@ -77,6 +77,7 @@ void FMessageRpcClient::SendCall(const TSharedPtr& Call) MessageEndpoint->Send( Call->ConstructMessage(), Call->GetMessageType(), + EMessageFlags::None, nullptr, TArrayBuilder().Add(ServerAddress), FTimespan::Zero(), diff --git a/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcServer.cpp b/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcServer.cpp index 7b331e08a6f5..04e806814817 100644 --- a/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcServer.cpp +++ b/Engine/Source/Runtime/MessagingRpc/Private/MessageRpcServer.cpp @@ -132,6 +132,7 @@ void FMessageRpcServer::SendResult(const FGuid& CallId, const FReturnInfo& Retur MessageEndpoint->Send( Message, ReturnInfo.Return->GetResponseTypeInfo(), + EMessageFlags::None, nullptr, TArrayBuilder().Add(ReturnInfo.ClientAddress), FTimespan::Zero(), diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DAttachTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DAttachTrack.cpp index 76d273031250..a9f523b169e4 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DAttachTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DAttachTrack.cpp @@ -22,8 +22,7 @@ FMovieSceneEvalTemplatePtr UMovieScene3DAttachTrack::CreateTemplateForSection(co void UMovieScene3DAttachTrack::AddConstraint(FFrameNumber KeyTime, int32 Duration, const FName SocketName, const FName ComponentName, const FMovieSceneObjectBindingID& ConstraintBindingID) { // add the section - UMovieScene3DAttachSection* NewSection = NewObject(this); - NewSection->SetFlags(RF_Transactional); + UMovieScene3DAttachSection* NewSection = 
NewObject(this, NAME_None, RF_Transactional); NewSection->SetAttachTargetID(ConstraintBindingID); NewSection->InitialPlacement(ConstraintSections, KeyTime, Duration, SupportsMultipleRows()); NewSection->AttachSocketName = SocketName; @@ -34,8 +33,7 @@ void UMovieScene3DAttachTrack::AddConstraint(FFrameNumber KeyTime, int32 Duratio UMovieSceneSection* UMovieScene3DAttachTrack::CreateNewSection() { - UMovieScene3DAttachSection* NewSection = NewObject(this); - NewSection->SetFlags(RF_Transactional); + UMovieScene3DAttachSection* NewSection = NewObject(this, NAME_None, RF_Transactional); ConstraintSections.Add(NewSection); diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DPathTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DPathTrack.cpp index 48cd73d4725e..c84c02945a26 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DPathTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DPathTrack.cpp @@ -23,7 +23,7 @@ FMovieSceneEvalTemplatePtr UMovieScene3DPathTrack::CreateTemplateForSection(cons void UMovieScene3DPathTrack::AddConstraint(FFrameNumber KeyTime, int32 Duration, const FName SocketName, const FName ComponentName, const FMovieSceneObjectBindingID& ConstraintBindingID) { - UMovieScene3DPathSection* NewSection = NewObject(this); + UMovieScene3DPathSection* NewSection = NewObject(this, NAME_None, RF_Transactional); { NewSection->SetPathBindingID( ConstraintBindingID ); NewSection->InitialPlacement( ConstraintSections, KeyTime, Duration, SupportsMultipleRows() ); diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DTransformTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DTransformTrack.cpp index 4c1e7af9d43e..608b499866ad 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DTransformTrack.cpp +++ 
b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScene3DTransformTrack.cpp @@ -25,7 +25,7 @@ UMovieScene3DTransformTrack::UMovieScene3DTransformTrack( const FObjectInitializ UMovieSceneSection* UMovieScene3DTransformTrack::CreateNewSection() { - return NewObject(this, UMovieScene3DTransformSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneInterrogationKey UMovieScene3DTransformTrack::GetInterrogationKey() diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneActorReferenceTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneActorReferenceTrack.cpp index 55583458b9db..5686fb028989 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneActorReferenceTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneActorReferenceTrack.cpp @@ -12,7 +12,7 @@ UMovieSceneActorReferenceTrack::UMovieSceneActorReferenceTrack( const FObjectIni UMovieSceneSection* UMovieSceneActorReferenceTrack::CreateNewSection() { - return NewObject(this, UMovieSceneActorReferenceSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneActorReferenceTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneAudioTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneAudioTrack.cpp index 0b49c36584e8..565844122e93 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneAudioTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneAudioTrack.cpp @@ -85,7 +85,7 @@ UMovieSceneSection* UMovieSceneAudioTrack::AddNewSoundOnRow(USoundBase* Sound, F } // add the section - UMovieSceneAudioSection* NewSection = NewObject(this); + UMovieSceneAudioSection* NewSection = NewObject(this, NAME_None, 
RF_Transactional); NewSection->InitialPlacementOnRow( AudioSections, Time, DurationToUse.FrameNumber.Value, RowIndex ); NewSection->SetSound(Sound); diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneBoolTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneBoolTrack.cpp index cd97ca38430a..10be76c705eb 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneBoolTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneBoolTrack.cpp @@ -7,7 +7,7 @@ UMovieSceneSection* UMovieSceneBoolTrack::CreateNewSection() { - return NewObject(this, UMovieSceneBoolSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneByteTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneByteTrack.cpp index 005bb38d7873..4f1ee224a34c 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneByteTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneByteTrack.cpp @@ -17,7 +17,7 @@ void UMovieSceneByteTrack::PostLoad() UMovieSceneSection* UMovieSceneByteTrack::CreateNewSection() { - UMovieSceneByteSection* NewByteSection = NewObject(this, UMovieSceneByteSection::StaticClass(), NAME_None, RF_Transactional); + UMovieSceneByteSection* NewByteSection = NewObject(this, NAME_None, RF_Transactional); NewByteSection->ByteCurve.SetEnum(Enum); return NewByteSection; } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneColorTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneColorTrack.cpp index f3880c493f30..74ba2c44a147 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneColorTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneColorTrack.cpp @@ -14,7 +14,7 @@ UMovieSceneColorTrack::UMovieSceneColorTrack( const 
FObjectInitializer& ObjectIn UMovieSceneSection* UMovieSceneColorTrack::CreateNewSection() { - return NewObject(this, UMovieSceneColorSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneColorTrack::CreateTemplateForSection(const UMovieSceneSection& Section) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEnumTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEnumTrack.cpp index 91cc905e8a62..82de49884f48 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEnumTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEnumTrack.cpp @@ -17,7 +17,7 @@ void UMovieSceneEnumTrack::PostLoad() UMovieSceneSection* UMovieSceneEnumTrack::CreateNewSection() { - UMovieSceneEnumSection* NewEnumSection = NewObject(this, UMovieSceneEnumSection::StaticClass(), NAME_None, RF_Transactional); + UMovieSceneEnumSection* NewEnumSection = NewObject(this, NAME_None, RF_Transactional); NewEnumSection->EnumCurve.SetEnum(Enum); return NewEnumSection; } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEulerTransformTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEulerTransformTrack.cpp index 571dd06aa4a5..4381d0268e6d 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEulerTransformTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEulerTransformTrack.cpp @@ -22,7 +22,7 @@ UMovieSceneEulerTransformTrack::UMovieSceneEulerTransformTrack(const FObjectInit UMovieSceneSection* UMovieSceneEulerTransformTrack::CreateNewSection() { - return NewObject(this, UMovieScene3DTransformSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneEulerTransformTrack::CreateTemplateForSection(const 
UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEventTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEventTrack.cpp index d1a9347e30b5..ec024f6faa20 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEventTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneEventTrack.cpp @@ -24,7 +24,7 @@ void UMovieSceneEventTrack::AddSection(UMovieSceneSection& Section) UMovieSceneSection* UMovieSceneEventTrack::CreateNewSection() { - return NewObject(this, UMovieSceneEventTriggerSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFadeTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFadeTrack.cpp index 683cfd0efd22..64715a87878c 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFadeTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFadeTrack.cpp @@ -19,7 +19,7 @@ UMovieSceneFadeTrack::UMovieSceneFadeTrack(const FObjectInitializer& Init) UMovieSceneSection* UMovieSceneFadeTrack::CreateNewSection() { - return NewObject(this, UMovieSceneFadeSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneFadeTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFloatTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFloatTrack.cpp index e78f4d8fc780..94401435b680 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFloatTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneFloatTrack.cpp @@ -15,7 +15,7 @@ UMovieSceneFloatTrack::UMovieSceneFloatTrack( const 
FObjectInitializer& ObjectIn UMovieSceneSection* UMovieSceneFloatTrack::CreateNewSection() { - return NewObject(this, UMovieSceneFloatSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneFloatTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneIntegerTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneIntegerTrack.cpp index fcdd9f2ba023..454c881e5c8a 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneIntegerTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneIntegerTrack.cpp @@ -13,7 +13,7 @@ UMovieSceneIntegerTrack::UMovieSceneIntegerTrack( const FObjectInitializer& Obje UMovieSceneSection* UMovieSceneIntegerTrack::CreateNewSection() { - return NewObject(this, UMovieSceneIntegerSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneIntegerTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneLevelVisibilityTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneLevelVisibilityTrack.cpp index 8bf42fbc727d..ae7673e1b9ed 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneLevelVisibilityTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneLevelVisibilityTrack.cpp @@ -43,7 +43,7 @@ void UMovieSceneLevelVisibilityTrack::RemoveSection( UMovieSceneSection& Section UMovieSceneSection* UMovieSceneLevelVisibilityTrack::CreateNewSection() { - return NewObject(this, UMovieSceneLevelVisibilitySection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git 
a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialParameterCollectionTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialParameterCollectionTrack.cpp index 5ff40fa5e6e3..8463deb1f34c 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialParameterCollectionTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialParameterCollectionTrack.cpp @@ -16,7 +16,7 @@ UMovieSceneMaterialParameterCollectionTrack::UMovieSceneMaterialParameterCollect UMovieSceneSection* UMovieSceneMaterialParameterCollectionTrack::CreateNewSection() { - UMovieSceneSection* NewSection = NewObject(this, UMovieSceneParameterSection::StaticClass(), NAME_None, RF_Transactional); + UMovieSceneSection* NewSection = NewObject(this, NAME_None, RF_Transactional); NewSection->SetRange(TRange::All()); return NewSection; } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialTrack.cpp index 8fda22c8c186..939ab2130ddc 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneMaterialTrack.cpp @@ -16,7 +16,7 @@ UMovieSceneMaterialTrack::UMovieSceneMaterialTrack(const FObjectInitializer& Obj UMovieSceneSection* UMovieSceneMaterialTrack::CreateNewSection() { - return NewObject(this, UMovieSceneParameterSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleParameterTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleParameterTrack.cpp index a57cebddfc73..853794db331f 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleParameterTrack.cpp +++ 
b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleParameterTrack.cpp @@ -21,7 +21,7 @@ FMovieSceneEvalTemplatePtr UMovieSceneParticleParameterTrack::CreateTemplateForS UMovieSceneSection* UMovieSceneParticleParameterTrack::CreateNewSection() { - return NewObject( this, UMovieSceneParameterSection::StaticClass(), NAME_None, RF_Transactional ); + return NewObject(this, NAME_None, RF_Transactional); } void UMovieSceneParticleParameterTrack::RemoveAllAnimationData() diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleTrack.cpp index 3628380777ba..8a1e9ae6ae6f 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneParticleTrack.cpp @@ -64,7 +64,7 @@ void UMovieSceneParticleTrack::AddNewSection( FFrameNumber SectionTime ) UMovieSceneSection* UMovieSceneParticleTrack::CreateNewSection() { - return NewObject( this, NAME_None, RF_Transactional ); + return NewObject(this, NAME_None, RF_Transactional); } #if WITH_EDITORONLY_DATA diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScenePropertyTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScenePropertyTrack.cpp index 0efd56416666..47494732612c 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScenePropertyTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieScenePropertyTrack.cpp @@ -176,6 +176,7 @@ UMovieSceneSection* UMovieScenePropertyTrack::FindOrAddSection(FFrameNumber Time // Add a new section that starts and ends at the same time UMovieSceneSection* NewSection = CreateNewSection(); + ensureAlwaysMsgf(NewSection->HasAnyFlags(RF_Transactional), TEXT("CreateNewSection must return an instance with RF_Transactional set! 
(pass RF_Transactional to NewObject)")); NewSection->SetFlags(RF_Transactional); NewSection->SetRange(TRange::Inclusive(Time, Time)); diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneSlomoTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneSlomoTrack.cpp index 98b7c3a393e8..b9f76650c4eb 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneSlomoTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneSlomoTrack.cpp @@ -19,7 +19,7 @@ UMovieSceneSlomoTrack::UMovieSceneSlomoTrack(const FObjectInitializer& Init) UMovieSceneSection* UMovieSceneSlomoTrack::CreateNewSection() { - return NewObject(this, UMovieSceneSlomoSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneSlomoTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneStringTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneStringTrack.cpp index f4aadf60fe5a..efa2928b36ac 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneStringTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneStringTrack.cpp @@ -20,7 +20,7 @@ void UMovieSceneStringTrack::AddSection(UMovieSceneSection& Section) UMovieSceneSection* UMovieSceneStringTrack::CreateNewSection() { - return NewObject(this, UMovieSceneStringSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneTransformTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneTransformTrack.cpp index 173a35e63d5a..e42daa811a40 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneTransformTrack.cpp +++ 
b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneTransformTrack.cpp @@ -22,7 +22,7 @@ UMovieSceneTransformTrack::UMovieSceneTransformTrack(const FObjectInitializer& O UMovieSceneSection* UMovieSceneTransformTrack::CreateNewSection() { - return NewObject(this, UMovieScene3DTransformSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } FMovieSceneEvalTemplatePtr UMovieSceneTransformTrack::CreateTemplateForSection(const UMovieSceneSection& InSection) const diff --git a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneVectorTrack.cpp b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneVectorTrack.cpp index ee38ed3fb3e2..9fbcb6b25ed7 100644 --- a/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneVectorTrack.cpp +++ b/Engine/Source/Runtime/MovieSceneTracks/Private/Tracks/MovieSceneVectorTrack.cpp @@ -15,7 +15,7 @@ UMovieSceneVectorTrack::UMovieSceneVectorTrack( const FObjectInitializer& Object UMovieSceneSection* UMovieSceneVectorTrack::CreateNewSection() { - UMovieSceneVectorSection* NewSection = NewObject(this, UMovieSceneVectorSection::StaticClass(), NAME_None, RF_Transactional); + UMovieSceneVectorSection* NewSection = NewObject(this, NAME_None, RF_Transactional); NewSection->SetChannelsUsed(NumChannelsUsed); return NewSection; } diff --git a/Engine/Source/Runtime/Serialization/Private/Backends/CborStructDeserializerBackend.cpp b/Engine/Source/Runtime/Serialization/Private/Backends/CborStructDeserializerBackend.cpp new file mode 100644 index 000000000000..ecdb98d92f84 --- /dev/null +++ b/Engine/Source/Runtime/Serialization/Private/Backends/CborStructDeserializerBackend.cpp @@ -0,0 +1,262 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
+ +#include "Backends/CborStructDeserializerBackend.h" +#include "Backends/StructDeserializerBackendUtilities.h" +#include "UObject/Class.h" +#include "UObject/UnrealType.h" +#include "UObject/EnumProperty.h" +#include "UObject/TextProperty.h" + +const FString& FCborStructDeserializerBackend::GetCurrentPropertyName() const +{ + return LastMapKey; +} + +FString FCborStructDeserializerBackend::GetDebugString() const +{ + FArchive* Ar = const_cast(CborReader.GetArchive()); + return FString::Printf(TEXT("Offset: %u"), Ar ? Ar->Tell() : 0); +} + +const FString& FCborStructDeserializerBackend::GetLastErrorMessage() const +{ + // interface function that is actually entirely unused... + static FString Dummy; + return Dummy; +} + +bool FCborStructDeserializerBackend::GetNextToken(EStructDeserializerBackendTokens& OutToken) +{ + LastMapKey.Reset(); + + if (!CborReader.ReadNext(LastContext)) + { + OutToken = LastContext.IsError() ? EStructDeserializerBackendTokens::Error : EStructDeserializerBackendTokens::None; + return false; + } + + if (LastContext.IsBreak()) + { + ECborCode ContainerEndType = LastContext.AsBreak(); + // We do not support indefinite string container type + check(ContainerEndType == ECborCode::Array || ContainerEndType == ECborCode::Map); + OutToken = ContainerEndType == ECborCode::Array ? EStructDeserializerBackendTokens::ArrayEnd : EStructDeserializerBackendTokens::StructureEnd; + return true; + } + + // if after reading the last context, the parent context is a map with an odd length, we just read a key + if (CborReader.GetContext().MajorType() == ECborCode::Map && (CborReader.GetContext().AsLength() & 1)) + { + // Should be a string + check(LastContext.MajorType() == ECborCode::TextString); + LastMapKey = LastContext.AsString(); + + // Read next and carry on + if (!CborReader.ReadNext(LastContext)) + { + OutToken = LastContext.IsError() ? 
EStructDeserializerBackendTokens::Error : EStructDeserializerBackendTokens::None; + return false; + } + } + + switch (LastContext.MajorType()) + { + case ECborCode::Array: + OutToken = EStructDeserializerBackendTokens::ArrayStart; + break; + case ECborCode::Map: + OutToken = EStructDeserializerBackendTokens::StructureStart; + break; + case ECborCode::Int: + // fall through + case ECborCode::Uint: + // fall through + case ECborCode::TextString: + // fall through + case ECborCode::Prim: + OutToken = EStructDeserializerBackendTokens::Property; + break; + default: + // Other types are unsupported + check(false); + } + + return true; +} + +bool FCborStructDeserializerBackend::ReadProperty(UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex) +{ + switch (LastContext.MajorType()) + { + // Unsigned Integers + case ECborCode::Uint: + { + if (UByteProperty* ByteProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (uint8)LastContext.AsUInt()); + } + + if (UUInt16Property* UInt16Property = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(UInt16Property, Outer, Data, ArrayIndex, (uint16)LastContext.AsUInt()); + } + + if (UUInt32Property* UInt32Property = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(UInt32Property, Outer, Data, ArrayIndex, (uint32)LastContext.AsUInt()); + } + + if (UUInt64Property* UInt64Property = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(UInt64Property, Outer, Data, ArrayIndex, (uint64)LastContext.AsUInt()); + } + } + // Fall through - cbor can encode positive signed integers as unsigned + // Signed Integers + case ECborCode::Int: + { + if (UInt8Property* Int8Property = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(Int8Property, Outer, Data, ArrayIndex, (int8)LastContext.AsInt()); + } + + if (UInt16Property* Int16Property = 
Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(Int16Property, Outer, Data, ArrayIndex, (int16)LastContext.AsInt()); + } + + if (UIntProperty* IntProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(IntProperty, Outer, Data, ArrayIndex, (int32)LastContext.AsInt()); + } + + if (UInt64Property* Int64Property = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(Int64Property, Outer, Data, ArrayIndex, (int64)LastContext.AsInt()); + } + + + UE_LOG(LogSerialization, Verbose, TEXT("Integer field %s with value '%d' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), LastContext.AsUInt(), *Property->GetClass()->GetName(), *GetDebugString()); + + return false; + } + break; + + // Strings, Names & Enumerations + case ECborCode::TextString: + { + FString StringValue = LastContext.AsString(); + + if (UStrProperty* StrProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(StrProperty, Outer, Data, ArrayIndex, StringValue); + } + + if (UNameProperty* NameProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(NameProperty, Outer, Data, ArrayIndex, FName(*StringValue)); + } + + if (UTextProperty* TextProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(TextProperty, Outer, Data, ArrayIndex, FText::FromString(StringValue)); + } + + if (UByteProperty* ByteProperty = Cast(Property)) + { + if (!ByteProperty->Enum) + { + return false; + } + + int32 Value = ByteProperty->Enum->GetValueByName(*StringValue); + if (Value == INDEX_NONE) + { + return false; + } + + return StructDeserializerBackendUtilities::SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (uint8)Value); + } + + if (UEnumProperty* EnumProperty = Cast(Property)) + { + int64 Value = EnumProperty->GetEnum()->GetValueByName(*StringValue); + if (Value == INDEX_NONE) + { + return 
false; + } + + if (void* ElementPtr = StructDeserializerBackendUtilities::GetPropertyValuePtr(EnumProperty, Outer, Data, ArrayIndex)) + { + EnumProperty->GetUnderlyingProperty()->SetIntPropertyValue(ElementPtr, Value); + return true; + } + + return false; + } + + if (UClassProperty* ClassProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(ClassProperty, Outer, Data, ArrayIndex, LoadObject(NULL, *StringValue, NULL, LOAD_NoWarn)); + } + + UE_LOG(LogSerialization, Verbose, TEXT("String field %s with value '%s' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), *StringValue, *Property->GetClass()->GetName(), *GetDebugString()); + + return false; + } + break; + + // Prim + case ECborCode::Prim: + { + switch (LastContext.AdditionalValue()) + { + // Boolean + case ECborCode::True: + // fall through + case ECborCode::False: + if (UBoolProperty* BoolProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(BoolProperty, Outer, Data, ArrayIndex, LastContext.AsBool()); + } + UE_LOG(LogSerialization, Verbose, TEXT("Boolean field %s with value '%s' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), LastContext.AsBool() ? 
*(GTrue.ToString()) : *(GFalse.ToString()), *Property->GetClass()->GetName(), *GetDebugString()); + return false; + // Null + case ECborCode::Null: + return StructDeserializerBackendUtilities::ClearPropertyValue(Property, Outer, Data, ArrayIndex); + // Float + case ECborCode::Value_4Bytes: + if (UFloatProperty* FloatProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(FloatProperty, Outer, Data, ArrayIndex, LastContext.AsFloat()); + } + UE_LOG(LogSerialization, Verbose, TEXT("Float field %s with value '%f' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), LastContext.AsFloat(), *Property->GetClass()->GetName(), *GetDebugString()); + return false; + // Double + case ECborCode::Value_8Bytes: + if (UDoubleProperty* DoubleProperty = Cast(Property)) + { + return StructDeserializerBackendUtilities::SetPropertyValue(DoubleProperty, Outer, Data, ArrayIndex, LastContext.AsDouble()); + } + UE_LOG(LogSerialization, Verbose, TEXT("Double field %s with value '%f' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), LastContext.AsDouble(), *Property->GetClass()->GetName(), *GetDebugString()); + return false; + default: + UE_LOG(LogSerialization, Verbose, TEXT("Unsupported primitive type for %s in UProperty type %s (%s)"), *Property->GetFName().ToString(), LastContext.AsDouble(), *Property->GetClass()->GetName(), *GetDebugString()); + return false; + } + } + } + + return true; + +} + +void FCborStructDeserializerBackend::SkipArray() +{ + CborReader.SkipContainer(ECborCode::Array); +} + +void FCborStructDeserializerBackend::SkipStructure() +{ + CborReader.SkipContainer(ECborCode::Map); +} diff --git a/Engine/Source/Runtime/Serialization/Private/Backends/CborStructSerializerBackend.cpp b/Engine/Source/Runtime/Serialization/Private/Backends/CborStructSerializerBackend.cpp new file mode 100644 index 000000000000..83eb602d7131 --- /dev/null +++ 
b/Engine/Source/Runtime/Serialization/Private/Backends/CborStructSerializerBackend.cpp @@ -0,0 +1,234 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#include "Backends/CborStructSerializerBackend.h" +#include "UObject/UnrealType.h" +#include "UObject/EnumProperty.h" +#include "UObject/TextProperty.h" +#include "UObject/PropertyPortFlags.h" + +void FCborStructSerializerBackend::BeginArray(const FStructSerializerState& State) +{ + UObject* Outer = State.ValueProperty->GetOuter(); + + // Array nested in Array + if ((Outer != nullptr) && (Outer->GetClass() == UArrayProperty::StaticClass())) + { + CborWriter.WriteContainerStart(ECborCode::Array, -1); + } + // Array nested in Map + else if (State.KeyProperty != nullptr) + { + FString KeyString; + State.KeyProperty->ExportTextItem(KeyString, State.KeyData, nullptr, nullptr, PPF_None); + CborWriter.WriteValue(KeyString); + CborWriter.WriteContainerStart(ECborCode::Array, -1/*Indefinite*/); + } + // Array nested in Object + else + { + CborWriter.WriteValue(State.ValueProperty->GetName()); + CborWriter.WriteContainerStart(ECborCode::Array, -1/*Indefinite*/); + } +} + +void FCborStructSerializerBackend::BeginStructure(const FStructSerializerState& State) +{ + if (State.ValueProperty != nullptr) + { + UObject* Outer = State.ValueProperty->GetOuter(); + + // Object nested in Array + if ((Outer != nullptr) && (Outer->GetClass() == UArrayProperty::StaticClass())) + { + CborWriter.WriteContainerStart(ECborCode::Map, -1/*Indefinite*/); + } + // Object nested in Map + else if (State.KeyProperty != nullptr) + { + FString KeyString; + State.KeyProperty->ExportTextItem(KeyString, State.KeyData, nullptr, nullptr, PPF_None); + CborWriter.WriteValue(KeyString); + CborWriter.WriteContainerStart(ECborCode::Map, -1/*Indefinite*/); + } + // Object nested in Object + else + { + CborWriter.WriteValue(State.ValueProperty->GetName()); + CborWriter.WriteContainerStart(ECborCode::Map, -1/*Indefinite*/); + } + } + // Root Object 
+ else + { + CborWriter.WriteContainerStart(ECborCode::Map, -1/*Indefinite*/); + } +} + +void FCborStructSerializerBackend::EndArray(const FStructSerializerState& State) +{ + CborWriter.WriteContainerEnd(); +} + +void FCborStructSerializerBackend::EndStructure(const FStructSerializerState& State) +{ + CborWriter.WriteContainerEnd(); +} + +void FCborStructSerializerBackend::WriteComment(const FString& Comment) +{ + // Binary format do not support comment +} + +namespace CborStructSerializerBackend +{ + // Writes a property value to the serialization output. + template + void WritePropertyValue(FCborWriter& CborWriter, const FStructSerializerState& State, const ValueType& Value) + { + // Value nested in Array or as root + if ((State.ValueProperty == nullptr) || (State.ValueProperty->ArrayDim > 1) || (State.ValueProperty->GetOuter()->GetClass() == UArrayProperty::StaticClass())) + { + CborWriter.WriteValue(Value); + } + // Value nested in Map + else if (State.KeyProperty != nullptr) + { + FString KeyString; + State.KeyProperty->ExportTextItem(KeyString, State.KeyData, nullptr, nullptr, PPF_None); + CborWriter.WriteValue(KeyString); + CborWriter.WriteValue(Value); + } + else + { + CborWriter.WriteValue(State.ValueProperty->GetName()); + CborWriter.WriteValue(Value); + } + } + + // Writes a null value to the serialization output. 
+ void WriteNull(FCborWriter& CborWriter, const FStructSerializerState& State) + { + if ((State.ValueProperty == nullptr) || (State.ValueProperty->ArrayDim > 1) || (State.ValueProperty->GetOuter()->GetClass() == UArrayProperty::StaticClass())) + { + CborWriter.WriteNull(); + } + else if (State.KeyProperty != nullptr) + { + FString KeyString; + State.KeyProperty->ExportTextItem(KeyString, State.KeyData, nullptr, nullptr, PPF_None); + CborWriter.WriteValue(KeyString); + CborWriter.WriteNull(); + } + else + { + CborWriter.WriteValue(State.ValueProperty->GetName()); + CborWriter.WriteNull(); + } + } + +} + +void FCborStructSerializerBackend::WriteProperty(const FStructSerializerState& State, int32 ArrayIndex) +{ + using namespace CborStructSerializerBackend; + + // Bool + if (State.ValueType == UBoolProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + + // Unsigned Bytes & Enums + else if (State.ValueType == UEnumProperty::StaticClass()) + { + UEnumProperty* EnumProperty = CastChecked(State.ValueProperty); + + WritePropertyValue(CborWriter, State, EnumProperty->GetEnum()->GetNameStringByValue(EnumProperty->GetUnderlyingProperty()->GetSignedIntPropertyValue(EnumProperty->ContainerPtrToValuePtr(State.ValueData, ArrayIndex)))); + } + else if (State.ValueType == UByteProperty::StaticClass()) + { + UByteProperty* ByteProperty = CastChecked(State.ValueProperty); + + if (ByteProperty->IsEnum()) + { + WritePropertyValue(CborWriter, State, ByteProperty->Enum->GetNameStringByValue(ByteProperty->GetPropertyValue_InContainer(State.ValueData, ArrayIndex))); + } + else + { + WritePropertyValue(CborWriter, State, (int64)ByteProperty->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + } + + // Double & Float + else if (State.ValueType == UDoubleProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, 
CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UFloatProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + + // Signed Integers + else if (State.ValueType == UIntProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UInt8Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UInt16Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UInt64Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + + // Unsigned Integers + else if (State.ValueType == UUInt16Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UUInt32Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UUInt64Property::StaticClass()) + { + WritePropertyValue(CborWriter, State, (int64)CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + + // FNames, Strings & Text + else if (State.ValueType == UNameProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, 
CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex).ToString()); + } + else if (State.ValueType == UStrProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)); + } + else if (State.ValueType == UTextProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex).ToString()); + } + + // Classes & Objects + else if (State.ValueType == UClassProperty::StaticClass()) + { + WritePropertyValue(CborWriter, State, CastChecked(State.ValueProperty)->GetPropertyValue_InContainer(State.ValueData, ArrayIndex)->GetPathName()); + } + else if (State.ValueType == UObjectProperty::StaticClass()) + { + WriteNull(CborWriter, State); + } + + // Unsupported + else + { + UE_LOG(LogSerialization, Verbose, TEXT("FCborStructSerializerBackend: Property %s cannot be serialized, because its type (%s) is not supported"), *State.ValueProperty->GetFName().ToString(), *State.ValueType->GetFName().ToString()); + } + +} diff --git a/Engine/Source/Runtime/Serialization/Private/Backends/JsonStructDeserializerBackend.cpp b/Engine/Source/Runtime/Serialization/Private/Backends/JsonStructDeserializerBackend.cpp index 195f1135e268..2057b03464aa 100644 --- a/Engine/Source/Runtime/Serialization/Private/Backends/JsonStructDeserializerBackend.cpp +++ b/Engine/Source/Runtime/Serialization/Private/Backends/JsonStructDeserializerBackend.cpp @@ -1,107 +1,12 @@ // Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. 
#include "Backends/JsonStructDeserializerBackend.h" +#include "Backends/StructDeserializerBackendUtilities.h" #include "UObject/Class.h" #include "UObject/UnrealType.h" #include "UObject/EnumProperty.h" #include "UObject/TextProperty.h" - -/* Internal helpers - *****************************************************************************/ - -namespace JsonStructDeserializerBackend -{ - /** - * Clears the value of the given property. - * - * @param Property The property to clear. - * @param Outer The property that contains the property to be cleared, if any. - * @param Data A pointer to the memory holding the property's data. - * @param ArrayIndex The index of the element to clear (if the property is an array). - * @return true on success, false otherwise. - * @see SetPropertyValue - */ - bool ClearPropertyValue( UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex ) - { - UArrayProperty* ArrayProperty = Cast(Outer); - - if (ArrayProperty != nullptr) - { - if (ArrayProperty->Inner != Property) - { - return false; - } - - FScriptArrayHelper ArrayHelper(ArrayProperty, ArrayProperty->template ContainerPtrToValuePtr(Data)); - ArrayIndex = ArrayHelper.AddValue(); - } - - Property->ClearValue_InContainer(Data, ArrayIndex); - - return true; - } - - - /** - * Gets a pointer to object of the given property. - * - * @param Property The property to get. - * @param Outer The property that contains the property to be get, if any. - * @param Data A pointer to the memory holding the property's data. - * @param ArrayIndex The index of the element to set (if the property is an array). - * @return A pointer to the object represented by the property, null otherwise.. 
- * @see ClearPropertyValue - */ - void* GetPropertyValuePtr( UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex ) - { - check(Property); - - if (UArrayProperty* ArrayProperty = Cast(Outer)) - { - if (ArrayProperty->Inner != Property) - { - return nullptr; - } - - FScriptArrayHelper ArrayHelper(ArrayProperty, ArrayProperty->template ContainerPtrToValuePtr(Data)); - int32 Index = ArrayHelper.AddValue(); - - return ArrayHelper.GetRawPtr(Index); - } - - if (ArrayIndex >= Property->ArrayDim) - { - return nullptr; - } - - return Property->template ContainerPtrToValuePtr(Data, ArrayIndex); - } - - /** - * Sets the value of the given property. - * - * @param Property The property to set. - * @param Outer The property that contains the property to be set, if any. - * @param Data A pointer to the memory holding the property's data. - * @param ArrayIndex The index of the element to set (if the property is an array). - * @return true on success, false otherwise. - * @see ClearPropertyValue - */ - template - bool SetPropertyValue( PropertyType* Property, UProperty* Outer, void* Data, int32 ArrayIndex, const ValueType& Value ) - { - if (void* Ptr = GetPropertyValuePtr(Property, Outer, Data, ArrayIndex)) - { - *(ValueType*)Ptr = Value; - return true; - } - - return false; - } -} - - /* IStructDeserializerBackend interface *****************************************************************************/ @@ -171,8 +76,6 @@ bool FJsonStructDeserializerBackend::GetNextToken( EStructDeserializerBackendTok bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex ) { - using namespace JsonStructDeserializerBackend; - switch (LastNotation) { // boolean values @@ -182,7 +85,7 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert if (UBoolProperty* BoolProperty = Cast(Property)) { - return SetPropertyValue(BoolProperty, Outer, Data, ArrayIndex, BoolValue); + return 
StructDeserializerBackendUtilities::SetPropertyValue(BoolProperty, Outer, Data, ArrayIndex, BoolValue); } UE_LOG(LogSerialization, Verbose, TEXT("Boolean field %s with value '%s' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), BoolValue ? *(GTrue.ToString()) : *(GFalse.ToString()), *Property->GetClass()->GetName(), *GetDebugString()); @@ -198,52 +101,52 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert if (UByteProperty* ByteProperty = Cast(Property)) { - return SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (int8)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (int8)NumericValue); } if (UDoubleProperty* DoubleProperty = Cast(Property)) { - return SetPropertyValue(DoubleProperty, Outer, Data, ArrayIndex, (double)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(DoubleProperty, Outer, Data, ArrayIndex, (double)NumericValue); } if (UFloatProperty* FloatProperty = Cast(Property)) { - return SetPropertyValue(FloatProperty, Outer, Data, ArrayIndex, (float)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(FloatProperty, Outer, Data, ArrayIndex, (float)NumericValue); } if (UIntProperty* IntProperty = Cast(Property)) { - return SetPropertyValue(IntProperty, Outer, Data, ArrayIndex, (int32)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(IntProperty, Outer, Data, ArrayIndex, (int32)NumericValue); } if (UUInt32Property* UInt32Property = Cast(Property)) { - return SetPropertyValue(UInt32Property, Outer, Data, ArrayIndex, (uint32)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(UInt32Property, Outer, Data, ArrayIndex, (uint32)NumericValue); } if (UInt16Property* Int16Property = Cast(Property)) { - return SetPropertyValue(Int16Property, Outer, Data, ArrayIndex, (int16)NumericValue); + return 
StructDeserializerBackendUtilities::SetPropertyValue(Int16Property, Outer, Data, ArrayIndex, (int16)NumericValue); } if (UUInt16Property* UInt16Property = Cast(Property)) { - return SetPropertyValue(UInt16Property, Outer, Data, ArrayIndex, (uint16)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(UInt16Property, Outer, Data, ArrayIndex, (uint16)NumericValue); } if (UInt64Property* Int64Property = Cast(Property)) { - return SetPropertyValue(Int64Property, Outer, Data, ArrayIndex, (int64)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(Int64Property, Outer, Data, ArrayIndex, (int64)NumericValue); } if (UUInt64Property* UInt64Property = Cast(Property)) { - return SetPropertyValue(UInt64Property, Outer, Data, ArrayIndex, (uint64)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(UInt64Property, Outer, Data, ArrayIndex, (uint64)NumericValue); } if (UInt8Property* Int8Property = Cast(Property)) { - return SetPropertyValue(Int8Property, Outer, Data, ArrayIndex, (int8)NumericValue); + return StructDeserializerBackendUtilities::SetPropertyValue(Int8Property, Outer, Data, ArrayIndex, (int8)NumericValue); } UE_LOG(LogSerialization, Verbose, TEXT("Numeric field %s with value '%f' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), NumericValue, *Property->GetClass()->GetName(), *GetDebugString()); @@ -254,7 +157,7 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert // null values case EJsonNotation::Null: - return ClearPropertyValue(Property, Outer, Data, ArrayIndex); + return StructDeserializerBackendUtilities::ClearPropertyValue(Property, Outer, Data, ArrayIndex); // strings, names & enumerations case EJsonNotation::String: @@ -263,17 +166,17 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert if (UStrProperty* StrProperty = Cast(Property)) { - return SetPropertyValue(StrProperty, Outer, Data, ArrayIndex, 
StringValue); + return StructDeserializerBackendUtilities::SetPropertyValue(StrProperty, Outer, Data, ArrayIndex, StringValue); } if (UNameProperty* NameProperty = Cast(Property)) { - return SetPropertyValue(NameProperty, Outer, Data, ArrayIndex, FName(*StringValue)); + return StructDeserializerBackendUtilities::SetPropertyValue(NameProperty, Outer, Data, ArrayIndex, FName(*StringValue)); } if (UTextProperty* TextProperty = Cast(Property)) { - return SetPropertyValue(TextProperty, Outer, Data, ArrayIndex, FText::FromString(StringValue)); + return StructDeserializerBackendUtilities::SetPropertyValue(TextProperty, Outer, Data, ArrayIndex, FText::FromString(StringValue)); } if (UByteProperty* ByteProperty = Cast(Property)) @@ -289,7 +192,7 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert return false; } - return SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (uint8)Value); + return StructDeserializerBackendUtilities::SetPropertyValue(ByteProperty, Outer, Data, ArrayIndex, (uint8)Value); } if (UEnumProperty* EnumProperty = Cast(Property)) @@ -300,7 +203,7 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert return false; } - if (void* ElementPtr = GetPropertyValuePtr(EnumProperty, Outer, Data, ArrayIndex)) + if (void* ElementPtr = StructDeserializerBackendUtilities::GetPropertyValuePtr(EnumProperty, Outer, Data, ArrayIndex)) { EnumProperty->GetUnderlyingProperty()->SetIntPropertyValue(ElementPtr, Value); return true; @@ -311,7 +214,7 @@ bool FJsonStructDeserializerBackend::ReadProperty( UProperty* Property, UPropert if (UClassProperty* ClassProperty = Cast(Property)) { - return SetPropertyValue(ClassProperty, Outer, Data, ArrayIndex, LoadObject(NULL, *StringValue, NULL, LOAD_NoWarn)); + return StructDeserializerBackendUtilities::SetPropertyValue(ClassProperty, Outer, Data, ArrayIndex, LoadObject(NULL, *StringValue, NULL, LOAD_NoWarn)); } UE_LOG(LogSerialization, Verbose, TEXT("String field %s 
with value '%s' is not supported in UProperty type %s (%s)"), *Property->GetFName().ToString(), *StringValue, *Property->GetClass()->GetName(), *GetDebugString()); diff --git a/Engine/Source/Runtime/Serialization/Private/Backends/StructDeserializerBackendUtilities.h b/Engine/Source/Runtime/Serialization/Private/Backends/StructDeserializerBackendUtilities.h new file mode 100644 index 000000000000..28bee27dc0ab --- /dev/null +++ b/Engine/Source/Runtime/Serialization/Private/Backends/StructDeserializerBackendUtilities.h @@ -0,0 +1,101 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "UObject/Class.h" +#include "UObject/UnrealType.h" +#include "UObject/EnumProperty.h" +#include "UObject/TextProperty.h" + + +struct StructDeserializerBackendUtilities +{ + /** + * Clears the value of the given property. + * + * @param Property The property to clear. + * @param Outer The property that contains the property to be cleared, if any. + * @param Data A pointer to the memory holding the property's data. + * @param ArrayIndex The index of the element to clear (if the property is an array). + * @return true on success, false otherwise. + * @see SetPropertyValue + */ + static bool ClearPropertyValue(UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex) + { + UArrayProperty* ArrayProperty = Cast(Outer); + + if (ArrayProperty != nullptr) + { + if (ArrayProperty->Inner != Property) + { + return false; + } + + FScriptArrayHelper ArrayHelper(ArrayProperty, ArrayProperty->template ContainerPtrToValuePtr(Data)); + ArrayIndex = ArrayHelper.AddValue(); + } + + Property->ClearValue_InContainer(Data, ArrayIndex); + + return true; + } + + + /** + * Gets a pointer to object of the given property. + * + * @param Property The property to get. + * @param Outer The property that contains the property to be get, if any. + * @param Data A pointer to the memory holding the property's data. 
+ * @param ArrayIndex The index of the element to set (if the property is an array). + * @return A pointer to the object represented by the property, null otherwise.. + * @see ClearPropertyValue + */ + static void* GetPropertyValuePtr(UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex) + { + check(Property); + + if (UArrayProperty* ArrayProperty = Cast(Outer)) + { + if (ArrayProperty->Inner != Property) + { + return nullptr; + } + + FScriptArrayHelper ArrayHelper(ArrayProperty, ArrayProperty->template ContainerPtrToValuePtr(Data)); + int32 Index = ArrayHelper.AddValue(); + + return ArrayHelper.GetRawPtr(Index); + } + + if (ArrayIndex >= Property->ArrayDim) + { + return nullptr; + } + + return Property->template ContainerPtrToValuePtr(Data, ArrayIndex); + } + + /** + * Sets the value of the given property. + * + * @param Property The property to set. + * @param Outer The property that contains the property to be set, if any. + * @param Data A pointer to the memory holding the property's data. + * @param ArrayIndex The index of the element to set (if the property is an array). + * @return true on success, false otherwise. 
+ * @see ClearPropertyValue + */ + template + static bool SetPropertyValue(PropertyType* Property, UProperty* Outer, void* Data, int32 ArrayIndex, const ValueType& Value) + { + if (void* Ptr = GetPropertyValuePtr(Property, Outer, Data, ArrayIndex)) + { + *(ValueType*)Ptr = Value; + return true; + } + + return false; + } +}; diff --git a/Engine/Source/Runtime/Serialization/Private/Tests/StructSerializerTest.cpp b/Engine/Source/Runtime/Serialization/Private/Tests/StructSerializerTest.cpp index f05285bd702f..cc8f42a5aaba 100644 --- a/Engine/Source/Runtime/Serialization/Private/Tests/StructSerializerTest.cpp +++ b/Engine/Source/Runtime/Serialization/Private/Tests/StructSerializerTest.cpp @@ -8,6 +8,8 @@ #include "Templates/SubclassOf.h" #include "Backends/JsonStructDeserializerBackend.h" #include "Backends/JsonStructSerializerBackend.h" +#include "Backends/CborStructDeserializerBackend.h" +#include "Backends/CborStructSerializerBackend.h" #include "StructDeserializer.h" #include "StructSerializer.h" #include "Tests/StructSerializerTestTypes.h" @@ -87,10 +89,10 @@ namespace StructSerializerTest /* Tests *****************************************************************************/ -IMPLEMENT_SIMPLE_AUTOMATION_TEST(FJsonStructSerializerTest, "System.Core.Serialization.JsonStructSerializer", EAutomationTestFlags::EditorContext | EAutomationTestFlags::EngineFilter) +IMPLEMENT_SIMPLE_AUTOMATION_TEST(FStructSerializerTest, "System.Core.Serialization.StructSerializer", EAutomationTestFlags::EditorContext | EAutomationTestFlags::EngineFilter) -bool FJsonStructSerializerTest::RunTest( const FString& Parameters ) +bool FStructSerializerTest::RunTest( const FString& Parameters ) { // json { @@ -106,6 +108,17 @@ bool FJsonStructSerializerTest::RunTest( const FString& Parameters ) // uncomment this to look at the serialized data // GLog->Logf(TEXT("%s"), (TCHAR*)Buffer.GetData()); } + // cbor + { + TArray Buffer; + FMemoryReader Reader(Buffer); + FMemoryWriter Writer(Buffer); + + 
FCborStructSerializerBackend SerializerBackend(Writer); + FCborStructDeserializerBackend DeserializerBackend(Reader); + + StructSerializerTest::TestSerialization(*this, SerializerBackend, DeserializerBackend); + } return true; } diff --git a/Engine/Source/Runtime/Serialization/Public/Backends/CborStructDeserializerBackend.h b/Engine/Source/Runtime/Serialization/Public/Backends/CborStructDeserializerBackend.h new file mode 100644 index 000000000000..ee76982f35cc --- /dev/null +++ b/Engine/Source/Runtime/Serialization/Public/Backends/CborStructDeserializerBackend.h @@ -0,0 +1,45 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "CborReader.h" +#include "IStructDeserializerBackend.h" + +/** + * Implements a reader for UStruct deserialization using Cbor. + */ +class SERIALIZATION_API FCborStructDeserializerBackend + : public IStructDeserializerBackend +{ +public: + + /** + * Creates and initializes a new instance. + * @param Archive The archive to deserialize from. + */ + FCborStructDeserializerBackend( FArchive& Archive ) + : CborReader(&Archive) + {} + +public: + + // IStructDeserializerBackend interface + virtual const FString& GetCurrentPropertyName() const override; + virtual FString GetDebugString() const override; + virtual const FString& GetLastErrorMessage() const override; + virtual bool GetNextToken(EStructDeserializerBackendTokens& OutToken) override; + virtual bool ReadProperty(UProperty* Property, UProperty* Outer, void* Data, int32 ArrayIndex) override; + virtual void SkipArray() override; + virtual void SkipStructure() override; + +private: + /** Holds the Cbor reader used for the actual reading of the archive. */ + FCborReader CborReader; + + /** Holds the last read Cbor Context. */ + FCborContext LastContext; + + /** Holds the last map key. 
*/ + FString LastMapKey; +}; diff --git a/Engine/Source/Runtime/Serialization/Public/Backends/CborStructSerializerBackend.h b/Engine/Source/Runtime/Serialization/Public/Backends/CborStructSerializerBackend.h new file mode 100644 index 000000000000..2f4f51aa2923 --- /dev/null +++ b/Engine/Source/Runtime/Serialization/Public/Backends/CborStructSerializerBackend.h @@ -0,0 +1,39 @@ +// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "IStructSerializerBackend.h" +#include "CborWriter.h" + +/** + * Implements a writer for UStruct serialization using Cbor. + + */ +class SERIALIZATION_API FCborStructSerializerBackend + : public IStructSerializerBackend +{ +public: + + /** + * Creates and initializes a new instance. + * @param Archive The archive to serialize into. + */ + FCborStructSerializerBackend( FArchive& Archive ) + : CborWriter(&Archive) + {} + +public: + + // IStructSerializerBackend interface + virtual void BeginArray(const FStructSerializerState& State) override; + virtual void BeginStructure(const FStructSerializerState& State) override; + virtual void EndArray(const FStructSerializerState& State) override; + virtual void EndStructure(const FStructSerializerState& State) override; + virtual void WriteComment(const FString& Comment) override; + virtual void WriteProperty(const FStructSerializerState& State, int32 ArrayIndex = 0) override; + +private: + /** Holds the Cbor writer used for the actual serialization. 
*/ + FCborWriter CborWriter; +}; diff --git a/Engine/Source/Runtime/Serialization/Serialization.Build.cs b/Engine/Source/Runtime/Serialization/Serialization.Build.cs index 0e0c9c7ffae5..3634fe31975d 100644 --- a/Engine/Source/Runtime/Serialization/Serialization.Build.cs +++ b/Engine/Source/Runtime/Serialization/Serialization.Build.cs @@ -10,6 +10,7 @@ namespace UnrealBuildTool.Rules new string[] { "Core", "Json", + "Cbor", }); PrivateDependencyModuleNames.AddRange( diff --git a/Engine/Source/Runtime/Slate/Private/Framework/Application/SlateApplication.cpp b/Engine/Source/Runtime/Slate/Private/Framework/Application/SlateApplication.cpp index 6172b2434468..877e9732b638 100644 --- a/Engine/Source/Runtime/Slate/Private/Framework/Application/SlateApplication.cpp +++ b/Engine/Source/Runtime/Slate/Private/Framework/Application/SlateApplication.cpp @@ -927,6 +927,11 @@ void FSlateApplication::SetPlatformApplication(const TSharedRefSetMessageHandler(CurrentApplication.ToSharedRef()); } +void FSlateApplication::OverridePlatformApplication(TSharedPtr InPlatformApplication) +{ + PlatformApplication = InPlatformApplication; +} + void FSlateApplication::Create() { GSlateFastWidgetPath = GIsEditor ? 0 : 1; @@ -1057,6 +1062,8 @@ FSlateApplication::FSlateApplication() GConfig->GetBool(TEXT("CursorControl"), TEXT("bAllowSoftwareCursor"), bSoftwareCursorAvailable, GEngineIni); } + bRenderOffScreen = FParse::Param(FCommandLine::Get(), TEXT("RenderOffScreen")); + // causes InputCore to initialize, even if statically linked FInputCoreModule& InputCore = FModuleManager::LoadModuleChecked(TEXT("InputCore")); @@ -1305,8 +1312,8 @@ void FSlateApplication::DrawWindowAndChildren( const TSharedRef& Window // On other platforms we set bDrawChildWindows to true only if we draw the current window. 
bool bDrawChildWindows = PLATFORM_MAC; - // Only draw visible windows - if( WindowToDraw->IsVisible() && (!WindowToDraw->IsWindowMinimized() || FApp::UseVRFocus()) ) + // Only draw visible windows or in off-screen rendering mode + if (bRenderOffScreen || (WindowToDraw->IsVisible() && (!WindowToDraw->IsWindowMinimized() || FApp::UseVRFocus())) ) { // Switch to the appropriate world for drawing @@ -1570,7 +1577,8 @@ void FSlateApplication::PrivateDrawWindows( TSharedPtr DrawOnlyThisWind for( TArray< TSharedRef >::TConstIterator CurrentWindowIt( SlateWindows ); CurrentWindowIt; ++CurrentWindowIt ) { TSharedRef CurrentWindow = *CurrentWindowIt; - if ( CurrentWindow->IsVisible() ) + // Only draw visible windows or in off-screen rendering mode + if (bRenderOffScreen || CurrentWindow->IsVisible() ) { DrawWindowAndChildren( CurrentWindow, DrawWindowArgs ); } @@ -1995,6 +2003,14 @@ TSharedRef FSlateApplication::AddWindow( TSharedRef InSlateWin TSharedRef< FGenericWindow > FSlateApplication::MakeWindow( TSharedRef InSlateWindow, const bool bShowImmediately ) { + // When rendering off-screen don't render to screen, create a dummy generic window + if (bRenderOffScreen) + { + TSharedRef< FGenericWindow > NewWindow = MakeShareable(new FGenericWindow()); + InSlateWindow->SetNativeWindow(NewWindow); + return NewWindow; + } + TSharedPtr NativeParent = nullptr; TSharedPtr ParentWindow = InSlateWindow->GetParentWindow(); if ( ParentWindow.IsValid() ) diff --git a/Engine/Source/Runtime/Slate/Private/Widgets/Layout/SExpandableArea.cpp b/Engine/Source/Runtime/Slate/Private/Widgets/Layout/SExpandableArea.cpp index d81e3b22e016..866635ef2ac2 100644 --- a/Engine/Source/Runtime/Slate/Private/Widgets/Layout/SExpandableArea.cpp +++ b/Engine/Source/Runtime/Slate/Private/Widgets/Layout/SExpandableArea.cpp @@ -45,9 +45,9 @@ void SExpandableArea::Construct( const FArguments& InArgs ) // Furthermore, we still need to fallback to just using one border if they do want them the // same, otherwise we 
could introduce curved edges between the upper and lower sections. const bool bBodyDiffers = InArgs._BodyBorderImage != nullptr || InArgs._BodyBorderBackgroundColor.IsSet(); - const FSlateBrush* FullBorderImage = bBodyDiffers ? FStyleDefaults::GetNoBrush() : InArgs._BorderImage; + const TAttribute FullBorderImage = bBodyDiffers ? FStyleDefaults::GetNoBrush() : InArgs._BorderImage; const TAttribute FullBorderBackgroundColor = bBodyDiffers ? FLinearColor::Transparent : InArgs._BorderBackgroundColor; - const FSlateBrush* TitleBorderImage = !bBodyDiffers ? FStyleDefaults::GetNoBrush() : InArgs._BorderImage; + const TAttribute TitleBorderImage = !bBodyDiffers ? FStyleDefaults::GetNoBrush() : InArgs._BorderImage; const TAttribute TitleBorderBackgroundColor = !bBodyDiffers ? FLinearColor::Transparent : InArgs._BorderBackgroundColor; ChildSlot diff --git a/Engine/Source/Runtime/Slate/Private/Widgets/Text/SlateEditableTextLayout.cpp b/Engine/Source/Runtime/Slate/Private/Widgets/Text/SlateEditableTextLayout.cpp index 31f5649d4b7a..464cb6b630fc 100644 --- a/Engine/Source/Runtime/Slate/Private/Widgets/Text/SlateEditableTextLayout.cpp +++ b/Engine/Source/Runtime/Slate/Private/Widgets/Text/SlateEditableTextLayout.cpp @@ -239,7 +239,7 @@ void FSlateEditableTextLayout::SetText(const TAttribute& InText) if (RefreshImpl(&NewText, bForceRefresh)) { // Make sure we move the cursor to the end of the new text if we had keyboard focus - if (OwnerWidget->GetSlateWidget()->HasAnyUserFocus().IsSet()) + if (OwnerWidget->GetSlateWidget()->HasAnyUserFocus().IsSet() && !bWasFocusedByLastMouseDown) { JumpTo(ETextLocation::EndOfDocument, ECursorAction::MoveCursor); } @@ -709,8 +709,8 @@ bool FSlateEditableTextLayout::HandleFocusReceived(const FFocusEvent& InFocusEve GoTo(ETextLocation::EndOfDocument); } - // Select All Text - if (OwnerWidget->ShouldSelectAllTextWhenFocused()) + // Select All Text for non-mouse events (mouse events are handled by HandleMouseButtonUp) + if (InFocusEvent.GetCause() 
!= EFocusCause::Mouse && OwnerWidget->ShouldSelectAllTextWhenFocused()) { SelectAllText(); } @@ -1157,7 +1157,7 @@ FReply FSlateEditableTextLayout::HandleMouseButtonUp(const FGeometry& MyGeometry // we'll leave things alone! if (bWasFocusedByLastMouseDown) { - if (!bHasDragSelectedSinceFocused) + if (!bHasDragSelectedSinceFocused || !SelectionStart.IsSet()) { if (OwnerWidget->ShouldSelectAllTextWhenFocused()) { diff --git a/Engine/Source/Runtime/Slate/Public/Framework/Application/SlateApplication.h b/Engine/Source/Runtime/Slate/Public/Framework/Application/SlateApplication.h index ecd233d947de..6fd61ca534a4 100644 --- a/Engine/Source/Runtime/Slate/Public/Framework/Application/SlateApplication.h +++ b/Engine/Source/Runtime/Slate/Public/Framework/Application/SlateApplication.h @@ -867,6 +867,12 @@ public: void SetPlatformApplication(const TSharedRef& InPlatformApplication); + /** + * Replace the current platform application with a custom version. + * @param InPlatformApplication - The replacement platform application. 
+ */ + void OverridePlatformApplication(TSharedPtr InPlatformApplication); + /** Set the global application icon */ void SetAppIcon(const FSlateBrush* const InAppIcon); @@ -1326,6 +1332,8 @@ public: void SetAllowTooltips(bool bCanShow); bool GetAllowTooltips() const; + bool IsRenderingOffScreen() const { return bRenderOffScreen; } + public: //~ Begin FSlateApplicationBase Interface @@ -1708,6 +1716,9 @@ private: /** true if any slate window is currently active (not just top level windows) */ bool bSlateWindowActive; + /** true if rendering windows even when they are set to invisible */ + bool bRenderOffScreen; + /** Application-wide scale for supporting monitors of varying pixel density */ float Scale; diff --git a/Engine/Source/Runtime/Slate/Public/Widgets/Layout/SExpandableArea.h b/Engine/Source/Runtime/Slate/Public/Widgets/Layout/SExpandableArea.h index 4118d2689be9..5bd071e10a0a 100644 --- a/Engine/Source/Runtime/Slate/Public/Widgets/Layout/SExpandableArea.h +++ b/Engine/Source/Runtime/Slate/Public/Widgets/Layout/SExpandableArea.h @@ -48,7 +48,7 @@ public: /** Background color to apply to the border image */ SLATE_ATTRIBUTE( FSlateColor, BorderBackgroundColor ) /** Border to use around the area */ - SLATE_ARGUMENT( const FSlateBrush*, BorderImage ) + SLATE_ATTRIBUTE( const FSlateBrush*, BorderImage ) /** Background color to apply to the body's border image. Unspecified uses BorderBackgroundColor */ SLATE_ATTRIBUTE( FSlateColor, BodyBorderBackgroundColor ) /** Border to use around the body. 
Unspecified uses BorderImage */ diff --git a/Engine/Source/Runtime/SlateCore/Private/Input/HittestGrid.cpp b/Engine/Source/Runtime/SlateCore/Private/Input/HittestGrid.cpp index 6a2fd29fabdd..1d20e5911263 100644 --- a/Engine/Source/Runtime/SlateCore/Private/Input/HittestGrid.cpp +++ b/Engine/Source/Runtime/SlateCore/Private/Input/HittestGrid.cpp @@ -97,6 +97,7 @@ struct FHittestGrid::FGridTestingParams FHittestGrid::FHittestGrid() : WidgetsCachedThisFrame() +, NumCellsExcess(0, 0) { } @@ -224,6 +225,7 @@ void FHittestGrid::ClearGridForNewFrame(const FSlateRect& HittestArea) GridOrigin = HittestArea.GetTopLeft(); const FVector2D GridSize = HittestArea.GetSize(); NumCells = FIntPoint(FMath::CeilToInt(GridSize.X / CellSize.X), FMath::CeilToInt(GridSize.Y / CellSize.Y)); + NumCells += NumCellsExcess; WidgetsCachedThisFrame.Reset(); const int32 NewTotalCells = NumCells.X * NumCells.Y; diff --git a/Engine/Source/Runtime/SlateCore/Public/Input/HittestGrid.h b/Engine/Source/Runtime/SlateCore/Public/Input/HittestGrid.h index 5aa9cd4768e8..6dd783a52b3a 100644 --- a/Engine/Source/Runtime/SlateCore/Public/Input/HittestGrid.h +++ b/Engine/Source/Runtime/SlateCore/Public/Input/HittestGrid.h @@ -93,6 +93,26 @@ public: */ TSharedPtr FindNextFocusableWidget(const FArrangedWidget& StartingWidget, const EUINavigation Direction, const FNavigationReply& NavigationReply, const FArrangedWidget& RuleWidget); + /** + * Get the size of the grid in cells. + * @returns - The size of the grid in cells. + */ + FIntPoint GetNumCells() const + { + return NumCells; + } + + /** + * Set an excess of cells which will increase the size of the grid. This + * extends the desktop space beyond the norm and allows hits to take place + * on areas outside the desktop. + * @param InNumCellsExcess - The excess of cells. + */ + void SetNumCellsExcess(const FIntPoint& InNumCellsExcess) + { + NumCellsExcess = InNumCellsExcess; + } + private: /** @@ -196,6 +216,9 @@ private: /** The size of the grid in cells. 
*/ FIntPoint NumCells; + /** The excess of cells which will increase the size of the grid. */ + FIntPoint NumCellsExcess; + /** The clipping manager that manages any clipping for hit testable widgets. */ FSlateClippingManager ClippingManager; diff --git a/Engine/Source/Runtime/SlateCore/Public/Rendering/SlateRenderer.h b/Engine/Source/Runtime/SlateCore/Public/Rendering/SlateRenderer.h index 594ee4cb2fe2..32722ebeabce 100644 --- a/Engine/Source/Runtime/SlateCore/Public/Rendering/SlateRenderer.h +++ b/Engine/Source/Runtime/SlateCore/Public/Rendering/SlateRenderer.h @@ -6,6 +6,7 @@ #include "Textures/SlateShaderResource.h" #include "Brushes/SlateDynamicImageBrush.h" #include "Rendering/DrawElements.h" +#include "Templates/RefCounting.h" class FRHITexture2D; class FSlateDrawBuffer; @@ -19,6 +20,7 @@ class FSceneInterface; struct FSlateBrush; typedef FRHITexture2D* FTexture2DRHIParamRef; +typedef TRefCountPtr FTexture2DRHIRef; /** * Update context for deferred drawing of widgets to render targets @@ -242,6 +244,10 @@ public: DECLARE_MULTICAST_DELEGATE_OneParam(FOnPostResizeWindowBackbuffer, void*); FOnPostResizeWindowBackbuffer& OnPostResizeWindowBackBuffer() { return PostResizeBackBufferDelegate; } + /** Callback on the render thread after slate rendering finishes and right before present is called */ + DECLARE_DELEGATE_OneParam(FOnBackBufferReadyToPresent, const FTexture2DRHIRef&); + FOnBackBufferReadyToPresent& OnBackBufferReadyToPresent() { return OnBackBufferReadyToPresentDelegate; } + /** * Sets which color vision filter to use */ @@ -501,6 +507,8 @@ protected: FOnPreResizeWindowBackbuffer PreResizeBackBufferDelegate; FOnPostResizeWindowBackbuffer PostResizeBackBufferDelegate; + FOnBackBufferReadyToPresent OnBackBufferReadyToPresentDelegate; + /** * Necessary to grab before flushing the resource pool, as it may be being * accessed by multiple threads when loading. 
diff --git a/Engine/Source/Runtime/SlateRHIRenderer/Private/SlateRHIRenderer.cpp b/Engine/Source/Runtime/SlateRHIRenderer/Private/SlateRHIRenderer.cpp index 19a05cc5f086..efdbd4063740 100644 --- a/Engine/Source/Runtime/SlateRHIRenderer/Private/SlateRHIRenderer.cpp +++ b/Engine/Source/Runtime/SlateRHIRenderer/Private/SlateRHIRenderer.cpp @@ -908,6 +908,12 @@ void FSlateRHIRenderer::DrawWindow_RenderThread(FRHICommandListImmediate& RHICmd GEngine->StereoRenderingDevice->RenderTexture_RenderThread(RHICmdList, RHICmdList.GetViewportBackBuffer(ViewportInfo.ViewportRHI), ViewportInfo.GetRenderTargetTexture(), WindowSize); } RHICmdList.TransitionResource(EResourceTransitionAccess::EReadable, BackBuffer); + + // Fire delegate to inform bound functions the back buffer is ready to be captured. + if (OnBackBufferReadyToPresentDelegate.IsBound()) + { + OnBackBufferReadyToPresentDelegate.Execute(BackBuffer); + } } } diff --git a/Engine/Source/Runtime/TimeManagement/Public/TimeSynchronizationSource.h b/Engine/Source/Runtime/TimeManagement/Public/TimeSynchronizationSource.h index 37c3f91b08a2..292cb66a8a1b 100644 --- a/Engine/Source/Runtime/TimeManagement/Public/TimeSynchronizationSource.h +++ b/Engine/Source/Runtime/TimeManagement/Public/TimeSynchronizationSource.h @@ -5,38 +5,76 @@ #include "UObject/ObjectMacros.h" #include "UObject/Object.h" +#include "Misc/QualifiedFrameTime.h" #include "Misc/FrameRate.h" #include "Misc/FrameTime.h" +#include "Misc/Timecode.h" #include "TimeSynchronizationSource.generated.h" +struct FTimeSynchronizationOpenData +{ + /** Frame rate that will be used as the base for synchronization. */ + FFrameRate SynchronizationFrameRate; + + /** + * The frame on which rollover occurs (i.e., the modulus value of rollover). + * This is relative to the SynchronizationFrameRate. + * Not set if rollover is not used. + */ + TOptional RolloverFrame; +}; + +//! Values that will be sent to sources when synchronization has been successfully started. 
+struct FTimeSynchronizationStartData +{ + /** + * The frame on which synchronization was established. + * This is relative to SynchronizationFrameRate in FTimecodeSynchronizationOpenData. + */ + FFrameTime StartFrame; +}; + /** -* Base class for sources to be used for time synchronization -*/ + * Base class for sources to be used for time synchronization. + * + * Subclasses don't need to directly contain data, nor provide access to the + * data in any way (although they may). + * + * Currently, Synchronization does not work on the subframe level. + */ UCLASS(Abstract) class TIMEMANAGEMENT_API UTimeSynchronizationSource : public UObject { GENERATED_UCLASS_BODY() public: - /** If the source has a time base that can be used to synchronize other sources. */ + + /** Whether or not this source should be considered when establishing synchronization. */ UPROPERTY(EditAnywhere, Category = Synchronization) bool bUseForSynchronization; - /** Extra frame to buffered before allowing the Manager to by in synchronized mode. */ - UPROPERTY(EditAnywhere, Category = Synchronization, Meta = (ClampMin = 0, ClampMax = 60, EditCondition = "bUseForSynchronization")) - int32 NumberOfExtraBufferedFrame; - - /** Fixed delay in seconds to align with other sources. */ - UPROPERTY(EditAnywhere, Category = Synchronization, Meta = (EditCondition = "!bUseForSynchronization")) - float TimeDelay; + /** + * An additional offset in frames (relative to this source's frame rate) that should used. + * This is mainly useful to help correct discrepancies between the reported Sample Times + * and how the samples actually line up relative to other sources. 
+ */ + UPROPERTY(EditAnywhere, Category = Synchronization) + int32 FrameOffset; public: - /** Get next available sample Time based on the source FrameRate */ - virtual FFrameTime GetNextSampleTime() const PURE_VIRTUAL(UTimeSynchronizationSource::GetNextSampleTime, return FFrameTime();) - /** Get number of available samples buffered in the source */ - virtual int32 GetAvailableSampleCount() const PURE_VIRTUAL(UTimeSynchronizationSource::GetAvailableSampleCount, return 0;) + /** + * Get the time of the newest available sample (relative to this source's frame rate). + * Note, in cases where Rollover is allowed and has occurred, this may have a lower value than GetOldestSampleTime. + */ + virtual FFrameTime GetNewestSampleTime() const PURE_VIRTUAL(UTimeSynchronizationSource::GetNewestSampleTime, return FFrameTime();) + + /** + * Get the time of the oldest available sample (relative to this source's frame rate). + * Note, in cases where Rollover is allowed and has occurred, this may have a higher value than GetNewestSampleTime. + */ + virtual FFrameTime GetOldestSampleTime() const PURE_VIRTUAL(UTimeSynchronizationSource::GetOldestSampleTime, return FFrameTime();) /** Get the source actual FrameRate */ virtual FFrameRate GetFrameRate() const PURE_VIRTUAL(UTimeSynchronizationSource::GetFrameRate, return FFrameRate();) @@ -44,15 +82,108 @@ public: /** Used to know if the source is ready to be used for synchronization. */ virtual bool IsReady() const PURE_VIRTUAL(UTimeSynchronizationSource::IsReady, return false;) - /** Open the source to initiate frame acquisition */ - virtual bool Open() PURE_VIRTUAL(UTimeSynchronizationSource::Open, return false;) + /** Called when synchronization is started to notify this source to begin buffering frames. 
*/ + virtual bool Open(const FTimeSynchronizationOpenData& OpenData) PURE_VIRTUAL(UTimeSynchronizationSource::Open, return false;) - /** Start Rolling/Playing the source */ - virtual void Start() PURE_VIRTUAL(UTimeSynchronizationSource::Start, return;) + /** Start playing samples. */ + virtual void Start(const FTimeSynchronizationStartData& StartData) PURE_VIRTUAL(UTimeSynchronizationSource::Start, return;) - /** Stop the source from rolling. The source may clear all his buffered frame. */ + /** Called when synchronization has been completed. The source may discard any unnecessary frames. */ virtual void Close() PURE_VIRTUAL(UTimeSynchronizationSource::Close, return;) /** Name to used when displaying an error message or to used in UI. */ - virtual FString GetDisplayName() const PURE_VIRTUAL(UTimeSynchronizationSource::GetDisplayName, return FString();); -}; + virtual FString GetDisplayName() const PURE_VIRTUAL(UTimeSynchronizationSource::GetDisplayName, return FString();) + +public: + + /** + * Checks to see whether or not the given frame is between the Lower and Upper bounds. + * It's assumed the bounds are in appropriate order (i.e., LowerBound <= UpperBound, unless they span across a rollover boundary, in which + * case LowerBound > UpperBound). + * It's assumed the value to check is also valid (between 0 and the rollover modulus). + * + * @param ToCheck The value to check. + * @param LowerBound The lower bound of times to check. + * @param UpperBound The upper bound of times to check. + * @param RolloverModulus Rollover frame value. 
+ */ + FORCEINLINE static bool IsFrameBetweenWithRolloverModulus(const FFrameTime& ToCheck, const FFrameTime& LowerBound, const FFrameTime& UpperBound, const FFrameTime& RolloverModulus) + { + if (LowerBound <= UpperBound) + { + return LowerBound <= ToCheck && ToCheck <= UpperBound; + } + else + { + return (LowerBound <= ToCheck && ToCheck <= RolloverModulus) || (FFrameTime(0) <= ToCheck && ToCheck <= UpperBound); + } + } + + /** Convenience method to convert a FrameTime and FrameRate to a timecode value. */ + FORCEINLINE static FTimecode ConvertFrameTimeToTimecode(const FFrameTime& FrameTime, const FFrameRate& FrameRate) + { + const bool bIsDropFrame = FTimecode::IsDropFormatTimecodeSupported(FrameRate); + return FTimecode::FromFrameNumber(FrameTime.GetFrame(), FrameRate, bIsDropFrame); + } + + /** + * Adds an integer offset (representing frames) to the given FrameTime. + * It's expected the offset's magnitude will be less than the rollover modulus. + * + * @param FrameTime The base frame time. + * @param Offset The offset to add. + * @param RolloverModulus Rollover frame value. + */ + FORCEINLINE static FFrameTime AddOffsetWithRolloverModulus(const FFrameTime& FrameTime, const int32 Offset, const FFrameTime& RolloverModulus) + { + const FFrameTime WithOffset = FrameTime + Offset; + const int32 RolloverFrameValue = RolloverModulus.GetFrame().Value; + return FFrameTime((WithOffset.GetFrame().Value + RolloverFrameValue) % RolloverFrameValue, WithOffset.GetSubFrame()); + } + + /** + * Calculates the distance between two frames. + * This method accounts for rollover (when used), and assumes the frames will always be relatively close together. + * This is also a convenient method to use to check whether or not a rollover has happened within a range of frames. + * + * @param StartFrameTime The start time in the range. + * @param EndFrameTime The end time in the range. + * @param RolloverModulus Rollover frame value. Unset if rollover isn't used. 
+ * @param bDidRollover [out] Whether or not a rollover occurred in the input range. + */ + static int32 FindDistanceBetweenFramesWithRolloverModulus(const FFrameTime& StartFrameTime, const FFrameTime& EndFrameTime, const TOptional& RolloverModulus, bool& bDidRollover) + { + int32 Offset = (EndFrameTime.GetFrame().Value - StartFrameTime.GetFrame().Value); + bDidRollover = false; + + if (RolloverModulus.IsSet()) + { + // At this point, we don't know if a rollover has occurred. + // Any comparisons will be useless, because we don't know the real order. + + // If we assume the "real world" distance between these frames is usually small, then + // we can figure out ordering based on distance. + // Here, we'll define relatively small as being less than half the time of our roll over range. + // That is, if we roll over every 24 hours, "small" will be 12 hours or less. + // The reason for this choice is because if 2 values are half the roll over distance apart, + // they are equidistant in modulo space. Anything greater than half implies + // that a roll over has occurred, while anything less than half implies no roll over. + + const int32 RolloverTimeValue = RolloverModulus->GetFrame().Value; + + if (FMath::Abs(Offset) > (RolloverTimeValue / 2)) + { + // At this point, we know that a roll over has occurred between the frames. + // If Offset is negative, then Start was greater than End, we'll assume the roll over happened between then, and our output should be positive. + // If Offset is positive, the inverse is true (and our output will be negative). + // To correct for that, we need to "unroll" modulo space by adding or subtracting + // the full rollover value. + + Offset += (Offset < 0) ? 
RolloverTimeValue : -RolloverTimeValue; + bDidRollover = true; + } + } + + return Offset; + } +}; \ No newline at end of file diff --git a/Engine/Source/Runtime/UMG/Private/Animation/MovieScene2DTransformTrack.cpp b/Engine/Source/Runtime/UMG/Private/Animation/MovieScene2DTransformTrack.cpp index b7d5eaf3ec82..ad42925e501a 100644 --- a/Engine/Source/Runtime/UMG/Private/Animation/MovieScene2DTransformTrack.cpp +++ b/Engine/Source/Runtime/UMG/Private/Animation/MovieScene2DTransformTrack.cpp @@ -18,7 +18,7 @@ UMovieScene2DTransformTrack::UMovieScene2DTransformTrack(const FObjectInitialize UMovieSceneSection* UMovieScene2DTransformTrack::CreateNewSection() { - return NewObject(this, UMovieScene2DTransformSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/UMG/Private/Animation/MovieSceneMarginTrack.cpp b/Engine/Source/Runtime/UMG/Private/Animation/MovieSceneMarginTrack.cpp index b221a338b0bc..d798a99528cc 100644 --- a/Engine/Source/Runtime/UMG/Private/Animation/MovieSceneMarginTrack.cpp +++ b/Engine/Source/Runtime/UMG/Private/Animation/MovieSceneMarginTrack.cpp @@ -14,7 +14,7 @@ UMovieSceneMarginTrack::UMovieSceneMarginTrack(const FObjectInitializer& Init) UMovieSceneSection* UMovieSceneMarginTrack::CreateNewSection() { - return NewObject(this, UMovieSceneMarginSection::StaticClass(), NAME_None, RF_Transactional); + return NewObject(this, NAME_None, RF_Transactional); } diff --git a/Engine/Source/Runtime/UMG/Private/Components/ExpandableArea.cpp b/Engine/Source/Runtime/UMG/Private/Components/ExpandableArea.cpp index 3eb98677d962..16917867f24a 100644 --- a/Engine/Source/Runtime/UMG/Private/Components/ExpandableArea.cpp +++ b/Engine/Source/Runtime/UMG/Private/Components/ExpandableArea.cpp @@ -23,7 +23,7 @@ UExpandableArea::UExpandableArea(const FObjectInitializer& ObjectInitializer) SExpandableArea::FArguments ExpandableDefaults; Style = *ExpandableDefaults._Style; BorderColor = 
ExpandableDefaults._BorderBackgroundColor.Get( FLinearColor::White ); - BorderBrush = *ExpandableDefaults._BorderImage; + BorderBrush = *ExpandableDefaults._BorderImage.Get( FStyleDefaults::GetNoBrush() ); AreaPadding = ExpandableDefaults._Padding.Get(); HeaderPadding = ExpandableDefaults._HeaderPadding.Get(); } diff --git a/Engine/Source/Runtime/Windows/D3D11RHI/Private/D3D11Viewport.cpp b/Engine/Source/Runtime/Windows/D3D11RHI/Private/D3D11Viewport.cpp index df4981c78e2f..5a42f15facc0 100644 --- a/Engine/Source/Runtime/Windows/D3D11RHI/Private/D3D11Viewport.cpp +++ b/Engine/Source/Runtime/Windows/D3D11RHI/Private/D3D11Viewport.cpp @@ -89,11 +89,31 @@ extern void D3D11TextureAllocated2D( FD3D11Texture2D& Texture ); /** * Creates a FD3D11Surface to represent a swap chain's back buffer. */ -FD3D11Texture2D* GetSwapChainSurface(FD3D11DynamicRHI* D3DRHI, EPixelFormat PixelFormat, IDXGISwapChain* SwapChain) +FD3D11Texture2D* GetSwapChainSurface(FD3D11DynamicRHI* D3DRHI, EPixelFormat PixelFormat, uint32 SizeX, uint32 SizeY, IDXGISwapChain* SwapChain) { // Grab the back buffer TRefCountPtr BackBufferResource; - VERIFYD3D11RESULT_EX(SwapChain->GetBuffer(0,IID_ID3D11Texture2D,(void**)BackBufferResource.GetInitReference()), D3DRHI->GetDevice()); + if (SwapChain) + { + VERIFYD3D11RESULT_EX(SwapChain->GetBuffer(0, IID_ID3D11Texture2D, (void**)BackBufferResource.GetInitReference()), D3DRHI->GetDevice()); + } + else + { + // Create custom back buffer texture as no swap chain is created in pixel streaming windowless mode + DXGI_FORMAT TextureFormat = GetRenderTargetFormat(PixelFormat); + D3D11_TEXTURE2D_DESC TextureDesc; + FMemory::Memzero(TextureDesc); + TextureDesc.Width = SizeX; + TextureDesc.Height = SizeY; + TextureDesc.MipLevels = 1; + TextureDesc.ArraySize = 1; + TextureDesc.Format = TextureFormat; + TextureDesc.SampleDesc.Count = 1; + TextureDesc.SampleDesc.Quality = 0; + TextureDesc.Usage = D3D11_USAGE_DEFAULT; + TextureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | 
D3D11_BIND_SHADER_RESOURCE; + VERIFYD3D11RESULT_EX(D3DRHI->GetDevice()->CreateTexture2D(&TextureDesc, NULL, BackBufferResource.GetInitReference()), D3DRHI->GetDevice()); + } // create the render target view TRefCountPtr BackBufferRenderTargetView; @@ -230,17 +250,20 @@ void FD3D11Viewport::Resize(uint32 InSizeX, uint32 InSizeY, bool bInIsFullscreen check(SizeX > 0); check(SizeY > 0); - // Resize the swap chain. - DXGI_FORMAT RenderTargetFormat = GetRenderTargetFormat(PixelFormat); - VERIFYD3D11RESIZEVIEWPORTRESULT(SwapChain->ResizeBuffers(1,SizeX,SizeY,RenderTargetFormat,DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH),SizeX,SizeY,RenderTargetFormat, D3DRHI->GetDevice()); - - if(bInIsFullscreen) + if (bNeedSwapChain) { - DXGI_MODE_DESC BufferDesc = SetupDXGI_MODE_DESC(); + // Resize the swap chain. + DXGI_FORMAT RenderTargetFormat = GetRenderTargetFormat(PixelFormat); + VERIFYD3D11RESIZEVIEWPORTRESULT(SwapChain->ResizeBuffers(1, SizeX, SizeY, RenderTargetFormat, DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH), SizeX, SizeY, RenderTargetFormat, D3DRHI->GetDevice()); - if (FAILED(SwapChain->ResizeTarget(&BufferDesc))) + if (bInIsFullscreen) { - ConditionalResetSwapChain(true); + DXGI_MODE_DESC BufferDesc = SetupDXGI_MODE_DESC(); + + if (FAILED(SwapChain->ResizeTarget(&BufferDesc))) + { + ConditionalResetSwapChain(true); + } } } } @@ -250,9 +273,12 @@ void FD3D11Viewport::Resize(uint32 InSizeX, uint32 InSizeY, bool bInIsFullscreen bIsFullscreen = bInIsFullscreen; bIsValid = false; - // Use ConditionalResetSwapChain to call SetFullscreenState, to handle the failure case. - // Ignore the viewport's focus state; since Resize is called as the result of a user action we assume authority without waiting for Focus. - ConditionalResetSwapChain(true); + if (bNeedSwapChain) + { + // Use ConditionalResetSwapChain to call SetFullscreenState, to handle the failure case. 
+ // Ignore the viewport's focus state; since Resize is called as the result of a user action we assume authority without waiting for Focus. + ConditionalResetSwapChain(true); + } } // Float RGBA backbuffers are requested whenever HDR mode is desired @@ -266,7 +292,7 @@ void FD3D11Viewport::Resize(uint32 InSizeX, uint32 InSizeY, bool bInIsFullscreen } // Create a RHI surface to represent the viewport's back buffer. - BackBuffer = GetSwapChainSurface(D3DRHI, PixelFormat, SwapChain); + BackBuffer = GetSwapChainSurface(D3DRHI, PixelFormat, SizeX, SizeY, SwapChain); } /** Returns true if desktop composition is enabled. */ @@ -293,8 +319,11 @@ bool FD3D11Viewport::PresentChecked(int32 SyncInterval) if (bNeedNativePresent) { - // Present the back buffer to the viewport window. - Result = SwapChain->Present(SyncInterval, 0); + if (SwapChain.IsValid()) + { + // Present the back buffer to the viewport window. + Result = SwapChain->Present(SyncInterval, 0); + } if (IsValidRef(CustomPresent)) { @@ -444,7 +473,7 @@ bool FD3D11Viewport::Present(bool bLockToVsync) bool bNativelyPresented = true; #if D3D11_WITH_DWMAPI // We can't call Present if !bIsValid, as it waits a window message to be processed, but the main thread may not be pumping the message handler. - if(bIsValid) + if(bIsValid && SwapChain.IsValid()) { // Check if the viewport's swap chain has been invalidated by DXGI. 
BOOL bSwapChainFullscreenState; diff --git a/Engine/Source/Runtime/Windows/D3D11RHI/Private/Windows/WindowsD3D11Viewport.cpp b/Engine/Source/Runtime/Windows/D3D11RHI/Private/Windows/WindowsD3D11Viewport.cpp index a69d4992448e..26b3ec3a48b0 100644 --- a/Engine/Source/Runtime/Windows/D3D11RHI/Private/Windows/WindowsD3D11Viewport.cpp +++ b/Engine/Source/Runtime/Windows/D3D11RHI/Private/Windows/WindowsD3D11Viewport.cpp @@ -13,7 +13,7 @@ #include "dxgi1_2.h" -extern FD3D11Texture2D* GetSwapChainSurface(FD3D11DynamicRHI* D3DRHI, EPixelFormat PixelFormat, IDXGISwapChain* SwapChain); +extern FD3D11Texture2D* GetSwapChainSurface(FD3D11DynamicRHI* D3DRHI, EPixelFormat PixelFormat, uint32 SizeX, uint32 SizeY, IDXGISwapChain* SwapChain); FD3D11Viewport::FD3D11Viewport(FD3D11DynamicRHI* InD3DRHI,HWND InWindowHandle,uint32 InSizeX,uint32 InSizeY,bool bInIsFullscreen, EPixelFormat InPreferredPixelFormat): D3DRHI(InD3DRHI), @@ -68,69 +68,73 @@ FD3D11Viewport::FD3D11Viewport(FD3D11DynamicRHI* InD3DRHI,HWND InWindowHandle,ui D3DRHI->EnableHDR(); } - // Create the swapchain. 
- if (InD3DRHI->IsQuadBufferStereoEnabled()) - { - IDXGIFactory2* Factory2 = (IDXGIFactory2*)D3DRHI->GetFactory(); - - BOOL stereoEnabled = Factory2->IsWindowedStereoEnabled(); - if (stereoEnabled) - { - DXGI_SWAP_CHAIN_DESC1 SwapChainDesc1; - FMemory::Memzero(&SwapChainDesc1, sizeof(DXGI_SWAP_CHAIN_DESC1)); - - // Enable stereo - SwapChainDesc1.Stereo = true; - // MSAA Sample count - SwapChainDesc1.SampleDesc.Count = 1; - SwapChainDesc1.SampleDesc.Quality = 0; - - SwapChainDesc1.Format = DXGI_FORMAT_R10G10B10A2_UNORM; - SwapChainDesc1.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT; - // Double buffering required to create stereo swap chain - SwapChainDesc1.BufferCount = 2; - SwapChainDesc1.Scaling = DXGI_SCALING_NONE; - SwapChainDesc1.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD; - SwapChainDesc1.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; - - IDXGISwapChain1* SwapChain1 = nullptr; - VERIFYD3D11RESULT_EX((Factory2->CreateSwapChainForHwnd(D3DRHI->GetDevice(), WindowHandle, &SwapChainDesc1, nullptr, nullptr, &SwapChain1)), D3DRHI->GetDevice()); - SwapChain = SwapChain1; - } - else - { - UE_LOG(LogD3D11RHI, Log, TEXT("FD3D11Viewport::FD3D11Viewport was not able to create stereo SwapChain; Please enable stereo in driver settings.")); - InD3DRHI->DisableQuadBufferStereo(); - } - } - - // if stereo was not activated or not enabled in settings - if(SwapChain == nullptr) + // Skip swap chain creation in off-screen rendering mode + bNeedSwapChain = !FParse::Param(FCommandLine::Get(), TEXT("RenderOffScreen")); + if (bNeedSwapChain) { // Create the swapchain. 
- DXGI_SWAP_CHAIN_DESC SwapChainDesc; - FMemory::Memzero(&SwapChainDesc, sizeof(DXGI_SWAP_CHAIN_DESC)); + if (InD3DRHI->IsQuadBufferStereoEnabled()) + { + IDXGIFactory2* Factory2 = (IDXGIFactory2*)D3DRHI->GetFactory(); - SwapChainDesc.BufferDesc = SetupDXGI_MODE_DESC(); - // MSAA Sample count - SwapChainDesc.SampleDesc.Count = 1; - SwapChainDesc.SampleDesc.Quality = 0; - SwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT; - // 1:single buffering, 2:double buffering, 3:triple buffering - SwapChainDesc.BufferCount = 1; - SwapChainDesc.OutputWindow = WindowHandle; - SwapChainDesc.Windowed = !bIsFullscreen; - // DXGI_SWAP_EFFECT_DISCARD / DXGI_SWAP_EFFECT_SEQUENTIAL - SwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD; - SwapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; - VERIFYD3D11RESULT_EX(D3DRHI->GetFactory()->CreateSwapChain(DXGIDevice, &SwapChainDesc, SwapChain.GetInitReference()), D3DRHI->GetDevice()); + BOOL stereoEnabled = Factory2->IsWindowedStereoEnabled(); + if (stereoEnabled) + { + DXGI_SWAP_CHAIN_DESC1 SwapChainDesc1; + FMemory::Memzero(&SwapChainDesc1, sizeof(DXGI_SWAP_CHAIN_DESC1)); + + // Enable stereo + SwapChainDesc1.Stereo = true; + // MSAA Sample count + SwapChainDesc1.SampleDesc.Count = 1; + SwapChainDesc1.SampleDesc.Quality = 0; + + SwapChainDesc1.Format = DXGI_FORMAT_R10G10B10A2_UNORM; + SwapChainDesc1.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT; + // Double buffering required to create stereo swap chain + SwapChainDesc1.BufferCount = 2; + SwapChainDesc1.Scaling = DXGI_SCALING_NONE; + SwapChainDesc1.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD; + SwapChainDesc1.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; + + IDXGISwapChain1* SwapChain1 = nullptr; + VERIFYD3D11RESULT_EX((Factory2->CreateSwapChainForHwnd(D3DRHI->GetDevice(), WindowHandle, &SwapChainDesc1, nullptr, nullptr, &SwapChain1)), D3DRHI->GetDevice()); + SwapChain = SwapChain1; + } + else + { + UE_LOG(LogD3D11RHI, 
Log, TEXT("FD3D11Viewport::FD3D11Viewport was not able to create stereo SwapChain; Please enable stereo in driver settings.")); + InD3DRHI->DisableQuadBufferStereo(); + } + } + + // if stereo was not activated or not enabled in settings + if (SwapChain == nullptr) + { + // Create the swapchain. + DXGI_SWAP_CHAIN_DESC SwapChainDesc; + FMemory::Memzero(&SwapChainDesc, sizeof(DXGI_SWAP_CHAIN_DESC)); + + SwapChainDesc.BufferDesc = SetupDXGI_MODE_DESC(); + // MSAA Sample count + SwapChainDesc.SampleDesc.Count = 1; + SwapChainDesc.SampleDesc.Quality = 0; + SwapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT; + // 1:single buffering, 2:double buffering, 3:triple buffering + SwapChainDesc.BufferCount = 1; + SwapChainDesc.OutputWindow = WindowHandle; + SwapChainDesc.Windowed = !bIsFullscreen; + // DXGI_SWAP_EFFECT_DISCARD / DXGI_SWAP_EFFECT_SEQUENTIAL + SwapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD; + SwapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; + VERIFYD3D11RESULT_EX(D3DRHI->GetFactory()->CreateSwapChain(DXGIDevice, &SwapChainDesc, SwapChain.GetInitReference()), D3DRHI->GetDevice()); + } + + // Set the DXGI message hook to not change the window behind our back. + D3DRHI->GetFactory()->MakeWindowAssociation(WindowHandle,DXGI_MWA_NO_WINDOW_CHANGES); } - - // Set the DXGI message hook to not change the window behind our back. - D3DRHI->GetFactory()->MakeWindowAssociation(WindowHandle,DXGI_MWA_NO_WINDOW_CHANGES); - // Create a RHI surface to represent the viewport's back buffer. - BackBuffer = GetSwapChainSurface(D3DRHI, PixelFormat, SwapChain); + BackBuffer = GetSwapChainSurface(D3DRHI, PixelFormat, SizeX, SizeY, SwapChain); // Tell the window to redraw when they can. // @todo: For Slate viewports, it doesn't make sense to post WM_PAINT messages (we swallow those.) 
diff --git a/Engine/Source/Runtime/Windows/D3D11RHI/Public/D3D11Viewport.h b/Engine/Source/Runtime/Windows/D3D11RHI/Public/D3D11Viewport.h index 4cb0492542c4..f6d5ef27a85c 100644 --- a/Engine/Source/Runtime/Windows/D3D11RHI/Public/D3D11Viewport.h +++ b/Engine/Source/Runtime/Windows/D3D11RHI/Public/D3D11Viewport.h @@ -131,6 +131,11 @@ private: TRefCountPtr ForcedFullscreenOutput; bool bForcedFullscreenDisplay; + // Whether to create swap chain and use swap chain's back buffer surface, + // or don't create swap chain and create an off-screen back buffer surface. + // Currently used for pixel streaming plugin "windowless" mode to run in the cloud without on screen display. + bool bNeedSwapChain; + /** An event used to track the GPU's progress. */ FD3D11EventQuery FrameSyncEvent; diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/array_view.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/array_view.h new file mode 100644 index 000000000000..4c1fc8650ae6 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/array_view.h @@ -0,0 +1,284 @@ +/* + * Copyright 2015 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ARRAY_VIEW_H_ +#define API_ARRAY_VIEW_H_ + +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/type_traits.h" + +namespace rtc { + +// tl;dr: rtc::ArrayView is the same thing as gsl::span from the Guideline +// Support Library. +// +// Many functions read from or write to arrays. 
The obvious way to do this is +// to use two arguments, a pointer to the first element and an element count: +// +// bool Contains17(const int* arr, size_t size) { +// for (size_t i = 0; i < size; ++i) { +// if (arr[i] == 17) +// return true; +// } +// return false; +// } +// +// This is flexible, since it doesn't matter how the array is stored (C array, +// std::vector, rtc::Buffer, ...), but it's error-prone because the caller has +// to correctly specify the array length: +// +// Contains17(arr, arraysize(arr)); // C array +// Contains17(arr.data(), arr.size()); // std::vector +// Contains17(arr, size); // pointer + size +// ... +// +// It's also kind of messy to have two separate arguments for what is +// conceptually a single thing. +// +// Enter rtc::ArrayView. It contains a T pointer (to an array it doesn't +// own) and a count, and supports the basic things you'd expect, such as +// indexing and iteration. It allows us to write our function like this: +// +// bool Contains17(rtc::ArrayView arr) { +// for (auto e : arr) { +// if (e == 17) +// return true; +// } +// return false; +// } +// +// And even better, because a bunch of things will implicitly convert to +// ArrayView, we can call it like this: +// +// Contains17(arr); // C array +// Contains17(arr); // std::vector +// Contains17(rtc::ArrayView(arr, size)); // pointer + size +// Contains17(nullptr); // nullptr -> empty ArrayView +// ... +// +// ArrayView stores both a pointer and a size, but you may also use +// ArrayView, which has a size that's fixed at compile time (which means +// it only has to store the pointer). +// +// One important point is that ArrayView and ArrayView are +// different types, which allow and don't allow mutation of the array elements, +// respectively. The implicit conversions work just like you'd hope, so that +// e.g. vector will convert to either ArrayView or ArrayView, but const vector will convert only to ArrayView. 
+// (ArrayView itself can be the source type in such conversions, so +// ArrayView will convert to ArrayView.) +// +// Note: ArrayView is tiny (just a pointer and a count if variable-sized, just +// a pointer if fix-sized) and trivially copyable, so it's probably cheaper to +// pass it by value than by const reference. + +namespace impl { + +// Magic constant for indicating that the size of an ArrayView is variable +// instead of fixed. +enum : std::ptrdiff_t { kArrayViewVarSize = -4711 }; + +// Base class for ArrayViews of fixed nonzero size. +template +class ArrayViewBase { + static_assert(Size > 0, "ArrayView size must be variable or non-negative"); + + public: + ArrayViewBase(T* data, size_t size) : data_(data) {} + + static constexpr size_t size() { return Size; } + static constexpr bool empty() { return false; } + T* data() const { return data_; } + + protected: + static constexpr bool fixed_size() { return true; } + + private: + T* data_; +}; + +// Specialized base class for ArrayViews of fixed zero size. +template +class ArrayViewBase { + public: + explicit ArrayViewBase(T* data, size_t size) {} + + static constexpr size_t size() { return 0; } + static constexpr bool empty() { return true; } + T* data() const { return nullptr; } + + protected: + static constexpr bool fixed_size() { return true; } +}; + +// Specialized base class for ArrayViews of variable size. +template +class ArrayViewBase { + public: + ArrayViewBase(T* data, size_t size) + : data_(size == 0 ? nullptr : data), size_(size) {} + + size_t size() const { return size_; } + bool empty() const { return size_ == 0; } + T* data() const { return data_; } + + protected: + static constexpr bool fixed_size() { return false; } + + private: + T* data_; + size_t size_; +}; + +} // namespace impl + +template +class ArrayView final : public impl::ArrayViewBase { + public: + using value_type = T; + using const_iterator = const T*; + + // Construct an ArrayView from a pointer and a length. 
+ template + ArrayView(U* data, size_t size) + : impl::ArrayViewBase::ArrayViewBase(data, size) { + RTC_DCHECK_EQ(size == 0 ? nullptr : data, this->data()); + RTC_DCHECK_EQ(size, this->size()); + RTC_DCHECK_EQ(!this->data(), + this->size() == 0); // data is null iff size == 0. + } + + // Construct an empty ArrayView. Note that fixed-size ArrayViews of size > 0 + // cannot be empty. + ArrayView() : ArrayView(nullptr, 0) {} + ArrayView(std::nullptr_t) // NOLINT + : ArrayView() {} + ArrayView(std::nullptr_t, size_t size) + : ArrayView(static_cast(nullptr), size) { + static_assert(Size == 0 || Size == impl::kArrayViewVarSize, ""); + RTC_DCHECK_EQ(0, size); + } + + // Construct an ArrayView from a C-style array. + template + ArrayView(U (&array)[N]) // NOLINT + : ArrayView(array, N) { + static_assert(Size == N || Size == impl::kArrayViewVarSize, + "Array size must match ArrayView size"); + } + + // (Only if size is fixed.) Construct a fixed size ArrayView from a + // non-const std::array instance. For an ArrayView with variable size, the + // used ctor is ArrayView(U& u) instead. + template (N)>::type* = nullptr> + ArrayView(std::array& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + + // (Only if size is fixed.) Construct a fixed size ArrayView where T is + // const from a const(expr) std::array instance. For an ArrayView with + // variable size, the used ctor is ArrayView(U& u) instead. + template (N)>::type* = nullptr> + ArrayView(const std::array& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + + // (Only if size is fixed.) Construct an ArrayView from any type U that has a + // static constexpr size() method whose return value is equal to Size, and a + // data() method whose return value converts implicitly to T*. In particular, + // this means we allow conversion from ArrayView to ArrayView, but not the other way around. We also don't allow conversion from + // ArrayView to ArrayView, or from ArrayView to ArrayView when M != N. 
+ template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(U& u) // NOLINT + : ArrayView(u.data(), u.size()) { + static_assert(U::size() == Size, "Sizes must match exactly"); + } + + // (Only if size is variable.) Construct an ArrayView from any type U that + // has a size() method whose return value converts implicitly to size_t, and + // a data() method whose return value converts implicitly to T*. In + // particular, this means we allow conversion from ArrayView to + // ArrayView, but not the other way around. Other allowed + // conversions include + // ArrayView to ArrayView or ArrayView, + // std::vector to ArrayView or ArrayView, + // const std::vector to ArrayView, + // rtc::Buffer to ArrayView or ArrayView, and + // const rtc::Buffer to ArrayView. + template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(U& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + + // Indexing and iteration. These allow mutation even if the ArrayView is + // const, because the ArrayView doesn't own the array. (To prevent mutation, + // use a const element type.) + T& operator[](size_t idx) const { + RTC_DCHECK_LT(idx, this->size()); + RTC_DCHECK(this->data()); + return this->data()[idx]; + } + T* begin() const { return this->data(); } + T* end() const { return this->data() + this->size(); } + const T* cbegin() const { return this->data(); } + const T* cend() const { return this->data() + this->size(); } + + ArrayView subview(size_t offset, size_t size) const { + return offset < this->size() + ? ArrayView(this->data() + offset, + std::min(size, this->size() - offset)) + : ArrayView(); + } + ArrayView subview(size_t offset) const { + return subview(offset, this->size()); + } +}; + +// Comparing two ArrayViews compares their (pointer,size) pairs; it does *not* +// dereference the pointers. 
+template +bool operator==(const ArrayView& a, const ArrayView& b) { + return a.data() == b.data() && a.size() == b.size(); +} +template +bool operator!=(const ArrayView& a, const ArrayView& b) { + return !(a == b); +} + +// Variable-size ArrayViews are the size of two pointers; fixed-size ArrayViews +// are the size of one pointer. (And as a special case, fixed-size ArrayViews +// of size 0 require no storage.) +static_assert(sizeof(ArrayView) == 2 * sizeof(int*), ""); +static_assert(sizeof(ArrayView) == sizeof(int*), ""); +static_assert(std::is_empty>::value, ""); + +template +inline ArrayView MakeArrayView(T* data, size_t size) { + return ArrayView(data, size); +} + +} // namespace rtc + +#endif // API_ARRAY_VIEW_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_frame.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_frame.h new file mode 100644 index 000000000000..39840e5e6ad1 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_frame.h @@ -0,0 +1,132 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_FRAME_H_ +#define API_AUDIO_AUDIO_FRAME_H_ + +#include + +#include "rtc_base/constructormagic.h" +#include "typedefs.h" // NOLINT(build/include) + +namespace webrtc { + +/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It + * allows for adding and subtracting frames while keeping track of the resulting + * states. + * + * Notes + * - This is a de-facto api, not designed for external use. 
The AudioFrame class + * is in need of overhaul or even replacement, and anyone depending on it + * should be prepared for that. + * - The total number of samples is samples_per_channel_ * num_channels_. + * - Stereo data is interleaved starting with the left channel. + */ +class AudioFrame { + public: + // Using constexpr here causes linker errors unless the variable also has an + // out-of-class definition, which is impractical in this header-only class. + // (This makes no sense because it compiles as an enum value, which we most + // certainly cannot take the address of, just fine.) C++17 introduces inline + // variables which should allow us to switch to constexpr and keep this a + // header-only class. + enum : size_t { + // Stereo, 32 kHz, 60 ms (2 * 32 * 60) + kMaxDataSizeSamples = 3840, + kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t), + }; + + enum VADActivity { + kVadActive = 0, + kVadPassive = 1, + kVadUnknown = 2 + }; + enum SpeechType { + kNormalSpeech = 0, + kPLC = 1, + kCNG = 2, + kPLCCNG = 3, + kUndefined = 4 + }; + + AudioFrame(); + + // Resets all members to their default state. + void Reset(); + // Same as Reset(), but leaves mute state unchanged. Muting a frame requires + // the buffer to be zeroed on the next call to mutable_data(). Callers + // intending to write to the buffer immediately after Reset() can instead use + // ResetWithoutMuting() to skip this wasteful zeroing. + void ResetWithoutMuting(); + + void UpdateFrame(uint32_t timestamp, const int16_t* data, + size_t samples_per_channel, int sample_rate_hz, + SpeechType speech_type, VADActivity vad_activity, + size_t num_channels = 1); + + void CopyFrom(const AudioFrame& src); + + // Sets a wall-time clock timestamp in milliseconds to be used for profiling + // of time between two points in the audio chain. 
+ // Example: + // t0: UpdateProfileTimeStamp() + // t1: ElapsedProfileTimeMs() => t1 - t0 [msec] + void UpdateProfileTimeStamp(); + // Returns the time difference between now and when UpdateProfileTimeStamp() + // was last called. Returns -1 if UpdateProfileTimeStamp() has not yet been + // called. + int64_t ElapsedProfileTimeMs() const; + + // data() returns a zeroed static buffer if the frame is muted. + // mutable_frame() always returns a non-static buffer; the first call to + // mutable_frame() zeros the non-static buffer and marks the frame unmuted. + const int16_t* data() const; + int16_t* mutable_data(); + + // Prefer to mute frames using AudioFrameOperations::Mute. + void Mute(); + // Frame is muted by default. + bool muted() const; + + // RTP timestamp of the first sample in the AudioFrame. + uint32_t timestamp_ = 0; + // Time since the first frame in milliseconds. + // -1 represents an uninitialized value. + int64_t elapsed_time_ms_ = -1; + // NTP time of the estimated capture time in local timebase in milliseconds. + // -1 represents an uninitialized value. + int64_t ntp_time_ms_ = -1; + size_t samples_per_channel_ = 0; + int sample_rate_hz_ = 0; + size_t num_channels_ = 0; + SpeechType speech_type_ = kUndefined; + VADActivity vad_activity_ = kVadUnknown; + // Monotonically increasing timestamp intended for profiling of audio frames. + // Typically used for measuring elapsed time between two different points in + // the audio path. No lock is used to save resources and we are thread safe + // by design. Also, rtc::Optional is not used since it will cause a "complex + // class/struct needs an explicit out-of-line destructor" build error. + int64_t profile_timestamp_ms_ = 0; + + private: + // A permamently zeroed out buffer to represent muted frames. This is a + // header-only class, so the only way to avoid creating a separate empty + // buffer per translation unit is to wrap a static in an inline function. 
+ static const int16_t* empty_data(); + + int16_t data_[kMaxDataSizeSamples]; + bool muted_ = true; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame); +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_FRAME_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_mixer.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_mixer.h new file mode 100644 index 000000000000..14eefc173be4 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/audio_mixer.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_MIXER_H_ +#define API_AUDIO_AUDIO_MIXER_H_ + +#include + +#include "api/audio/audio_frame.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// WORK IN PROGRESS +// This class is under development and is not yet intended for for use outside +// of WebRtc/Libjingle. +class AudioMixer : public rtc::RefCountInterface { + public: + // A callback class that all mixer participants must inherit from/implement. + class Source { + public: + enum class AudioFrameInfo { + kNormal, // The samples in audio_frame are valid and should be used. + kMuted, // The samples in audio_frame should not be used, but + // should be implicitly interpreted as zero. Other + // fields in audio_frame may be read and should + // contain meaningful values. + kError, // The audio_frame will not be used. + }; + + // Overwrites |audio_frame|. The data_ field is overwritten with + // 10 ms of new audio (either 1 or 2 interleaved channels) at + // |sample_rate_hz|. 
All fields in |audio_frame| must be updated. + virtual AudioFrameInfo GetAudioFrameWithInfo(int sample_rate_hz, + AudioFrame* audio_frame) = 0; + + // A way for a mixer implementation to distinguish participants. + virtual int Ssrc() const = 0; + + // A way for this source to say that GetAudioFrameWithInfo called + // with this sample rate or higher will not cause quality loss. + virtual int PreferredSampleRate() const = 0; + + virtual ~Source() {} + }; + + // Returns true if adding was successful. A source is never added + // twice. Addition and removal can happen on different threads. + virtual bool AddSource(Source* audio_source) = 0; + + // Removal is never attempted if a source has not been successfully + // added to the mixer. + virtual void RemoveSource(Source* audio_source) = 0; + + // Performs mixing by asking registered audio sources for audio. The + // mixed result is placed in the provided AudioFrame. This method + // will only be called from a single thread. The channels argument + // specifies the number of channels of the mix result. The mixer + // should mix at a rate that doesn't cause quality loss of the + // sources' audio. The mixing rate is one of the rates listed in + // AudioProcessing::NativeRate. All fields in + // |audio_frame_for_mixing| must be updated. + virtual void Mix(size_t number_of_channels, + AudioFrame* audio_frame_for_mixing) = 0; + + protected: + // Since the mixer is reference counted, the destructor may be + // called from any thread. 
+ ~AudioMixer() override {} +}; +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_MIXER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_config.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_config.h new file mode 100644 index 000000000000..dcbfad483341 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_config.h @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ +#define API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ + +#include // size_t + +namespace webrtc { + +// Configuration struct for EchoCanceller3 +struct EchoCanceller3Config { + EchoCanceller3Config(); + EchoCanceller3Config(const EchoCanceller3Config& e); + struct Delay { + size_t default_delay = 5; + size_t down_sampling_factor = 4; + size_t num_filters = 5; + size_t api_call_jitter_blocks = 26; + size_t min_echo_path_delay_blocks = 0; + size_t delay_headroom_blocks = 2; + size_t hysteresis_limit_1_blocks = 1; + size_t hysteresis_limit_2_blocks = 1; + size_t skew_hysteresis_blocks = 3; + } delay; + + struct Filter { + struct MainConfiguration { + size_t length_blocks; + float leakage_converged; + float leakage_diverged; + float error_floor; + float noise_gate; + }; + + struct ShadowConfiguration { + size_t length_blocks; + float rate; + float noise_gate; + }; + + MainConfiguration main = {13, 0.005f, 0.1f, 0.001f, 20075344.f}; + ShadowConfiguration shadow = {13, 0.7f, 20075344.f}; + + MainConfiguration main_initial = {12, 0.05f, 5.f, 
0.001f, 20075344.f}; + ShadowConfiguration shadow_initial = {12, 0.9f, 20075344.f}; + + size_t config_change_duration_blocks = 250; + } filter; + + struct Erle { + float min = 1.f; + float max_l = 4.f; + float max_h = 1.5f; + } erle; + + struct EpStrength { + float lf = 1.f; + float mf = 1.f; + float hf = 1.f; + float default_len = 0.88f; + bool reverb_based_on_render = true; + bool echo_can_saturate = true; + bool bounded_erl = false; + } ep_strength; + + struct Mask { + Mask(); + Mask(const Mask& m); + float m0 = 0.1f; + float m1 = 0.01f; + float m2 = 0.0001f; + float m3 = 0.01f; + float m5 = 0.01f; + float m6 = 0.0001f; + float m7 = 0.01f; + float m8 = 0.0001f; + float m9 = 0.1f; + + float gain_curve_offset = 1.45f; + float gain_curve_slope = 5.f; + float temporal_masking_lf = 0.9f; + float temporal_masking_hf = 0.6f; + size_t temporal_masking_lf_bands = 3; + } gain_mask; + + struct EchoAudibility { + float low_render_limit = 4 * 64.f; + float normal_render_limit = 64.f; + float floor_power = 2 * 64.f; + float audibility_threshold_lf = 10; + float audibility_threshold_mf = 10; + float audibility_threshold_hf = 10; + bool use_stationary_properties = true; + } echo_audibility; + + struct RenderLevels { + float active_render_limit = 100.f; + float poor_excitation_render_limit = 150.f; + float poor_excitation_render_limit_ds8 = 20.f; + } render_levels; + + struct GainUpdates { + struct GainChanges { + float max_inc; + float max_dec; + float rate_inc; + float rate_dec; + float min_inc; + float min_dec; + }; + + GainChanges low_noise = {2.f, 2.f, 1.4f, 1.4f, 1.1f, 1.1f}; + GainChanges initial = {2.f, 2.f, 1.5f, 1.5f, 1.2f, 1.2f}; + GainChanges normal = {2.f, 2.f, 1.5f, 1.5f, 1.2f, 1.2f}; + GainChanges saturation = {1.2f, 1.2f, 1.5f, 1.5f, 1.f, 1.f}; + GainChanges nonlinear = {1.5f, 1.5f, 1.2f, 1.2f, 1.1f, 1.1f}; + + float max_inc_factor = 2.0f; + float max_dec_factor_lf = 0.25f; + float floor_first_increase = 0.00001f; + } gain_updates; + + struct EchoRemovalControl { 
+ struct GainRampup { + float first_non_zero_gain = 0.001f; + int non_zero_gain_blocks = 187; + int full_gain_blocks = 312; + } gain_rampup; + + bool has_clock_drift = false; + } echo_removal_control; + + struct EchoModel { + size_t noise_floor_hold = 50; + float min_noise_floor_power = 1638400.f; + float stationary_gate_slope = 10.f; + float noise_gate_power = 27509.42f; + float noise_gate_slope = 0.3f; + size_t render_pre_window_size = 1; + size_t render_post_window_size = 1; + float nonlinear_hold = 1; + float nonlinear_release = 0.001f; + } echo_model; + + struct Suppressor { + size_t bands_with_reliable_coherence = 5; + } suppressor; +}; +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_factory.h new file mode 100644 index 000000000000..f6db11685037 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_canceller3_factory.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ +#define API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ + +#include + +#include "api/audio/echo_canceller3_config.h" +#include "api/audio/echo_control.h" + +namespace webrtc { + +class EchoCanceller3Factory : public EchoControlFactory { + public: + // Factory producing EchoCanceller3 instances with the default configuration. 
+ EchoCanceller3Factory(); + + // Factory producing EchoCanceller3 instances with the specified + // configuration. + explicit EchoCanceller3Factory(const EchoCanceller3Config& config); + + // Creates an EchoCanceller3 running at the specified sampling rate. + std::unique_ptr Create(int sample_rate_hz) override; + + private: + const EchoCanceller3Config config_; +}; +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_control.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_control.h new file mode 100644 index 000000000000..f549f40fbb05 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio/echo_control.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CONTROL_H_ +#define API_AUDIO_ECHO_CONTROL_H_ + +#include + +namespace webrtc { + +class AudioBuffer; + +// Interface for an acoustic echo cancellation (AEC) submodule. +class EchoControl { + public: + // Analysis (not changing) of the render signal. + virtual void AnalyzeRender(AudioBuffer* render) = 0; + + // Analysis (not changing) of the capture signal. + virtual void AnalyzeCapture(AudioBuffer* capture) = 0; + + // Processes the capture signal in order to remove the echo. + virtual void ProcessCapture(AudioBuffer* capture, bool echo_path_change) = 0; + + struct Metrics { + double echo_return_loss; + double echo_return_loss_enhancement; + int delay_ms; + }; + + // Collect current metrics from the echo controller. 
+ virtual Metrics GetMetrics() const = 0; + + // Provides an optional external estimate of the audio buffer delay. + virtual void SetAudioBufferDelay(size_t delay_ms) = 0; + + virtual ~EchoControl() {} +}; + +// Interface for a factory that creates EchoControllers. +class EchoControlFactory { + public: + virtual std::unique_ptr Create(int sample_rate_hz) = 0; + virtual ~EchoControlFactory() = default; +}; +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CONTROL_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_decoder_L16.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_decoder_L16.h new file mode 100644 index 000000000000..deef90920198 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_decoder_L16.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ +#define API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// L16 decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioDecoderL16 { + struct Config { + bool IsOk() const { + return (sample_rate_hz == 8000 || sample_rate_hz == 16000 || + sample_rate_hz == 32000 || sample_rate_hz == 48000) && + num_channels >= 1; + } + int sample_rate_hz = 8000; + int num_channels = 1; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + const Config& config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_encoder_L16.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_encoder_L16.h new file mode 100644 index 000000000000..08d7ef1eb812 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/L16/audio_encoder_L16.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ +#define API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// L16 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioEncoderL16 { + struct Config { + bool IsOk() const { + return (sample_rate_hz == 8000 || sample_rate_hz == 16000 || + sample_rate_hz == 32000 || sample_rate_hz == 48000) && + num_channels >= 1 && frame_size_ms > 0 && frame_size_ms <= 120 && + frame_size_ms % 10 == 0; + } + int sample_rate_hz = 8000; + int num_channels = 1; + int frame_size_ms = 10; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_codec_pair_id.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_codec_pair_id.h new file mode 100644 index 000000000000..b10f14ea66d2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_codec_pair_id.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ +#define API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ + +#include + +#include + +namespace webrtc { + +class AudioCodecPairId final { + public: + // Copyable, but not default constructible. 
+ AudioCodecPairId() = delete; + AudioCodecPairId(const AudioCodecPairId&) = default; + AudioCodecPairId(AudioCodecPairId&&) = default; + AudioCodecPairId& operator=(const AudioCodecPairId&) = default; + AudioCodecPairId& operator=(AudioCodecPairId&&) = default; + + friend void swap(AudioCodecPairId& a, AudioCodecPairId& b) { + using std::swap; + swap(a.id_, b.id_); + } + + // Creates a new ID, unequal to any previously created ID. + static AudioCodecPairId Create(); + + // IDs can be tested for equality. + friend bool operator==(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ == b.id_; + } + friend bool operator!=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ != b.id_; + } + + // Comparisons. The ordering of ID values is completely arbitrary, but + // stable, so it's useful e.g. if you want to use IDs as keys in an ordered + // map. + friend bool operator<(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ < b.id_; + } + friend bool operator<=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ <= b.id_; + } + friend bool operator>=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ >= b.id_; + } + friend bool operator>(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ > b.id_; + } + + // Returns a numeric representation of the ID. The numeric values are + // completely arbitrary, but stable, collision-free, and reasonably evenly + // distributed, so they are e.g. useful as hash values in unordered maps. 
+ uint64_t NumericRepresentation() const { return id_; } + + private: + explicit AudioCodecPairId(uint64_t id) : id_(id) {} + + uint64_t id_; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder.h new file mode 100644 index 000000000000..021288fc2bd9 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder.h @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_H_ + +#include +#include + +#include "api/array_view.h" +#include "api/optional.h" +#include "rtc_base/buffer.h" +#include "rtc_base/constructormagic.h" +#include "typedefs.h" // NOLINT(build/include) + +namespace webrtc { + +class AudioDecoder { + public: + enum SpeechType { + kSpeech = 1, + kComfortNoise = 2, + }; + + // Used by PacketDuration below. Save the value -1 for errors. + enum { kNotImplemented = -2 }; + + AudioDecoder() = default; + virtual ~AudioDecoder() = default; + + class EncodedAudioFrame { + public: + struct DecodeResult { + size_t num_decoded_samples; + SpeechType speech_type; + }; + + virtual ~EncodedAudioFrame() = default; + + // Returns the duration in samples-per-channel of this audio frame. + // If no duration can be ascertained, returns zero. + virtual size_t Duration() const = 0; + + // Returns true if this packet contains DTX. 
+ virtual bool IsDtxPacket() const; + + // Decodes this frame of audio and writes the result in |decoded|. + // |decoded| must be large enough to store as many samples as indicated by a + // call to Duration() . On success, returns an rtc::Optional containing the + // total number of samples across all channels, as well as whether the + // decoder produced comfort noise or speech. On failure, returns an empty + // rtc::Optional. Decode may be called at most once per frame object. + virtual rtc::Optional Decode( + rtc::ArrayView decoded) const = 0; + }; + + struct ParseResult { + ParseResult(); + ParseResult(uint32_t timestamp, + int priority, + std::unique_ptr frame); + ParseResult(ParseResult&& b); + ~ParseResult(); + + ParseResult& operator=(ParseResult&& b); + + // The timestamp of the frame is in samples per channel. + uint32_t timestamp; + // The relative priority of the frame compared to other frames of the same + // payload and the same timeframe. A higher value means a lower priority. + // The highest priority is zero - negative values are not allowed. + int priority; + std::unique_ptr frame; + }; + + // Let the decoder parse this payload and prepare zero or more decodable + // frames. Each frame must be between 10 ms and 120 ms long. The caller must + // ensure that the AudioDecoder object outlives any frame objects returned by + // this call. The decoder is free to swap or move the data from the |payload| + // buffer. |timestamp| is the input timestamp, in samples, corresponding to + // the start of the payload. + virtual std::vector ParsePayload(rtc::Buffer&& payload, + uint32_t timestamp); + + // Decodes |encode_len| bytes from |encoded| and writes the result in + // |decoded|. The maximum bytes allowed to be written into |decoded| is + // |max_decoded_bytes|. Returns the total number of samples across all + // channels. If the decoder produced comfort noise, |speech_type| + // is set to kComfortNoise, otherwise it is kSpeech. 
The desired output + // sample rate is provided in |sample_rate_hz|, which must be valid for the + // codec at hand. + int Decode(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type); + + // Same as Decode(), but interfaces to the decoders redundant decode function. + // The default implementation simply calls the regular Decode() method. + int DecodeRedundant(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type); + + // Indicates if the decoder implements the DecodePlc method. + virtual bool HasDecodePlc() const; + + // Calls the packet-loss concealment of the decoder to update the state after + // one or several lost packets. The caller has to make sure that the + // memory allocated in |decoded| should accommodate |num_frames| frames. + virtual size_t DecodePlc(size_t num_frames, int16_t* decoded); + + // Resets the decoder state (empty buffers etc.). + virtual void Reset() = 0; + + // Notifies the decoder of an incoming packet to NetEQ. + virtual int IncomingPacket(const uint8_t* payload, + size_t payload_len, + uint16_t rtp_sequence_number, + uint32_t rtp_timestamp, + uint32_t arrival_timestamp); + + // Returns the last error code from the decoder. + virtual int ErrorCode(); + + // Returns the duration in samples-per-channel of the payload in |encoded| + // which is |encoded_len| bytes long. Returns kNotImplemented if no duration + // estimate is available, or -1 in case of an error. + virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len) const; + + // Returns the duration in samples-per-channel of the redandant payload in + // |encoded| which is |encoded_len| bytes long. Returns kNotImplemented if no + // duration estimate is available, or -1 in case of an error. 
+ virtual int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const; + + // Detects whether a packet has forward error correction. The packet is + // comprised of the samples in |encoded| which is |encoded_len| bytes long. + // Returns true if the packet has FEC and false otherwise. + virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const; + + // Returns the actual sample rate of the decoder's output. This value may not + // change during the lifetime of the decoder. + virtual int SampleRateHz() const = 0; + + // The number of channels in the decoder's output. This value may not change + // during the lifetime of the decoder. + virtual size_t Channels() const = 0; + + protected: + static SpeechType ConvertSpeechType(int16_t type); + + virtual int DecodeInternal(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + int16_t* decoded, + SpeechType* speech_type) = 0; + + virtual int DecodeRedundantInternal(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + int16_t* decoded, + SpeechType* speech_type); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoder); +}; + +} // namespace webrtc +#endif // API_AUDIO_CODECS_AUDIO_DECODER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory.h new file mode 100644 index 000000000000..9954f314a629 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// A factory that creates AudioDecoders. +// NOTE: This class is still under development and may change without notice. +class AudioDecoderFactory : public rtc::RefCountInterface { + public: + virtual std::vector GetSupportedDecoders() = 0; + + virtual bool IsSupportedDecoder(const SdpAudioFormat& format) = 0; + + // Create a new decoder instance. The `codec_pair_id` argument is used to + // link encoders and decoders that talk to the same remote entity; if a + // MakeAudioEncoder() and a MakeAudioDecoder() call receive non-null IDs that + // compare equal, the factory implementations may assume that the encoder and + // decoder form a pair. + // + // Note: Implementations need to be robust against combinations other than + // one encoder, one decoder getting the same ID; such decoders must still + // work. + virtual std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + rtc::Optional codec_pair_id); + + // Deprecated version of the above. 
+ virtual std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory_template.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory_template.h new file mode 100644 index 000000000000..20a56d377131 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_decoder_factory_template.h @@ -0,0 +1,170 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ + +#include +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "rtc_base/refcountedobject.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +namespace audio_decoder_factory_template_impl { + +template +class MakeAudioDecoderTakesTwoArgs { + private: + template + static auto Test(int) -> decltype( + U::MakeAudioDecoder(std::declval(), + std::declval>()), + std::true_type()); + + template + static std::false_type Test(...); + + public: + static constexpr bool value = decltype(Test(0))::value; +}; + +template +struct Helper; + +// Base case: 0 template parameters. 
+template <> +struct Helper<> { + static void AppendSupportedDecoders(std::vector* specs) {} + static bool IsSupportedDecoder(const SdpAudioFormat& format) { return false; } + static std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) { + return nullptr; + } +}; + +// Inductive case: Called with n + 1 template parameters; calls subroutines +// with n template parameters. +template +struct Helper { + static void AppendSupportedDecoders(std::vector* specs) { + T::AppendSupportedDecoders(specs); + Helper::AppendSupportedDecoders(specs); + } + static bool IsSupportedDecoder(const SdpAudioFormat& format) { + auto opt_config = T::SdpToConfig(format); + static_assert(std::is_same>::value, + "T::SdpToConfig() must return a value of type " + "rtc::Optional"); + return opt_config ? true : Helper::IsSupportedDecoder(format); + } + static std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) { + auto opt_config = T::SdpToConfig(format); + return opt_config ? 
CallMakeAudioDecoder(*opt_config, codec_pair_id) + : Helper::MakeAudioDecoder(format, codec_pair_id); + } + template < + typename ConfigT, + typename std::enable_if< + !MakeAudioDecoderTakesTwoArgs::value>::type* = nullptr> + static decltype(T::MakeAudioDecoder(std::declval())) + CallMakeAudioDecoder(const ConfigT& config, + rtc::Optional codec_pair_id) { + return T::MakeAudioDecoder(config); + } + template + static decltype( + T::MakeAudioDecoder(std::declval(), + std::declval>())) + CallMakeAudioDecoder(const ConfigT& config, + rtc::Optional codec_pair_id) { + return T::MakeAudioDecoder(config, codec_pair_id); + } +}; + +template +class AudioDecoderFactoryT : public AudioDecoderFactory { + public: + std::vector GetSupportedDecoders() override { + std::vector specs; + Helper::AppendSupportedDecoders(&specs); + return specs; + } + + bool IsSupportedDecoder(const SdpAudioFormat& format) override { + return Helper::IsSupportedDecoder(format); + } + + std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) override { + return Helper::MakeAudioDecoder(format, codec_pair_id); + } +}; + +} // namespace audio_decoder_factory_template_impl + +// Make an AudioDecoderFactory that can create instances of the given decoders. +// +// Each decoder type is given as a template argument to the function; it should +// be a struct with the following static member functions: +// +// // Converts |audio_format| to a ConfigType instance. Returns an empty +// // optional if |audio_format| doesn't correctly specify an decoder of our +// // type. +// rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); +// +// // Appends zero or more AudioCodecSpecs to the list that will be returned +// // by AudioDecoderFactory::GetSupportedDecoders(). +// void AppendSupportedDecoders(std::vector* specs); +// +// // Creates an AudioDecoder for the specified format. Used to implement +// // AudioDecoderFactory::MakeAudioDecoder(). 
+// std::unique_ptr MakeAudioDecoder(const ConfigType& config); +// OR +// std::unique_ptr MakeAudioDecoder( +// const ConfigType& config, +// rtc::Optional codec_pair_id); +// +// ConfigType should be a type that encapsulates all the settings needed to +// create an AudioDecoder. T::Config (where T is the decoder struct) should +// either be the config type, or an alias for it. +// +// Whenever it tries to do something, the new factory will try each of the +// decoder types in the order they were specified in the template argument +// list, stopping at the first one that claims to be able to do the job. +// +// NOTE: This function is still under development and may change without notice. +// +// TODO(kwiberg): Point at CreateBuiltinAudioDecoderFactory() for an example of +// how it is used. +template +rtc::scoped_refptr CreateAudioDecoderFactory() { + // There's no technical reason we couldn't allow zero template parameters, + // but such a factory couldn't create any decoders, and callers can do this + // by mistake by simply forgetting the <> altogether. So we forbid it in + // order to prevent caller foot-shooting. + static_assert(sizeof...(Ts) >= 1, + "Caller must give at least one template parameter"); + + return rtc::scoped_refptr( + new rtc::RefCountedObject< + audio_decoder_factory_template_impl::AudioDecoderFactoryT>()); +} + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder.h new file mode 100644 index 000000000000..7ad9ba4d0950 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder.h @@ -0,0 +1,250 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_H_ +#define API_AUDIO_CODECS_AUDIO_ENCODER_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/optional.h" +#include "rtc_base/buffer.h" +#include "rtc_base/deprecation.h" +#include "typedefs.h" // NOLINT(build/include) + +namespace webrtc { + +class RtcEventLog; + +// Statistics related to Audio Network Adaptation. +struct ANAStats { + ANAStats(); + ANAStats(const ANAStats&); + ~ANAStats(); + // Number of actions taken by the ANA bitrate controller since the start of + // the call. If this value is not set, it indicates that the bitrate + // controller is disabled. + rtc::Optional bitrate_action_counter; + // Number of actions taken by the ANA channel controller since the start of + // the call. If this value is not set, it indicates that the channel + // controller is disabled. + rtc::Optional channel_action_counter; + // Number of actions taken by the ANA DTX controller since the start of the + // call. If this value is not set, it indicates that the DTX controller is + // disabled. + rtc::Optional dtx_action_counter; + // Number of actions taken by the ANA FEC controller since the start of the + // call. If this value is not set, it indicates that the FEC controller is + // disabled. + rtc::Optional fec_action_counter; + // Number of times the ANA frame length controller decided to increase the + // frame length since the start of the call. If this value is not set, it + // indicates that the frame length controller is disabled. 
+ rtc::Optional frame_length_increase_counter; + // Number of times the ANA frame length controller decided to decrease the + // frame length since the start of the call. If this value is not set, it + // indicates that the frame length controller is disabled. + rtc::Optional frame_length_decrease_counter; + // The uplink packet loss fractions as set by the ANA FEC controller. If this + // value is not set, it indicates that the ANA FEC controller is not active. + rtc::Optional uplink_packet_loss_fraction; +}; + +// This is the interface class for encoders in AudioCoding module. Each codec +// type must have an implementation of this class. +class AudioEncoder { + public: + // Used for UMA logging of codec usage. The same codecs, with the + // same values, must be listed in + // src/tools/metrics/histograms/histograms.xml in chromium to log + // correct values. + enum class CodecType { + kOther = 0, // Codec not specified, and/or not listed in this enum + kOpus = 1, + kIsac = 2, + kPcmA = 3, + kPcmU = 4, + kG722 = 5, + kIlbc = 6, + + // Number of histogram bins in the UMA logging of codec types. The + // total number of different codecs that are logged cannot exceed this + // number. + kMaxLoggedAudioCodecTypes + }; + + struct EncodedInfoLeaf { + size_t encoded_bytes = 0; + uint32_t encoded_timestamp = 0; + int payload_type = 0; + bool send_even_if_empty = false; + bool speech = true; + CodecType encoder_type = CodecType::kOther; + }; + + // This is the main struct for auxiliary encoding information. Each encoded + // packet should be accompanied by one EncodedInfo struct, containing the + // total number of |encoded_bytes|, the |encoded_timestamp| and the + // |payload_type|. If the packet contains redundant encodings, the |redundant| + // vector will be populated with EncodedInfoLeaf structs. 
Each struct in the + // vector represents one encoding; the order of structs in the vector is the + // same as the order in which the actual payloads are written to the byte + // stream. When EncoderInfoLeaf structs are present in the vector, the main + // struct's |encoded_bytes| will be the sum of all the |encoded_bytes| in the + // vector. + struct EncodedInfo : public EncodedInfoLeaf { + EncodedInfo(); + EncodedInfo(const EncodedInfo&); + EncodedInfo(EncodedInfo&&); + ~EncodedInfo(); + EncodedInfo& operator=(const EncodedInfo&); + EncodedInfo& operator=(EncodedInfo&&); + + std::vector redundant; + }; + + virtual ~AudioEncoder() = default; + + // Returns the input sample rate in Hz and the number of input channels. + // These are constants set at instantiation time. + virtual int SampleRateHz() const = 0; + virtual size_t NumChannels() const = 0; + + // Returns the rate at which the RTP timestamps are updated. The default + // implementation returns SampleRateHz(). + virtual int RtpTimestampRateHz() const; + + // Returns the number of 10 ms frames the encoder will put in the next + // packet. This value may only change when Encode() outputs a packet; i.e., + // the encoder may vary the number of 10 ms frames from packet to packet, but + // it must decide the length of the next packet no later than when outputting + // the preceding packet. + virtual size_t Num10MsFramesInNextPacket() const = 0; + + // Returns the maximum value that can be returned by + // Num10MsFramesInNextPacket(). + virtual size_t Max10MsFramesInAPacket() const = 0; + + // Returns the current target bitrate in bits/s. The value -1 means that the + // codec adapts the target automatically, and a current target cannot be + // provided. + virtual int GetTargetBitrate() const = 0; + + // Accepts one 10 ms block of input audio (i.e., SampleRateHz() / 100 * + // NumChannels() samples). Multi-channel audio must be sample-interleaved. 
+ // The encoder appends zero or more bytes of output to |encoded| and returns + // additional encoding information. Encode() checks some preconditions, calls + // EncodeImpl() which does the actual work, and then checks some + // postconditions. + EncodedInfo Encode(uint32_t rtp_timestamp, + rtc::ArrayView audio, + rtc::Buffer* encoded); + + // Resets the encoder to its starting state, discarding any input that has + // been fed to the encoder but not yet emitted in a packet. + virtual void Reset() = 0; + + // Enables or disables codec-internal FEC (forward error correction). Returns + // true if the codec was able to comply. The default implementation returns + // true when asked to disable FEC and false when asked to enable it (meaning + // that FEC isn't supported). + virtual bool SetFec(bool enable); + + // Enables or disables codec-internal VAD/DTX. Returns true if the codec was + // able to comply. The default implementation returns true when asked to + // disable DTX and false when asked to enable it (meaning that DTX isn't + // supported). + virtual bool SetDtx(bool enable); + + // Returns the status of codec-internal DTX. The default implementation always + // returns false. + virtual bool GetDtx() const; + + // Sets the application mode. Returns true if the codec was able to comply. + // The default implementation just returns false. + enum class Application { kSpeech, kAudio }; + virtual bool SetApplication(Application application); + + // Tells the encoder about the highest sample rate the decoder is expected to + // use when decoding the bitstream. The encoder would typically use this + // information to adjust the quality of the encoding. The default + // implementation does nothing. + virtual void SetMaxPlaybackRate(int frequency_hz); + + // This is to be deprecated. Please use |OnReceivedTargetAudioBitrate| + // instead. + // Tells the encoder what average bitrate we'd like it to produce. 
The + // encoder is free to adjust or disregard the given bitrate (the default + // implementation does the latter). + RTC_DEPRECATED virtual void SetTargetBitrate(int target_bps); + + // Causes this encoder to let go of any other encoders it contains, and + // returns a pointer to an array where they are stored (which is required to + // live as long as this encoder). Unless the returned array is empty, you may + // not call any methods on this encoder afterwards, except for the + // destructor. The default implementation just returns an empty array. + // NOTE: This method is subject to change. Do not call or override it. + virtual rtc::ArrayView> + ReclaimContainedEncoders(); + + // Enables audio network adaptor. Returns true if successful. + virtual bool EnableAudioNetworkAdaptor(const std::string& config_string, + RtcEventLog* event_log); + + // Disables audio network adaptor. + virtual void DisableAudioNetworkAdaptor(); + + // Provides uplink packet loss fraction to this encoder to allow it to adapt. + // |uplink_packet_loss_fraction| is in the range [0.0, 1.0]. + virtual void OnReceivedUplinkPacketLossFraction( + float uplink_packet_loss_fraction); + + // Provides 1st-order-FEC-recoverable uplink packet loss rate to this encoder + // to allow it to adapt. + // |uplink_recoverable_packet_loss_fraction| is in the range [0.0, 1.0]. + virtual void OnReceivedUplinkRecoverablePacketLossFraction( + float uplink_recoverable_packet_loss_fraction); + + // Provides target audio bitrate to this encoder to allow it to adapt. + virtual void OnReceivedTargetAudioBitrate(int target_bps); + + // Provides target audio bitrate and corresponding probing interval of + // the bandwidth estimator to this encoder to allow it to adapt. + virtual void OnReceivedUplinkBandwidth( + int target_audio_bitrate_bps, + rtc::Optional bwe_period_ms); + + // Provides RTT to this encoder to allow it to adapt. 
+ virtual void OnReceivedRtt(int rtt_ms); + + // Provides overhead to this encoder to adapt. The overhead is the number of + // bytes that will be added to each packet the encoder generates. + virtual void OnReceivedOverhead(size_t overhead_bytes_per_packet); + + // To allow encoder to adapt its frame length, it must be provided the frame + // length range that receivers can accept. + virtual void SetReceiverFrameLengthRange(int min_frame_length_ms, + int max_frame_length_ms); + + // Get statistics related to audio network adaptation. + virtual ANAStats GetANAStats() const; + + protected: + // Subclasses implement this to perform the actual encoding. Called by + // Encode(). + virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp, + rtc::ArrayView audio, + rtc::Buffer* encoded) = 0; +}; +} // namespace webrtc +#endif // API_AUDIO_CODECS_AUDIO_ENCODER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory.h new file mode 100644 index 000000000000..02bdfd92343f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_ +#define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// A factory that creates AudioEncoders. +// NOTE: This class is still under development and may change without notice. +class AudioEncoderFactory : public rtc::RefCountInterface { + public: + // Returns a prioritized list of audio codecs, to use for signaling etc. + virtual std::vector GetSupportedEncoders() = 0; + + // Returns information about how this format would be encoded, provided it's + // supported. More format and format variations may be supported than those + // returned by GetSupportedEncoders(). + virtual rtc::Optional QueryAudioEncoder( + const SdpAudioFormat& format) = 0; + + // Creates an AudioEncoder for the specified format. The encoder will tags + // its payloads with the specified payload type. The `codec_pair_id` argument + // is used to link encoders and decoders that talk to the same remote entity; + // if a MakeAudioEncoder() and a MakeAudioDecoder() call receive non-null IDs + // that compare equal, the factory implementations may assume that the + // encoder and decoder form a pair. + // + // Note: Implementations need to be robust against combinations other than + // one encoder, one decoder getting the same ID; such encoders must still + // work. + // + // TODO(ossu): Try to avoid audio encoders having to know their payload type. + virtual std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + rtc::Optional codec_pair_id); + + // Deprecated version of the above. 
+ virtual std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory_template.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory_template.h new file mode 100644 index 000000000000..6d4d9d61b026 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_encoder_factory_template.h @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ +#define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ + +#include +#include + +#include "api/audio_codecs/audio_encoder_factory.h" +#include "rtc_base/refcountedobject.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +namespace audio_encoder_factory_template_impl { + +template +class MakeAudioEncoderTakesThreeArgs { + private: + template + static auto Test(int) -> decltype( + U::MakeAudioEncoder(std::declval(), + std::declval(), + std::declval>()), + std::true_type()); + + template + static std::false_type Test(...); + + public: + static constexpr bool value = decltype(Test(0))::value; +}; + +template +struct Helper; + +// Base case: 0 template parameters. 
+template <> +struct Helper<> { + static void AppendSupportedEncoders(std::vector* specs) {} + static rtc::Optional QueryAudioEncoder( + const SdpAudioFormat& format) { + return rtc::nullopt; + } + static std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) { + return nullptr; + } +}; + +// Inductive case: Called with n + 1 template parameters; calls subroutines +// with n template parameters. +template +struct Helper { + static void AppendSupportedEncoders(std::vector* specs) { + T::AppendSupportedEncoders(specs); + Helper::AppendSupportedEncoders(specs); + } + static rtc::Optional QueryAudioEncoder( + const SdpAudioFormat& format) { + auto opt_config = T::SdpToConfig(format); + static_assert(std::is_same>::value, + "T::SdpToConfig() must return a value of type " + "rtc::Optional"); + return opt_config ? rtc::Optional( + T::QueryAudioEncoder(*opt_config)) + : Helper::QueryAudioEncoder(format); + } + static std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) { + auto opt_config = T::SdpToConfig(format); + if (opt_config) { + return CallMakeAudioEncoder(*opt_config, payload_type, codec_pair_id); + } else { + return Helper::MakeAudioEncoder(payload_type, format, + codec_pair_id); + } + } + template < + typename ConfigT, + typename std::enable_if< + !MakeAudioEncoderTakesThreeArgs::value>::type* = nullptr> + static decltype(T::MakeAudioEncoder(std::declval(), + std::declval())) + CallMakeAudioEncoder(const ConfigT& config, + int payload_type, + rtc::Optional codec_pair_id) { + return T::MakeAudioEncoder(config, payload_type); + } + template + static decltype( + T::MakeAudioEncoder(std::declval(), + std::declval(), + std::declval>())) + CallMakeAudioEncoder(const ConfigT& config, + int payload_type, + rtc::Optional codec_pair_id) { + return T::MakeAudioEncoder(config, payload_type, codec_pair_id); + } +}; + +template +class 
AudioEncoderFactoryT : public AudioEncoderFactory { + public: + std::vector GetSupportedEncoders() override { + std::vector specs; + Helper::AppendSupportedEncoders(&specs); + return specs; + } + + rtc::Optional QueryAudioEncoder( + const SdpAudioFormat& format) override { + return Helper::QueryAudioEncoder(format); + } + + std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + rtc::Optional codec_pair_id) override { + return Helper::MakeAudioEncoder(payload_type, format, codec_pair_id); + } +}; + +} // namespace audio_encoder_factory_template_impl + +// Make an AudioEncoderFactory that can create instances of the given encoders. +// +// Each encoder type is given as a template argument to the function; it should +// be a struct with the following static member functions: +// +// // Converts |audio_format| to a ConfigType instance. Returns an empty +// // optional if |audio_format| doesn't correctly specify an encoder of our +// // type. +// rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); +// +// // Appends zero or more AudioCodecSpecs to the list that will be returned +// // by AudioEncoderFactory::GetSupportedEncoders(). +// void AppendSupportedEncoders(std::vector* specs); +// +// // Returns information about how this format would be encoded. Used to +// // implement AudioEncoderFactory::QueryAudioEncoder(). +// AudioCodecInfo QueryAudioEncoder(const ConfigType& config); +// +// // Creates an AudioEncoder for the specified format. Used to implement +// // AudioEncoderFactory::MakeAudioEncoder(). +// std::unique_ptr MakeAudioEncoder(const ConfigType& config, +// int payload_type); +// OR +// std::unique_ptr MakeAudioEncoder( +// const ConfigType& config, +// int payload_type, +// rtc::Optional codec_pair_id); +// +// ConfigType should be a type that encapsulates all the settings needed to +// create an AudioEncoder. T::Config (where T is the encoder struct) should +// either be the config type, or an alias for it. 
+// +// Whenever it tries to do something, the new factory will try each of the +// encoders in the order they were specified in the template argument list, +// stopping at the first one that claims to be able to do the job. +// +// NOTE: This function is still under development and may change without notice. +// +// TODO(kwiberg): Point at CreateBuiltinAudioEncoderFactory() for an example of +// how it is used. +template +rtc::scoped_refptr CreateAudioEncoderFactory() { + // There's no technical reason we couldn't allow zero template parameters, + // but such a factory couldn't create any encoders, and callers can do this + // by mistake by simply forgetting the <> altogether. So we forbid it in + // order to prevent caller foot-shooting. + static_assert(sizeof...(Ts) >= 1, + "Caller must give at least one template parameter"); + + return rtc::scoped_refptr( + new rtc::RefCountedObject< + audio_encoder_factory_template_impl::AudioEncoderFactoryT>()); +} + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_format.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_format.h new file mode 100644 index 000000000000..553ab8fb2577 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/audio_format.h @@ -0,0 +1,137 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_AUDIO_FORMAT_H_ +#define API_AUDIO_CODECS_AUDIO_FORMAT_H_ + +#include +#include +#include + +#include "api/optional.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +// SDP specification for a single audio codec. +// NOTE: This class is still under development and may change without notice. +struct SdpAudioFormat { + using Parameters = std::map; + + SdpAudioFormat(const SdpAudioFormat&); + SdpAudioFormat(SdpAudioFormat&&); + SdpAudioFormat(const char* name, int clockrate_hz, size_t num_channels); + SdpAudioFormat(const std::string& name, + int clockrate_hz, + size_t num_channels); + SdpAudioFormat(const char* name, + int clockrate_hz, + size_t num_channels, + const Parameters& param); + SdpAudioFormat(const std::string& name, + int clockrate_hz, + size_t num_channels, + const Parameters& param); + ~SdpAudioFormat(); + + // Returns true if this format is compatible with |o|. In SDP terminology: + // would it represent the same codec between an offer and an answer? As + // opposed to operator==, this method disregards codec parameters. + bool Matches(const SdpAudioFormat& o) const; + + SdpAudioFormat& operator=(const SdpAudioFormat&); + SdpAudioFormat& operator=(SdpAudioFormat&&); + + friend bool operator==(const SdpAudioFormat& a, const SdpAudioFormat& b); + friend bool operator!=(const SdpAudioFormat& a, const SdpAudioFormat& b) { + return !(a == b); + } + + std::string name; + int clockrate_hz; + size_t num_channels; + Parameters parameters; +}; + +void swap(SdpAudioFormat& a, SdpAudioFormat& b); + +// Information about how an audio format is treated by the codec implementation. +// Contains basic information, such as sample rate and number of channels, which +// isn't uniformly presented by SDP. Also contains flags indicating support for +// integrating with other parts of WebRTC, like external VAD and comfort noise +// level calculation. 
+// +// To avoid API breakage, and make the code clearer, AudioCodecInfo should not +// be directly initializable with any flags indicating optional support. If it +// were, these initializers would break any time a new flag was added. It's also +// more difficult to understand: +// AudioCodecInfo info{16000, 1, 32000, true, false, false, true, true}; +// than +// AudioCodecInfo info(16000, 1, 32000); +// info.allow_comfort_noise = true; +// info.future_flag_b = true; +// info.future_flag_c = true; +struct AudioCodecInfo { + AudioCodecInfo(int sample_rate_hz, size_t num_channels, int bitrate_bps); + AudioCodecInfo(int sample_rate_hz, + size_t num_channels, + int default_bitrate_bps, + int min_bitrate_bps, + int max_bitrate_bps); + AudioCodecInfo(const AudioCodecInfo& b) = default; + ~AudioCodecInfo() = default; + + bool operator==(const AudioCodecInfo& b) const { + return sample_rate_hz == b.sample_rate_hz && + num_channels == b.num_channels && + default_bitrate_bps == b.default_bitrate_bps && + min_bitrate_bps == b.min_bitrate_bps && + max_bitrate_bps == b.max_bitrate_bps && + allow_comfort_noise == b.allow_comfort_noise && + supports_network_adaption == b.supports_network_adaption; + } + + bool operator!=(const AudioCodecInfo& b) const { return !(*this == b); } + + bool HasFixedBitrate() const { + RTC_DCHECK_GE(min_bitrate_bps, 0); + RTC_DCHECK_LE(min_bitrate_bps, default_bitrate_bps); + RTC_DCHECK_GE(max_bitrate_bps, default_bitrate_bps); + return min_bitrate_bps == max_bitrate_bps; + } + + int sample_rate_hz; + size_t num_channels; + int default_bitrate_bps; + int min_bitrate_bps; + int max_bitrate_bps; + + bool allow_comfort_noise = true; // This codec can be used with an external + // comfort noise generator. + bool supports_network_adaption = false; // This codec can adapt to varying + // network conditions. +}; + +// AudioCodecSpec ties an audio format to specific information about the codec +// and its implementation. 
+struct AudioCodecSpec { + bool operator==(const AudioCodecSpec& b) const { + return format == b.format && info == b.info; + } + + bool operator!=(const AudioCodecSpec& b) const { return !(*this == b); } + + SdpAudioFormat format; + AudioCodecInfo info; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_FORMAT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_decoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_decoder_factory.h new file mode 100644 index 000000000000..3127403e243f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_decoder_factory.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_ +#define API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_ + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +// Creates a new factory that can create the built-in types of audio decoders. +// NOTE: This function is still under development and may change without notice. 
+rtc::scoped_refptr CreateBuiltinAudioDecoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_encoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_encoder_factory.h new file mode 100644 index 000000000000..d37ff257e6e3 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/builtin_audio_encoder_factory.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ +#define API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ + +#include "api/audio_codecs/audio_encoder_factory.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +// Creates a new factory that can create the built-in types of audio encoders. +// NOTE: This function is still under development and may change without notice. 
+rtc::scoped_refptr CreateBuiltinAudioEncoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_decoder_g711.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_decoder_g711.h new file mode 100644 index 000000000000..5085283f0e74 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_decoder_g711.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_ +#define API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// G711 decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioDecoderG711 { + struct Config { + enum class Type { kPcmU, kPcmA }; + bool IsOk() const { + return (type == Type::kPcmU || type == Type::kPcmA) && num_channels >= 1; + } + Type type; + int num_channels; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + const Config& config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_encoder_g711.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_encoder_g711.h new file mode 100644 index 000000000000..22a74b472767 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g711/audio_encoder_g711.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ +#define API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// G711 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioEncoderG711 { + struct Config { + enum class Type { kPcmU, kPcmA }; + bool IsOk() const { + return (type == Type::kPcmU || type == Type::kPcmA) && + frame_size_ms > 0 && frame_size_ms % 10 == 0 && num_channels >= 1; + } + Type type = Type::kPcmU; + int num_channels = 1; + int frame_size_ms = 20; + }; + static rtc::Optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_decoder_g722.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_decoder_g722.h new file mode 100644 index 000000000000..34235dcad9ec --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_decoder_g722.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ +#define API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// G722 decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). 
+// +// NOTE: This struct is still under development and may change without notice. +struct AudioDecoderG722 { + struct Config { + bool IsOk() const { return num_channels == 1 || num_channels == 2; } + int num_channels; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722.h new file mode 100644 index 000000000000..08cd304c8a80 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ +#define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/g722/audio_encoder_g722_config.h" +#include "api/optional.h" + +namespace webrtc { + +// G722 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioEncoderG722 { + using Config = AudioEncoderG722Config; + static rtc::Optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderG722Config& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderG722Config& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722_config.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722_config.h new file mode 100644 index 000000000000..773e430ce3c0 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/g722/audio_encoder_g722_config.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ +#define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ + +namespace webrtc { + +// NOTE: This struct is still under development and may change without notice. 
+struct AudioEncoderG722Config { + bool IsOk() const { + return frame_size_ms > 0 && frame_size_ms % 10 == 0 && num_channels >= 1; + } + int frame_size_ms = 20; + int num_channels = 1; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_decoder_ilbc.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_decoder_ilbc.h new file mode 100644 index 000000000000..c233c4b79a13 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_decoder_ilbc.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// ILBC decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioDecoderIlbc { + struct Config {}; // Empty---no config values needed! 
+ static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc.h new file mode 100644 index 000000000000..85cdab0ee4c8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" +#include "api/optional.h" + +namespace webrtc { + +// ILBC encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. 
+struct AudioEncoderIlbc { + using Config = AudioEncoderIlbcConfig; + static rtc::Optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderIlbcConfig& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderIlbcConfig& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h new file mode 100644 index 000000000000..22909a957bb2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ + +namespace webrtc { + +// NOTE: This struct is still under development and may change without notice. +struct AudioEncoderIlbcConfig { + bool IsOk() const { + return (frame_size_ms == 20 || frame_size_ms == 30 || frame_size_ms == 40 || + frame_size_ms == 60); + } + int frame_size_ms = 30; // Valid values are 20, 30, 40, and 60 ms. + // Note that frame size 40 ms produces encodings with two 20 ms frames in + // them, and frame size 60 ms consists of two 30 ms frames. 
+}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac.h new file mode 100644 index 000000000000..f4e9331282be --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ + +#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT +#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" // nogncheck +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX +#include "api/audio_codecs/isac/audio_decoder_isac_float.h" // nogncheck +#else +#error "Must choose either fix or float" +#endif + +namespace webrtc { + +#if WEBRTC_USE_BUILTIN_ISAC_FIX +using AudioDecoderIsac = AudioDecoderIsacFix; +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT +using AudioDecoderIsac = AudioDecoderIsacFloat; +#endif + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_fix.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_fix.h new file mode 100644 index 000000000000..115486c6bf2d --- /dev/null +++ 
b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_fix.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// iSAC decoder API (fixed-point implementation) for use as a template +// parameter to CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioDecoderIsacFix { + struct Config {}; // Empty---no config values needed! + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_float.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_float.h new file mode 100644 index 000000000000..47c2c6037f93 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_decoder_isac_float.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// iSAC decoder API (floating-point implementation) for use as a template +// parameter to CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioDecoderIsacFloat { + struct Config { + bool IsOk() const { + return sample_rate_hz == 16000 || sample_rate_hz == 32000; + } + int sample_rate_hz = 16000; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac.h new file mode 100644 index 000000000000..3cb0a1f053d5 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ + +#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT +#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" // nogncheck +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX +#include "api/audio_codecs/isac/audio_encoder_isac_float.h" // nogncheck +#else +#error "Must choose either fix or float" +#endif + +namespace webrtc { + +#if WEBRTC_USE_BUILTIN_ISAC_FIX +using AudioEncoderIsac = AudioEncoderIsacFix; +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT +using AudioEncoderIsac = AudioEncoderIsacFloat; +#endif + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_fix.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_fix.h new file mode 100644 index 000000000000..7f2743cf172b --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_fix.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// iSAC encoder API (fixed-point implementation) for use as a template +// parameter to CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioEncoderIsacFix { + struct Config { + bool IsOk() const { return frame_size_ms == 30 || frame_size_ms == 60; } + int frame_size_ms = 30; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(Config config); + static std::unique_ptr MakeAudioEncoder( + Config config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_float.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_float.h new file mode 100644 index 000000000000..b6043f2abac2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/isac/audio_encoder_isac_float.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// iSAC encoder API (floating-point implementation) for use as a template +// parameter to CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioEncoderIsacFloat { + struct Config { + bool IsOk() const { + return (sample_rate_hz == 16000 && + (frame_size_ms == 30 || frame_size_ms == 60)) || + (sample_rate_hz == 32000 && frame_size_ms == 30); + } + int sample_rate_hz = 16000; + int frame_size_ms = 30; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_decoder_opus.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_decoder_opus.h new file mode 100644 index 000000000000..f76d244c678a --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_decoder_opus.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/optional.h" + +namespace webrtc { + +// Opus decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioDecoderOpus { + struct Config { + int num_channels; + }; + static rtc::Optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus.h new file mode 100644 index 000000000000..632526966da1 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ + +#include +#include + +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/optional.h" + +namespace webrtc { + +// Opus encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +// +// NOTE: This struct is still under development and may change without notice. +struct AudioEncoderOpus { + using Config = AudioEncoderOpusConfig; + static rtc::Optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderOpusConfig& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderOpusConfig& config, + int payload_type, + rtc::Optional codec_pair_id = rtc::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus_config.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus_config.h new file mode 100644 index 000000000000..d586592ab093 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_codecs/opus/audio_encoder_opus_config.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ + +#include + +#include + +#include "api/optional.h" + +namespace webrtc { + +// NOTE: This struct is still under development and may change without notice. +struct AudioEncoderOpusConfig { + static constexpr int kDefaultFrameSizeMs = 20; + + // Opus API allows a min bitrate of 500bps, but Opus documentation suggests + // bitrate should be in the range of 6000 to 510000, inclusive. + static constexpr int kMinBitrateBps = 6000; + static constexpr int kMaxBitrateBps = 510000; + + AudioEncoderOpusConfig(); + AudioEncoderOpusConfig(const AudioEncoderOpusConfig&); + ~AudioEncoderOpusConfig(); + AudioEncoderOpusConfig& operator=(const AudioEncoderOpusConfig&); + + bool IsOk() const; // Checks if the values are currently OK. + + int frame_size_ms; + size_t num_channels; + enum class ApplicationMode { kVoip, kAudio }; + ApplicationMode application; + + // NOTE: This member must always be set. + // TODO(kwiberg): Turn it into just an int. + rtc::Optional bitrate_bps; + + bool fec_enabled; + bool cbr_enabled; + int max_playback_rate_hz; + + // |complexity| is used when the bitrate goes above + // |complexity_threshold_bps| + |complexity_threshold_window_bps|; + // |low_rate_complexity| is used when the bitrate falls below + // |complexity_threshold_bps| - |complexity_threshold_window_bps|. In the + // interval in the middle, we keep using the most recent of the two + // complexity settings. + int complexity; + int low_rate_complexity; + int complexity_threshold_bps; + int complexity_threshold_window_bps; + + bool dtx_enabled; + std::vector supported_frame_lengths_ms; + int uplink_bandwidth_update_interval_ms; + + // NOTE: This member isn't necessary, and will soon go away. 
See + // https://bugs.chromium.org/p/webrtc/issues/detail?id=7847 + int payload_type; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_options.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_options.h new file mode 100644 index 000000000000..5d698424ebc4 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/audio_options.h @@ -0,0 +1,196 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_OPTIONS_H_ +#define API_AUDIO_OPTIONS_H_ + +#include + +#include "api/optional.h" +#include "rtc_base/stringencode.h" + +namespace cricket { + +// Options that can be applied to a VoiceMediaChannel or a VoiceMediaEngine. +// Used to be flags, but that makes it hard to selectively apply options. +// We are moving all of the setting of options to structs like this, +// but some things currently still use flags. 
+struct AudioOptions { + AudioOptions(); + ~AudioOptions(); + void SetAll(const AudioOptions& change) { + SetFrom(&echo_cancellation, change.echo_cancellation); +#if defined(WEBRTC_IOS) + SetFrom(&ios_force_software_aec_HACK, change.ios_force_software_aec_HACK); +#endif + SetFrom(&auto_gain_control, change.auto_gain_control); + SetFrom(&noise_suppression, change.noise_suppression); + SetFrom(&highpass_filter, change.highpass_filter); + SetFrom(&stereo_swapping, change.stereo_swapping); + SetFrom(&audio_jitter_buffer_max_packets, + change.audio_jitter_buffer_max_packets); + SetFrom(&audio_jitter_buffer_fast_accelerate, + change.audio_jitter_buffer_fast_accelerate); + SetFrom(&typing_detection, change.typing_detection); + SetFrom(&aecm_generate_comfort_noise, change.aecm_generate_comfort_noise); + SetFrom(&experimental_agc, change.experimental_agc); + SetFrom(&extended_filter_aec, change.extended_filter_aec); + SetFrom(&delay_agnostic_aec, change.delay_agnostic_aec); + SetFrom(&experimental_ns, change.experimental_ns); + SetFrom(&intelligibility_enhancer, change.intelligibility_enhancer); + SetFrom(&residual_echo_detector, change.residual_echo_detector); + SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov); + SetFrom(&tx_agc_digital_compression_gain, + change.tx_agc_digital_compression_gain); + SetFrom(&tx_agc_limiter, change.tx_agc_limiter); + SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe); + SetFrom(&audio_network_adaptor, change.audio_network_adaptor); + SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config); + } + + bool operator==(const AudioOptions& o) const { + return echo_cancellation == o.echo_cancellation && +#if defined(WEBRTC_IOS) + ios_force_software_aec_HACK == o.ios_force_software_aec_HACK && +#endif + auto_gain_control == o.auto_gain_control && + noise_suppression == o.noise_suppression && + highpass_filter == o.highpass_filter && + stereo_swapping == o.stereo_swapping && + audio_jitter_buffer_max_packets 
== + o.audio_jitter_buffer_max_packets && + audio_jitter_buffer_fast_accelerate == + o.audio_jitter_buffer_fast_accelerate && + typing_detection == o.typing_detection && + aecm_generate_comfort_noise == o.aecm_generate_comfort_noise && + experimental_agc == o.experimental_agc && + extended_filter_aec == o.extended_filter_aec && + delay_agnostic_aec == o.delay_agnostic_aec && + experimental_ns == o.experimental_ns && + intelligibility_enhancer == o.intelligibility_enhancer && + residual_echo_detector == o.residual_echo_detector && + tx_agc_target_dbov == o.tx_agc_target_dbov && + tx_agc_digital_compression_gain == + o.tx_agc_digital_compression_gain && + tx_agc_limiter == o.tx_agc_limiter && + combined_audio_video_bwe == o.combined_audio_video_bwe && + audio_network_adaptor == o.audio_network_adaptor && + audio_network_adaptor_config == o.audio_network_adaptor_config; + } + bool operator!=(const AudioOptions& o) const { return !(*this == o); } + + std::string ToString() const { + std::ostringstream ost; + ost << "AudioOptions {"; + ost << ToStringIfSet("aec", echo_cancellation); +#if defined(WEBRTC_IOS) + ost << ToStringIfSet("ios_force_software_aec_HACK", + ios_force_software_aec_HACK); +#endif + ost << ToStringIfSet("agc", auto_gain_control); + ost << ToStringIfSet("ns", noise_suppression); + ost << ToStringIfSet("hf", highpass_filter); + ost << ToStringIfSet("swap", stereo_swapping); + ost << ToStringIfSet("audio_jitter_buffer_max_packets", + audio_jitter_buffer_max_packets); + ost << ToStringIfSet("audio_jitter_buffer_fast_accelerate", + audio_jitter_buffer_fast_accelerate); + ost << ToStringIfSet("typing", typing_detection); + ost << ToStringIfSet("comfort_noise", aecm_generate_comfort_noise); + ost << ToStringIfSet("experimental_agc", experimental_agc); + ost << ToStringIfSet("extended_filter_aec", extended_filter_aec); + ost << ToStringIfSet("delay_agnostic_aec", delay_agnostic_aec); + ost << ToStringIfSet("experimental_ns", experimental_ns); + ost << 
ToStringIfSet("intelligibility_enhancer", intelligibility_enhancer); + ost << ToStringIfSet("residual_echo_detector", residual_echo_detector); + ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov); + ost << ToStringIfSet("tx_agc_digital_compression_gain", + tx_agc_digital_compression_gain); + ost << ToStringIfSet("tx_agc_limiter", tx_agc_limiter); + ost << ToStringIfSet("combined_audio_video_bwe", combined_audio_video_bwe); + ost << ToStringIfSet("audio_network_adaptor", audio_network_adaptor); + // The adaptor config is a serialized proto buffer and therefore not human + // readable. So we comment out the following line. + // ost << ToStringIfSet("audio_network_adaptor_config", + // audio_network_adaptor_config); + ost << "}"; + return ost.str(); + } + + // Audio processing that attempts to filter away the output signal from + // later inbound pickup. + rtc::Optional echo_cancellation; +#if defined(WEBRTC_IOS) + // Forces software echo cancellation on iOS. This is a temporary workaround + // (until Apple fixes the bug) for a device with non-functioning AEC. May + // improve performance on that particular device, but will cause unpredictable + // behavior in all other cases. See http://bugs.webrtc.org/8682. + rtc::Optional ios_force_software_aec_HACK; +#endif + // Audio processing to adjust the sensitivity of the local mic dynamically. + rtc::Optional auto_gain_control; + // Audio processing to filter out background noise. + rtc::Optional noise_suppression; + // Audio processing to remove background noise of lower frequencies. + rtc::Optional highpass_filter; + // Audio processing to swap the left and right channels. + rtc::Optional stereo_swapping; + // Audio receiver jitter buffer (NetEq) max capacity in number of packets. + rtc::Optional audio_jitter_buffer_max_packets; + // Audio receiver jitter buffer (NetEq) fast accelerate mode. + rtc::Optional audio_jitter_buffer_fast_accelerate; + // Audio processing to detect typing. 
+ rtc::Optional typing_detection; + rtc::Optional aecm_generate_comfort_noise; + rtc::Optional experimental_agc; + rtc::Optional extended_filter_aec; + rtc::Optional delay_agnostic_aec; + rtc::Optional experimental_ns; + rtc::Optional intelligibility_enhancer; + // Note that tx_agc_* only applies to non-experimental AGC. + rtc::Optional residual_echo_detector; + rtc::Optional tx_agc_target_dbov; + rtc::Optional tx_agc_digital_compression_gain; + rtc::Optional tx_agc_limiter; + // Enable combined audio+bandwidth BWE. + // TODO(pthatcher): This flag is set from the + // "googCombinedAudioVideoBwe", but not used anywhere. So delete it, + // and check if any other AudioOptions members are unused. + rtc::Optional combined_audio_video_bwe; + // Enable audio network adaptor. + rtc::Optional audio_network_adaptor; + // Config string for audio network adaptor. + rtc::Optional audio_network_adaptor_config; + + private: + template + static std::string ToStringIfSet(const char* key, + const rtc::Optional& val) { + std::string str; + if (val) { + str = key; + str += ": "; + str += val ? rtc::ToString(*val) : ""; + str += ", "; + } + return str; + } + + template + static void SetFrom(rtc::Optional* s, const rtc::Optional& o) { + if (o) { + *s = o; + } + } +}; + +} // namespace cricket + +#endif // API_AUDIO_OPTIONS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/audio_sink.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/audio_sink.h new file mode 100644 index 000000000000..fa4c3f681449 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/audio_sink.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CALL_AUDIO_SINK_H_ +#define API_CALL_AUDIO_SINK_H_ + +#if defined(WEBRTC_POSIX) && !defined(__STDC_FORMAT_MACROS) +// Avoid conflict with format_macros.h. +#define __STDC_FORMAT_MACROS +#endif + +#include +#include + +namespace webrtc { + +// Represents a simple push audio sink. +class AudioSinkInterface { + public: + virtual ~AudioSinkInterface() {} + + struct Data { + Data(const int16_t* data, + size_t samples_per_channel, + int sample_rate, + size_t channels, + uint32_t timestamp) + : data(data), + samples_per_channel(samples_per_channel), + sample_rate(sample_rate), + channels(channels), + timestamp(timestamp) {} + + const int16_t* data; // The actual 16bit audio data. + size_t samples_per_channel; // Number of frames in the buffer. + int sample_rate; // Sample rate in Hz. + size_t channels; // Number of channels in the audio data. + uint32_t timestamp; // The RTP timestamp of the first sample. + }; + + virtual void OnData(const Data& audio) = 0; +}; + +} // namespace webrtc + +#endif // API_CALL_AUDIO_SINK_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/callfactoryinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/callfactoryinterface.h new file mode 100644 index 000000000000..a7f32453c63f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/callfactoryinterface.h @@ -0,0 +1,36 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_CALL_CALLFACTORYINTERFACE_H_ +#define API_CALL_CALLFACTORYINTERFACE_H_ + +#include + +namespace webrtc { + +// These classes are not part of the API, and are treated as opaque pointers. +class Call; +struct CallConfig; + +// This interface exists to allow webrtc to be optionally built without media +// support (i.e., if only being used for data channels). PeerConnectionFactory +// is constructed with a CallFactoryInterface, which may or may not be null. +class CallFactoryInterface { + public: + virtual ~CallFactoryInterface() {} + + virtual Call* CreateCall(const CallConfig& config) = 0; +}; + +std::unique_ptr CreateCallFactory(); + +} // namespace webrtc + +#endif // API_CALL_CALLFACTORYINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/transport.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/transport.h new file mode 100644 index 000000000000..df101fcf0583 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/call/transport.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CALL_TRANSPORT_H_ +#define API_CALL_TRANSPORT_H_ + +#include +#include +#include + +namespace webrtc { + +// TODO(holmer): Look into unifying this with the PacketOptions in +// asyncpacketsocket.h. +struct PacketOptions { + PacketOptions(); + ~PacketOptions(); + + // A 16 bits positive id. Negative ids are invalid and should be interpreted + // as packet_id not being set. 
+ int packet_id = -1; + // Additional data bound to the RTP packet for use in application code, + // outside of WebRTC. + std::vector application_data; +}; + +class Transport { + public: + virtual bool SendRtp(const uint8_t* packet, + size_t length, + const PacketOptions& options) = 0; + virtual bool SendRtcp(const uint8_t* packet, size_t length) = 0; + + protected: + virtual ~Transport() {} +}; + +} // namespace webrtc + +#endif // API_CALL_TRANSPORT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/candidate.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/candidate.h new file mode 100644 index 000000000000..a1f45c215e16 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/candidate.h @@ -0,0 +1,208 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CANDIDATE_H_ +#define API_CANDIDATE_H_ + +#include +#include + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/helpers.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socketaddress.h" + +namespace cricket { + +// Candidate for ICE based connection discovery. +// TODO(phoglund): remove things in here that are not needed in the public API. + +class Candidate { + public: + Candidate(); + // TODO(pthatcher): Match the ordering and param list as per RFC 5245 + // candidate-attribute syntax. 
http://tools.ietf.org/html/rfc5245#section-15.1 + Candidate(int component, + const std::string& protocol, + const rtc::SocketAddress& address, + uint32_t priority, + const std::string& username, + const std::string& password, + const std::string& type, + uint32_t generation, + const std::string& foundation, + uint16_t network_id = 0, + uint16_t network_cost = 0); + Candidate(const Candidate&); + ~Candidate(); + + const std::string & id() const { return id_; } + void set_id(const std::string & id) { id_ = id; } + + int component() const { return component_; } + void set_component(int component) { component_ = component; } + + const std::string & protocol() const { return protocol_; } + void set_protocol(const std::string & protocol) { protocol_ = protocol; } + + // The protocol used to talk to relay. + const std::string& relay_protocol() const { return relay_protocol_; } + void set_relay_protocol(const std::string& protocol) { + relay_protocol_ = protocol; + } + + const rtc::SocketAddress & address() const { return address_; } + void set_address(const rtc::SocketAddress & address) { + address_ = address; + } + + uint32_t priority() const { return priority_; } + void set_priority(const uint32_t priority) { priority_ = priority; } + + // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc + // doesn't use it. + // Maps old preference (which was 0.0-1.0) to match priority (which + // is 0-2^32-1) to to match RFC 5245, section 4.1.2.1. Also see + // https://docs.google.com/a/google.com/document/d/ + // 1iNQDiwDKMh0NQOrCqbj3DKKRT0Dn5_5UJYhmZO-t7Uc/edit + float preference() const { + // The preference value is clamped to two decimal precision. + return static_cast(((priority_ >> 24) * 100 / 127) / 100.0); + } + + // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc + // doesn't use it. + void set_preference(float preference) { + // Limiting priority to UINT_MAX when value exceeds uint32_t max. + // This can happen for e.g. when preference = 3. 
+ uint64_t prio_val = static_cast(preference * 127) << 24; + priority_ = static_cast( + std::min(prio_val, static_cast(UINT_MAX))); + } + + // TODO(honghaiz): Change to usernameFragment or ufrag. + const std::string & username() const { return username_; } + void set_username(const std::string & username) { username_ = username; } + + const std::string & password() const { return password_; } + void set_password(const std::string & password) { password_ = password; } + + const std::string & type() const { return type_; } + void set_type(const std::string & type) { type_ = type; } + + const std::string & network_name() const { return network_name_; } + void set_network_name(const std::string & network_name) { + network_name_ = network_name; + } + + rtc::AdapterType network_type() const { return network_type_; } + void set_network_type(rtc::AdapterType network_type) { + network_type_ = network_type; + } + + // Candidates in a new generation replace those in the old generation. + uint32_t generation() const { return generation_; } + void set_generation(uint32_t generation) { generation_ = generation; } + + // |network_cost| measures the cost/penalty of using this candidate. A network + // cost of 0 indicates this candidate can be used freely. A value of + // rtc::kNetworkCostMax indicates it should be used only as the last resort. + void set_network_cost(uint16_t network_cost) { + RTC_DCHECK_LE(network_cost, rtc::kNetworkCostMax); + network_cost_ = network_cost; + } + uint16_t network_cost() const { return network_cost_; } + + // An ID assigned to the network hosting the candidate. 
+ uint16_t network_id() const { return network_id_; } + void set_network_id(uint16_t network_id) { network_id_ = network_id; } + + const std::string& foundation() const { + return foundation_; + } + void set_foundation(const std::string& foundation) { + foundation_ = foundation; + } + + const rtc::SocketAddress & related_address() const { + return related_address_; + } + void set_related_address( + const rtc::SocketAddress & related_address) { + related_address_ = related_address; + } + const std::string& tcptype() const { return tcptype_; } + void set_tcptype(const std::string& tcptype) { + tcptype_ = tcptype; + } + + // The name of the transport channel of this candidate. + // TODO(phoglund): remove. + const std::string& transport_name() const { return transport_name_; } + void set_transport_name(const std::string& transport_name) { + transport_name_ = transport_name; + } + + // The URL of the ICE server which this candidate is gathered from. + const std::string& url() const { return url_; } + void set_url(const std::string& url) { url_ = url; } + + // Determines whether this candidate is equivalent to the given one. + bool IsEquivalent(const Candidate& c) const; + + // Determines whether this candidate can be considered equivalent to the + // given one when looking for a matching candidate to remove. 
+ bool MatchesForRemoval(const Candidate& c) const; + + std::string ToString() const { + return ToStringInternal(false); + } + + std::string ToSensitiveString() const { + return ToStringInternal(true); + } + + uint32_t GetPriority(uint32_t type_preference, + int network_adapter_preference, + int relay_preference) const; + + bool operator==(const Candidate& o) const; + bool operator!=(const Candidate& o) const; + + private: + std::string ToStringInternal(bool sensitive) const; + + std::string id_; + int component_; + std::string protocol_; + std::string relay_protocol_; + rtc::SocketAddress address_; + uint32_t priority_; + std::string username_; + std::string password_; + std::string type_; + std::string network_name_; + rtc::AdapterType network_type_; + uint32_t generation_; + std::string foundation_; + rtc::SocketAddress related_address_; + std::string tcptype_; + std::string transport_name_; + uint16_t network_id_; + uint16_t network_cost_; + std::string url_; +}; + +} // namespace cricket + +#endif // API_CANDIDATE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/cryptoparams.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/cryptoparams.h new file mode 100644 index 000000000000..2350528358f2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/cryptoparams.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CRYPTOPARAMS_H_ +#define API_CRYPTOPARAMS_H_ + +#include + +namespace cricket { + +// Parameters for SRTP negotiation, as described in RFC 4568. 
+struct CryptoParams { + CryptoParams() : tag(0) {} + CryptoParams(int t, + const std::string& cs, + const std::string& kp, + const std::string& sp) + : tag(t), cipher_suite(cs), key_params(kp), session_params(sp) {} + + bool Matches(const CryptoParams& params) const { + return (tag == params.tag && cipher_suite == params.cipher_suite); + } + + int tag; + std::string cipher_suite; + std::string key_params; + std::string session_params; +}; + +} // namespace cricket + +#endif // API_CRYPTOPARAMS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/datachannelinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/datachannelinterface.h new file mode 100644 index 000000000000..afeb17377f16 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/datachannelinterface.h @@ -0,0 +1,182 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for DataChannels +// http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcdatachannel + +#ifndef API_DATACHANNELINTERFACE_H_ +#define API_DATACHANNELINTERFACE_H_ + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/copyonwritebuffer.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelinit +// TODO(deadbeef): Use rtc::Optional for the "-1 if unset" things. +struct DataChannelInit { + // Deprecated. Reliability is assumed, and channel will be unreliable if + // maxRetransmitTime or MaxRetransmits is set. + bool reliable = false; + + // True if ordered delivery is required. 
+ bool ordered = true; + + // The max period of time in milliseconds in which retransmissions will be + // sent. After this time, no more retransmissions will be sent. -1 if unset. + // + // Cannot be set along with |maxRetransmits|. + int maxRetransmitTime = -1; + + // The max number of retransmissions. -1 if unset. + // + // Cannot be set along with |maxRetransmitTime|. + int maxRetransmits = -1; + + // This is set by the application and opaque to the WebRTC implementation. + std::string protocol; + + // True if the channel has been externally negotiated and we do not send an + // in-band signalling in the form of an "open" message. If this is true, |id| + // below must be set; otherwise it should be unset and will be negotiated + // in-band. + bool negotiated = false; + + // The stream id, or SID, for SCTP data channels. -1 if unset (see above). + int id = -1; +}; + +// At the JavaScript level, data can be passed in as a string or a blob, so +// this structure's |binary| flag tells whether the data should be interpreted +// as binary or text. +struct DataBuffer { + DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary) + : data(data), + binary(binary) { + } + // For convenience for unit tests. + explicit DataBuffer(const std::string& text) + : data(text.data(), text.length()), + binary(false) { + } + size_t size() const { return data.size(); } + + rtc::CopyOnWriteBuffer data; + // Indicates if the received data contains UTF-8 or binary data. + // Note that the upper layers are left to verify the UTF-8 encoding. + // TODO(jiayl): prefer to use an enum instead of a bool. + bool binary; +}; + +// Used to implement RTCDataChannel events. +// +// The code responding to these callbacks should unwind the stack before +// using any other webrtc APIs; re-entrancy is not supported. +class DataChannelObserver { + public: + // The data channel state have changed. + virtual void OnStateChange() = 0; + // A data buffer was successfully received. 
+ virtual void OnMessage(const DataBuffer& buffer) = 0; + // The data channel's buffered_amount has changed. + virtual void OnBufferedAmountChange(uint64_t previous_amount) {} + + protected: + virtual ~DataChannelObserver() {} +}; + +class DataChannelInterface : public rtc::RefCountInterface { + public: + // C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelstate + // Unlikely to change, but keep in sync with DataChannel.java:State and + // RTCDataChannel.h:RTCDataChannelState. + enum DataState { + kConnecting, + kOpen, // The DataChannel is ready to send data. + kClosing, + kClosed + }; + + static const char* DataStateString(DataState state) { + switch (state) { + case kConnecting: + return "connecting"; + case kOpen: + return "open"; + case kClosing: + return "closing"; + case kClosed: + return "closed"; + } + RTC_CHECK(false) << "Unknown DataChannel state: " << state; + return ""; + } + + // Used to receive events from the data channel. Only one observer can be + // registered at a time. UnregisterObserver should be called before the + // observer object is destroyed. + virtual void RegisterObserver(DataChannelObserver* observer) = 0; + virtual void UnregisterObserver() = 0; + + // The label attribute represents a label that can be used to distinguish this + // DataChannel object from other DataChannel objects. + virtual std::string label() const = 0; + + // The accessors below simply return the properties from the DataChannelInit + // the data channel was constructed with. + virtual bool reliable() const = 0; + // TODO(deadbeef): Remove these dummy implementations when all classes have + // implemented these APIs. They should all just return the values the + // DataChannel was created with. 
+ virtual bool ordered() const { return false; } + virtual uint16_t maxRetransmitTime() const { return 0; } + virtual uint16_t maxRetransmits() const { return 0; } + virtual std::string protocol() const { return std::string(); } + virtual bool negotiated() const { return false; } + + // Returns the ID from the DataChannelInit, if it was negotiated out-of-band. + // If negotiated in-band, this ID will be populated once the DTLS role is + // determined, and until then this will return -1. + virtual int id() const = 0; + virtual DataState state() const = 0; + virtual uint32_t messages_sent() const = 0; + virtual uint64_t bytes_sent() const = 0; + virtual uint32_t messages_received() const = 0; + virtual uint64_t bytes_received() const = 0; + + // Returns the number of bytes of application data (UTF-8 text and binary + // data) that have been queued using Send but have not yet been processed at + // the SCTP level. See comment above Send below. + virtual uint64_t buffered_amount() const = 0; + + // Begins the graceful data channel closing procedure. See: + // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-13#section-6.7 + virtual void Close() = 0; + + // Sends |data| to the remote peer. If the data can't be sent at the SCTP + // level (due to congestion control), it's buffered at the data channel level, + // up to a maximum of 16MB. If Send is called while this buffer is full, the + // data channel will be closed abruptly. + // + // So, it's important to use buffered_amount() and OnBufferedAmountChange to + // ensure the data channel is used efficiently but without filling this + // buffer. 
+ virtual bool Send(const DataBuffer& buffer) = 0; + + protected: + virtual ~DataChannelInterface() {} +}; + +} // namespace webrtc + +#endif // API_DATACHANNELINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/dtmfsenderinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/dtmfsenderinterface.h new file mode 100644 index 000000000000..8f0ab71206cf --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/dtmfsenderinterface.h @@ -0,0 +1,97 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_DTMFSENDERINTERFACE_H_ +#define API_DTMFSENDERINTERFACE_H_ + +#include + +#include "api/mediastreaminterface.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// DtmfSender callback interface, used to implement RTCDtmfSender events. +// Applications should implement this interface to get notifications from the +// DtmfSender. +class DtmfSenderObserverInterface { + public: + // Triggered when DTMF |tone| is sent. + // If |tone| is empty that means the DtmfSender has sent out all the given + // tones. + virtual void OnToneChange(const std::string& tone) = 0; + + protected: + virtual ~DtmfSenderObserverInterface() {} +}; + +// The interface of native implementation of the RTCDTMFSender defined by the +// WebRTC W3C Editor's Draft. +// See: https://www.w3.org/TR/webrtc/#peer-to-peer-dtmf +class DtmfSenderInterface : public rtc::RefCountInterface { + public: + // Used to receive events from the DTMF sender. Only one observer can be + // registered at a time. 
UnregisterObserver should be called before the + // observer object is destroyed. + virtual void RegisterObserver(DtmfSenderObserverInterface* observer) = 0; + virtual void UnregisterObserver() = 0; + + // Returns true if this DtmfSender is capable of sending DTMF. Otherwise + // returns false. To be able to send DTMF, the associated RtpSender must be + // able to send packets, and a "telephone-event" codec must be negotiated. + virtual bool CanInsertDtmf() = 0; + + // Queues a task that sends the DTMF |tones|. The |tones| parameter is treated + // as a series of characters. The characters 0 through 9, A through D, #, and + // * generate the associated DTMF tones. The characters a to d are equivalent + // to A to D. The character ',' indicates a delay of 2 seconds before + // processing the next character in the tones parameter. + // + // Unrecognized characters are ignored. + // + // The |duration| parameter indicates the duration in ms to use for each + // character passed in the |tones| parameter. The duration cannot be more + // than 6000 or less than 70. + // + // The |inter_tone_gap| parameter indicates the gap between tones in ms. The + // |inter_tone_gap| must be at least 50 ms but should be as short as + // possible. + // + // If InsertDtmf is called on the same object while an existing task for this + // object to generate DTMF is still running, the previous task is canceled. + // Returns true on success and false on failure. + virtual bool InsertDtmf(const std::string& tones, int duration, + int inter_tone_gap) = 0; + + // Returns the track given as argument to the constructor. Only exists for + // backwards compatibilty; now that DtmfSenders are tied to RtpSenders, it's + // no longer relevant. + virtual const AudioTrackInterface* track() const = 0; + + // Returns the tones remaining to be played out. + virtual std::string tones() const = 0; + + // Returns the current tone duration value in ms. 
+ // This value will be the value last set via the InsertDtmf() method, or the + // default value of 100 ms if InsertDtmf() was never called. + virtual int duration() const = 0; + + // Returns the current value of the between-tone gap in ms. + // This value will be the value last set via the InsertDtmf() method, or the + // default value of 50 ms if InsertDtmf() was never called. + virtual int inter_tone_gap() const = 0; + + protected: + virtual ~DtmfSenderInterface() {} +}; + +} // namespace webrtc + +#endif // API_DTMFSENDERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fakemetricsobserver.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fakemetricsobserver.h new file mode 100644 index 000000000000..3adc5a6547af --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fakemetricsobserver.h @@ -0,0 +1,57 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FAKEMETRICSOBSERVER_H_ +#define API_FAKEMETRICSOBSERVER_H_ + +#include +#include +#include + +#include "api/peerconnectioninterface.h" +#include "rtc_base/thread_checker.h" + +namespace webrtc { + +class FakeMetricsObserver : public MetricsObserverInterface { + public: + FakeMetricsObserver(); + void Reset(); + + void IncrementEnumCounter(PeerConnectionEnumCounterType, + int counter, + int counter_max) override; + void AddHistogramSample(PeerConnectionMetricsName type, + int value) override; + + // Accessors to be used by the tests. 
+ int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const; + int GetHistogramSample(PeerConnectionMetricsName type) const; + + // Returns true if and only if there is a count of 1 for the given counter and + // a count of 0 for all other counters of the given enum type. + bool ExpectOnlySingleEnumCount(PeerConnectionEnumCounterType type, + int counter) const; + + protected: + ~FakeMetricsObserver() {} + + private: + rtc::ThreadChecker thread_checker_; + // The vector contains maps for each counter type. In the map, it's a mapping + // from individual counter to its count, such that it's memory efficient when + // comes to sparse enum types, like the SSL ciphers in the IANA registry. + std::vector> counters_; + int histogram_samples_[kPeerConnectionMetricsName_Max]; +}; + +} // namespace webrtc + +#endif // API_FAKEMETRICSOBSERVER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fec_controller.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fec_controller.h new file mode 100644 index 000000000000..59e86ccedbd8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/fec_controller.h @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FEC_CONTROLLER_H_ +#define API_FEC_CONTROLLER_H_ + +#include +#include + +#include "common_types.h" // NOLINT(build/include) +#include "modules/include/module_fec_types.h" + +namespace webrtc { +// TODO(yinwa): work in progress. API in class FecController should not be +// used by other users until this comment is removed. 
+ +// Callback class used for telling the user about how to configure the FEC, +// and the rates sent the last second is returned to the VCM. +class VCMProtectionCallback { + public: + virtual int ProtectionRequest(const FecProtectionParams* delta_params, + const FecProtectionParams* key_params, + uint32_t* sent_video_rate_bps, + uint32_t* sent_nack_rate_bps, + uint32_t* sent_fec_rate_bps) = 0; + + protected: + virtual ~VCMProtectionCallback() {} +}; + +// FecController calculates how much of the allocated network +// capacity that can be used by an encoder and how much that +// is needed for redundant packets such as FEC and NACK. It uses an +// implementation of |VCMProtectionCallback| to set new FEC parameters and get +// the bitrate currently used for FEC and NACK. +// Usage: +// Setup by calling SetProtectionMethod and SetEncodingData. +// For each encoded image, call UpdateWithEncodedData. +// Each time the bandwidth estimate change, call UpdateFecRates. UpdateFecRates +// will return the bitrate that can be used by an encoder. +// A lock is used to protect internal states, so methods can be called on an +// arbitrary thread. +class FecController { + public: + virtual ~FecController() {} + + virtual void SetProtectionCallback( + VCMProtectionCallback* protection_callback) = 0; + virtual void SetProtectionMethod(bool enable_fec, bool enable_nack) = 0; + + // Informs loss protectoin logic of initial encoding state. + virtual void SetEncodingData(size_t width, + size_t height, + size_t num_temporal_layers, + size_t max_payload_size) = 0; + + // Returns target rate for the encoder given the channel parameters. + // Inputs: estimated_bitrate_bps - the estimated network bitrate in bits/s. + // actual_framerate - encoder frame rate. + // fraction_lost - packet loss rate in % in the network. + // loss_mask_vector - packet loss mask since last time this method + // was called. round_trip_time_ms - round trip time in milliseconds. 
+ virtual uint32_t UpdateFecRates(uint32_t estimated_bitrate_bps, + int actual_framerate, + uint8_t fraction_lost, + std::vector loss_mask_vector, + int64_t round_trip_time_ms) = 0; + + // Informs of encoded output. + virtual void UpdateWithEncodedData(size_t encoded_image_length, + FrameType encoded_image_frametype) = 0; + + // Returns whether this FEC Controller needs Loss Vector Mask as input. + virtual bool UseLossVectorMask() = 0; +}; + +class FecControllerFactoryInterface { + public: + virtual std::unique_ptr CreateFecController() = 0; + virtual ~FecControllerFactoryInterface() = default; +}; + +} // namespace webrtc +#endif // API_FEC_CONTROLLER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsep.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsep.h new file mode 100644 index 000000000000..8fd2dacf65ea --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsep.h @@ -0,0 +1,238 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains declarations of interfaces that wrap SDP-related +// constructs; session descriptions and ICE candidates. The inner "cricket::" +// objects shouldn't be accessed directly; the intention is that an application +// using the PeerConnection API only creates these objects from strings, and +// them passes them into the PeerConnection. +// +// Though in the future, we're planning to provide an SDP parsing API, with a +// structure more friendly than cricket::SessionDescription. 
+ +#ifndef API_JSEP_H_ +#define API_JSEP_H_ + +#include + +#include +#include +#include + +#include "api/optional.h" +#include "api/rtcerror.h" +#include "rtc_base/refcount.h" + +namespace cricket { +class Candidate; +class SessionDescription; +} // namespace cricket + +namespace webrtc { + +struct SdpParseError { + public: + // The sdp line that causes the error. + std::string line; + // Explains the error. + std::string description; +}; + +// Class representation of an ICE candidate. +// +// An instance of this interface is supposed to be owned by one class at +// a time and is therefore not expected to be thread safe. +// +// An instance can be created by CreateIceCandidate. +class IceCandidateInterface { + public: + virtual ~IceCandidateInterface() {} + // If present, this is the value of the "a=mid" attribute of the candidate's + // m= section in SDP, which identifies the m= section. + virtual std::string sdp_mid() const = 0; + // This indicates the index (starting at zero) of m= section this candidate + // is associated with. Needed when an endpoint doesn't support MIDs. + virtual int sdp_mline_index() const = 0; + // Only for use internally. + virtual const cricket::Candidate& candidate() const = 0; + // The URL of the ICE server which this candidate was gathered from. + // TODO(zhihuang): Remove the default implementation once the subclasses + // implement this method. + virtual std::string server_url() const; + // Creates a SDP-ized form of this candidate. + virtual bool ToString(std::string* out) const = 0; +}; + +// Creates a IceCandidateInterface based on SDP string. +// Returns null if the sdp string can't be parsed. +// |error| may be null. +IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid, + int sdp_mline_index, + const std::string& sdp, + SdpParseError* error); + +// This class represents a collection of candidates for a specific m= section. +// Used in SessionDescriptionInterface. 
+class IceCandidateCollection { + public: + virtual ~IceCandidateCollection() {} + virtual size_t count() const = 0; + // Returns true if an equivalent |candidate| exist in the collection. + virtual bool HasCandidate(const IceCandidateInterface* candidate) const = 0; + virtual const IceCandidateInterface* at(size_t index) const = 0; +}; + +// Enum that describes the type of the SessionDescriptionInterface. +// Corresponds to RTCSdpType in the WebRTC specification. +// https://w3c.github.io/webrtc-pc/#dom-rtcsdptype +enum class SdpType { + kOffer, // Description must be treated as an SDP offer. + kPrAnswer, // Description must be treated as an SDP answer, but not a final + // answer. + kAnswer // Description must be treated as an SDP final answer, and the offer- + // answer exchange must be considered complete after receiving this. +}; + +// Returns the string form of the given SDP type. String forms are defined in +// SessionDescriptionInterface. +const char* SdpTypeToString(SdpType type); + +// Returns the SdpType from its string form. The string form can be one of the +// constants defined in SessionDescriptionInterface. Passing in any other string +// results in nullopt. +rtc::Optional SdpTypeFromString(const std::string& type_str); + +// Class representation of an SDP session description. +// +// An instance of this interface is supposed to be owned by one class at a time +// and is therefore not expected to be thread safe. +// +// An instance can be created by CreateSessionDescription. +class SessionDescriptionInterface { + public: + // String representations of the supported SDP types. + static const char kOffer[]; + static const char kPrAnswer[]; + static const char kAnswer[]; + + virtual ~SessionDescriptionInterface() {} + + // Only for use internally. 
+ virtual cricket::SessionDescription* description() = 0; + virtual const cricket::SessionDescription* description() const = 0; + + // Get the session id and session version, which are defined based on + // RFC 4566 for the SDP o= line. + virtual std::string session_id() const = 0; + virtual std::string session_version() const = 0; + + // Returns the type of this session description as an SdpType. Descriptions of + // the various types are found in the SdpType documentation. + // TODO(steveanton): Remove default implementation once Chromium has been + // updated. + virtual SdpType GetType() const; + + // kOffer/kPrAnswer/kAnswer + // TODO(steveanton): Remove this in favor of |GetType| that returns SdpType. + virtual std::string type() const = 0; + + // Adds the specified candidate to the description. + // + // Ownership is not transferred. + // + // Returns false if the session description does not have a media section + // that corresponds to |candidate.sdp_mid()| or + // |candidate.sdp_mline_index()|. + virtual bool AddCandidate(const IceCandidateInterface* candidate) = 0; + + // Removes the candidates from the description, if found. + // + // Returns the number of candidates removed. + virtual size_t RemoveCandidates( + const std::vector& candidates); + + // Returns the number of m= sections in the session description. + virtual size_t number_of_mediasections() const = 0; + + // Returns a collection of all candidates that belong to a certain m= + // section. + virtual const IceCandidateCollection* candidates( + size_t mediasection_index) const = 0; + + // Serializes the description to SDP. + virtual bool ToString(std::string* out) const = 0; +}; + +// Creates a SessionDescriptionInterface based on the SDP string and the type. +// Returns null if the sdp string can't be parsed or the type is unsupported. +// |error| may be null. +// TODO(steveanton): This function is deprecated. Please use the functions below +// which take an SdpType enum instead. 
Remove this once it is no longer used. +SessionDescriptionInterface* CreateSessionDescription(const std::string& type, + const std::string& sdp, + SdpParseError* error); + +// Creates a SessionDescriptionInterface based on the SDP string and the type. +// Returns null if the SDP string cannot be parsed. +// If using the signature with |error_out|, details of the parsing error may be +// written to |error_out| if it is not null. +std::unique_ptr CreateSessionDescription( + SdpType type, + const std::string& sdp); +std::unique_ptr CreateSessionDescription( + SdpType type, + const std::string& sdp, + SdpParseError* error_out); + +// CreateOffer and CreateAnswer callback interface. +class CreateSessionDescriptionObserver : public rtc::RefCountInterface { + public: + // This callback transfers the ownership of the |desc|. + // TODO(deadbeef): Make this take an std::unique_ptr<> to avoid confusion + // around ownership. + virtual void OnSuccess(SessionDescriptionInterface* desc) = 0; + // The OnFailure callback takes an RTCError, which consists of an + // error code and a string. + // RTCError is non-copyable, so it must be passed using std::move. + // Earlier versions of the API used a string argument. This version + // is deprecated; in order to let clients remove the old version, it has a + // default implementation. If both versions are unimplemented, the + // result will be a runtime error (stack overflow). This is intentional. + virtual void OnFailure(RTCError error) { + OnFailure(error.message()); + } + virtual void OnFailure(const std::string& error) { + OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error))); + } + + protected: + ~CreateSessionDescriptionObserver() override = default; +}; + +// SetLocalDescription and SetRemoteDescription callback interface. +class SetSessionDescriptionObserver : public rtc::RefCountInterface { + public: + virtual void OnSuccess() = 0; + // See description in CreateSessionDescriptionObserver for OnFailure. 
+ virtual void OnFailure(RTCError error) { + std::string message(error.message()); + OnFailure(message); + } + virtual void OnFailure(const std::string& error) { + OnFailure(RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error))); + } + + protected: + ~SetSessionDescriptionObserver() override = default; +}; + +} // namespace webrtc + +#endif // API_JSEP_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepicecandidate.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepicecandidate.h new file mode 100644 index 000000000000..dae6121eade1 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepicecandidate.h @@ -0,0 +1,93 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// TODO(deadbeef): Move this out of api/; it's an implementation detail and +// shouldn't be used externally. + +#ifndef API_JSEPICECANDIDATE_H_ +#define API_JSEPICECANDIDATE_H_ + +#include +#include +#include + +#include "api/candidate.h" +#include "api/jsep.h" +#include "rtc_base/constructormagic.h" + +namespace webrtc { + +// Implementation of IceCandidateInterface. +class JsepIceCandidate : public IceCandidateInterface { + public: + JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index); + JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index, + const cricket::Candidate& candidate); + ~JsepIceCandidate(); + // |err| may be null. 
+ bool Initialize(const std::string& sdp, SdpParseError* err); + void SetCandidate(const cricket::Candidate& candidate) { + candidate_ = candidate; + } + + virtual std::string sdp_mid() const { return sdp_mid_; } + virtual int sdp_mline_index() const { return sdp_mline_index_; } + virtual const cricket::Candidate& candidate() const { + return candidate_; + } + + virtual std::string server_url() const { return candidate_.url(); } + + virtual bool ToString(std::string* out) const; + + private: + std::string sdp_mid_; + int sdp_mline_index_; + cricket::Candidate candidate_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate); +}; + +// Implementation of IceCandidateCollection which stores JsepIceCandidates. +class JsepCandidateCollection : public IceCandidateCollection { + public: + JsepCandidateCollection() {} + // Move constructor is defined so that a vector of JsepCandidateCollections + // can be resized. + JsepCandidateCollection(JsepCandidateCollection&& o) + : candidates_(std::move(o.candidates_)) {} + ~JsepCandidateCollection(); + virtual size_t count() const { + return candidates_.size(); + } + virtual bool HasCandidate(const IceCandidateInterface* candidate) const; + // Adds and takes ownership of the JsepIceCandidate. + // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is + // more clear. + virtual void add(JsepIceCandidate* candidate) { + candidates_.push_back(candidate); + } + virtual const IceCandidateInterface* at(size_t index) const { + return candidates_[index]; + } + // Removes the candidate that has a matching address and protocol. + // + // Returns the number of candidates that were removed. 
+ size_t remove(const cricket::Candidate& candidate); + + private: + std::vector candidates_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepCandidateCollection); +}; + +} // namespace webrtc + +#endif // API_JSEPICECANDIDATE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepsessiondescription.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepsessiondescription.h new file mode 100644 index 000000000000..70bb27710a2e --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/jsepsessiondescription.h @@ -0,0 +1,89 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// TODO(deadbeef): Move this out of api/; it's an implementation detail and +// shouldn't be used externally. + +#ifndef API_JSEPSESSIONDESCRIPTION_H_ +#define API_JSEPSESSIONDESCRIPTION_H_ + +#include +#include +#include + +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsepicecandidate.h" +#include "rtc_base/constructormagic.h" + +namespace cricket { +class SessionDescription; +} + +namespace webrtc { + +// Implementation of SessionDescriptionInterface. +class JsepSessionDescription : public SessionDescriptionInterface { + public: + explicit JsepSessionDescription(SdpType type); + // TODO(steveanton): Remove this once callers have switched to SdpType. + explicit JsepSessionDescription(const std::string& type); + virtual ~JsepSessionDescription(); + + // Takes ownership of |description|. + // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is + // more clear. 
+ bool Initialize(cricket::SessionDescription* description, + const std::string& session_id, + const std::string& session_version); + + virtual cricket::SessionDescription* description() { + return description_.get(); + } + virtual const cricket::SessionDescription* description() const { + return description_.get(); + } + virtual std::string session_id() const { + return session_id_; + } + virtual std::string session_version() const { + return session_version_; + } + virtual SdpType GetType() const { return type_; } + virtual std::string type() const { return SdpTypeToString(type_); } + // Allows changing the type. Used for testing. + virtual bool AddCandidate(const IceCandidateInterface* candidate); + virtual size_t RemoveCandidates( + const std::vector& candidates); + virtual size_t number_of_mediasections() const; + virtual const IceCandidateCollection* candidates( + size_t mediasection_index) const; + virtual bool ToString(std::string* out) const; + + static const int kDefaultVideoCodecId; + static const char kDefaultVideoCodecName[]; + + private: + std::unique_ptr description_; + std::string session_id_; + std::string session_version_; + SdpType type_; + std::vector candidate_collection_; + + bool GetMediasectionIndex(const IceCandidateInterface* candidate, + size_t* index); + int GetMediasectionIndex(const cricket::Candidate& candidate); + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription); +}; + +} // namespace webrtc + +#endif // API_JSEPSESSIONDESCRIPTION_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediaconstraintsinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediaconstraintsinterface.h new file mode 100644 index 000000000000..90661b893a3b --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediaconstraintsinterface.h @@ -0,0 +1,149 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains the interface for MediaConstraints, corresponding to +// the definition at +// http://www.w3.org/TR/mediacapture-streams/#mediastreamconstraints and also +// used in WebRTC: http://dev.w3.org/2011/webrtc/editor/webrtc.html#constraints. + +// This interface is being deprecated in Chrome, and may be removed +// from WebRTC too. +// https://bugs.chromium.org/p/webrtc/issues/detail?id=5617 + +#ifndef API_MEDIACONSTRAINTSINTERFACE_H_ +#define API_MEDIACONSTRAINTSINTERFACE_H_ + +#include +#include + +#include "api/optional.h" +#include "api/peerconnectioninterface.h" + +namespace webrtc { + +// Interface used for passing arguments about media constraints +// to the MediaStream and PeerConnection implementation. +// +// Constraints may be either "mandatory", which means that unless satisfied, +// the method taking the constraints should fail, or "optional", which means +// they may not be satisfied.. +class MediaConstraintsInterface { + public: + struct Constraint { + Constraint() {} + Constraint(const std::string& key, const std::string value) + : key(key), value(value) { + } + std::string key; + std::string value; + }; + + class Constraints : public std::vector { + public: + bool FindFirst(const std::string& key, std::string* value) const; + }; + + // Constraint keys used by a local video source. 
+ // Specified by draft-alvestrand-constraints-resolution-00b + static const char kMinAspectRatio[]; // minAspectRatio + static const char kMaxAspectRatio[]; // maxAspectRatio + static const char kMaxWidth[]; // maxWidth + static const char kMinWidth[]; // minWidth + static const char kMaxHeight[]; // maxHeight + static const char kMinHeight[]; // minHeight + static const char kMaxFrameRate[]; // maxFrameRate + static const char kMinFrameRate[]; // minFrameRate + + // Constraint keys used by a local audio source. + static const char kEchoCancellation[]; // echoCancellation + + // These keys are google specific. + static const char kGoogEchoCancellation[]; // googEchoCancellation + + static const char kExtendedFilterEchoCancellation[]; // googEchoCancellation2 + static const char kDAEchoCancellation[]; // googDAEchoCancellation + static const char kAutoGainControl[]; // googAutoGainControl + static const char kExperimentalAutoGainControl[]; // googAutoGainControl2 + static const char kNoiseSuppression[]; // googNoiseSuppression + static const char kExperimentalNoiseSuppression[]; // googNoiseSuppression2 + static const char kIntelligibilityEnhancer[]; // intelligibilityEnhancer + static const char kHighpassFilter[]; // googHighpassFilter + static const char kTypingNoiseDetection[]; // googTypingNoiseDetection + static const char kAudioMirroring[]; // googAudioMirroring + static const char + kAudioNetworkAdaptorConfig[]; // goodAudioNetworkAdaptorConfig + + // Google-specific constraint keys for a local video source + static const char kNoiseReduction[]; // googNoiseReduction + + // Constraint keys for CreateOffer / CreateAnswer + // Specified by the W3C PeerConnection spec + static const char kOfferToReceiveVideo[]; // OfferToReceiveVideo + static const char kOfferToReceiveAudio[]; // OfferToReceiveAudio + static const char kVoiceActivityDetection[]; // VoiceActivityDetection + static const char kIceRestart[]; // IceRestart + // These keys are google specific. 
+ static const char kUseRtpMux[]; // googUseRtpMUX + + // Constraints values. + static const char kValueTrue[]; // true + static const char kValueFalse[]; // false + + // PeerConnection constraint keys. + // Temporary pseudo-constraints used to enable DTLS-SRTP + static const char kEnableDtlsSrtp[]; // Enable DTLS-SRTP + // Temporary pseudo-constraints used to enable DataChannels + static const char kEnableRtpDataChannels[]; // Enable RTP DataChannels + // Google-specific constraint keys. + // Temporary pseudo-constraint for enabling DSCP through JS. + static const char kEnableDscp[]; // googDscp + // Constraint to enable IPv6 through JS. + static const char kEnableIPv6[]; // googIPv6 + // Temporary constraint to enable suspend below min bitrate feature. + static const char kEnableVideoSuspendBelowMinBitrate[]; + // googSuspendBelowMinBitrate + // Constraint to enable combined audio+video bandwidth estimation. + static const char kCombinedAudioVideoBwe[]; // googCombinedAudioVideoBwe + static const char kScreencastMinBitrate[]; // googScreencastMinBitrate + static const char kCpuOveruseDetection[]; // googCpuOveruseDetection + static const char kPayloadPadding[]; // googPayloadPadding + + // The prefix of internal-only constraints whose JS set values should be + // stripped by Chrome before passed down to Libjingle. + static const char kInternalConstraintPrefix[]; + + virtual ~MediaConstraintsInterface() = default; + + virtual const Constraints& GetMandatory() const = 0; + virtual const Constraints& GetOptional() const = 0; +}; + +bool FindConstraint(const MediaConstraintsInterface* constraints, + const std::string& key, bool* value, + size_t* mandatory_constraints); + +bool FindConstraint(const MediaConstraintsInterface* constraints, + const std::string& key, + int* value, + size_t* mandatory_constraints); + +// Copy all relevant constraints into an RTCConfiguration object. 
+void CopyConstraintsIntoRtcConfiguration( + const MediaConstraintsInterface* constraints, + PeerConnectionInterface::RTCConfiguration* configuration); + +// Copy all relevant constraints into an AudioOptions object. +void CopyConstraintsIntoAudioOptions( + const MediaConstraintsInterface* constraints, + cricket::AudioOptions* options); + +} // namespace webrtc + +#endif // API_MEDIACONSTRAINTSINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreaminterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreaminterface.h new file mode 100644 index 000000000000..195ecc6a50ee --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreaminterface.h @@ -0,0 +1,336 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for MediaStream, MediaTrack and MediaSource. +// These interfaces are used for implementing MediaStream and MediaTrack as +// defined in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These +// interfaces must be used only with PeerConnection. PeerConnectionManager +// interface provides the factory methods to create MediaStream and MediaTracks. + +#ifndef API_MEDIASTREAMINTERFACE_H_ +#define API_MEDIASTREAMINTERFACE_H_ + +#include + +#include +#include + +#include "api/optional.h" +#include "api/video/video_frame.h" +// TODO(zhihuang): Remove unrelated headers once downstream applications stop +// relying on them; they were previously transitively included by +// mediachannel.h, which is no longer a dependency of this file. 
+#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "rtc_base/ratetracker.h" +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" +#include "rtc_base/thread.h" +#include "rtc_base/timeutils.h" + +namespace webrtc { + +// Generic observer interface. +class ObserverInterface { + public: + virtual void OnChanged() = 0; + + protected: + virtual ~ObserverInterface() {} +}; + +class NotifierInterface { + public: + virtual void RegisterObserver(ObserverInterface* observer) = 0; + virtual void UnregisterObserver(ObserverInterface* observer) = 0; + + virtual ~NotifierInterface() {} +}; + +// Base class for sources. A MediaStreamTrack has an underlying source that +// provides media. A source can be shared by multiple tracks. +class MediaSourceInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + enum SourceState { + kInitializing, + kLive, + kEnded, + kMuted + }; + + virtual SourceState state() const = 0; + + virtual bool remote() const = 0; + + protected: + ~MediaSourceInterface() override = default; +}; + +// C++ version of MediaStreamTrack. +// See: https://www.w3.org/TR/mediacapture-streams/#mediastreamtrack +class MediaStreamTrackInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + enum TrackState { + kLive, + kEnded, + }; + + static const char kAudioKind[]; + static const char kVideoKind[]; + + // The kind() method must return kAudioKind only if the object is a + // subclass of AudioTrackInterface, and kVideoKind only if the + // object is a subclass of VideoTrackInterface. It is typically used + // to protect a static_cast<> to the corresponding subclass. + virtual std::string kind() const = 0; + + // Track identifier. + virtual std::string id() const = 0; + + // A disabled track will produce silence (if audio) or black frames (if + // video). 
Can be disabled and re-enabled. + virtual bool enabled() const = 0; + virtual bool set_enabled(bool enable) = 0; + + // Live or ended. A track will never be live again after becoming ended. + virtual TrackState state() const = 0; + + protected: + ~MediaStreamTrackInterface() override = default; +}; + +// VideoTrackSourceInterface is a reference counted source used for +// VideoTracks. The same source can be used by multiple VideoTracks. +// VideoTrackSourceInterface is designed to be invoked on the signaling thread +// except for rtc::VideoSourceInterface methods that will be invoked +// on the worker thread via a VideoTrack. A custom implementation of a source +// can inherit AdaptedVideoTrackSource instead of directly implementing this +// interface. +class VideoTrackSourceInterface + : public MediaSourceInterface, + public rtc::VideoSourceInterface { + public: + struct Stats { + // Original size of captured frame, before video adaptation. + int input_width; + int input_height; + }; + + // Indicates that parameters suitable for screencasts should be automatically + // applied to RtpSenders. + // TODO(perkj): Remove these once all known applications have moved to + // explicitly setting suitable parameters for screencasts and don't need this + // implicit behavior. + virtual bool is_screencast() const = 0; + + // Indicates that the encoder should denoise video before encoding it. + // If it is not set, the default configuration is used which is different + // depending on video codec. + // TODO(perkj): Remove this once denoising is done by the source, and not by + // the encoder. + virtual rtc::Optional needs_denoising() const = 0; + + // Returns false if no stats are available, e.g, for a remote source, or a + // source which has not seen its first frame yet. + // + // Implementation should avoid blocking. 
+ virtual bool GetStats(Stats* stats) = 0; + + protected: + ~VideoTrackSourceInterface() override = default; +}; + +// VideoTrackInterface is designed to be invoked on the signaling thread except +// for rtc::VideoSourceInterface methods that must be invoked +// on the worker thread. +// PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack +// that ensures thread safety and that all methods are called on the right +// thread. +class VideoTrackInterface + : public MediaStreamTrackInterface, + public rtc::VideoSourceInterface { + public: + // Video track content hint, used to override the source is_screencast + // property. + // See https://crbug.com/653531 and https://github.com/WICG/mst-content-hint. + enum class ContentHint { kNone, kFluid, kDetailed }; + + // Register a video sink for this track. Used to connect the track to the + // underlying video engine. + void AddOrUpdateSink(rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) override {} + void RemoveSink(rtc::VideoSinkInterface* sink) override {} + + virtual VideoTrackSourceInterface* GetSource() const = 0; + + virtual ContentHint content_hint() const; + virtual void set_content_hint(ContentHint hint) {} + + protected: + ~VideoTrackInterface() override = default; +}; + +// Interface for receiving audio data from a AudioTrack. +class AudioTrackSinkInterface { + public: + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) = 0; + + protected: + virtual ~AudioTrackSinkInterface() {} +}; + +// AudioSourceInterface is a reference counted source used for AudioTracks. +// The same source can be used by multiple AudioTracks. 
+class AudioSourceInterface : public MediaSourceInterface { + public: + class AudioObserver { + public: + virtual void OnSetVolume(double volume) = 0; + + protected: + virtual ~AudioObserver() {} + }; + + // TODO(deadbeef): Makes all the interfaces pure virtual after they're + // implemented in chromium. + + // Sets the volume of the source. |volume| is in the range of [0, 10]. + // TODO(tommi): This method should be on the track and ideally volume should + // be applied in the track in a way that does not affect clones of the track. + virtual void SetVolume(double volume) {} + + // Registers/unregisters observers to the audio source. + virtual void RegisterAudioObserver(AudioObserver* observer) {} + virtual void UnregisterAudioObserver(AudioObserver* observer) {} + + // TODO(tommi): Make pure virtual. + virtual void AddSink(AudioTrackSinkInterface* sink) {} + virtual void RemoveSink(AudioTrackSinkInterface* sink) {} +}; + +// Interface of the audio processor used by the audio track to collect +// statistics. +class AudioProcessorInterface : public rtc::RefCountInterface { + public: + // Deprecated, use AudioProcessorStatistics instead. + // TODO(ivoc): Remove this when all implementations have switched to the new + // GetStats function. See b/67926135. 
+ struct AudioProcessorStats { + AudioProcessorStats() + : typing_noise_detected(false), + echo_return_loss(0), + echo_return_loss_enhancement(0), + echo_delay_median_ms(0), + echo_delay_std_ms(0), + residual_echo_likelihood(0.0f), + residual_echo_likelihood_recent_max(0.0f), + aec_divergent_filter_fraction(0.0) {} + ~AudioProcessorStats() {} + + bool typing_noise_detected; + int echo_return_loss; + int echo_return_loss_enhancement; + int echo_delay_median_ms; + int echo_delay_std_ms; + float residual_echo_likelihood; + float residual_echo_likelihood_recent_max; + float aec_divergent_filter_fraction; + }; + // This struct maintains the optionality of the stats, and will replace the + // regular stats struct when all users have been updated. + struct AudioProcessorStatistics { + bool typing_noise_detected = false; + AudioProcessingStats apm_statistics; + }; + + // Get audio processor statistics. + virtual void GetStats(AudioProcessorStats* stats); + + // Get audio processor statistics. The |has_remote_tracks| argument should be + // set if there are active remote tracks (this would usually be true during + // a call). If there are no remote tracks some of the stats will not be set by + // the AudioProcessor, because they only make sense if there is at least one + // remote track. + // TODO(ivoc): Make pure virtual when all implementions are updated. + virtual AudioProcessorStatistics GetStats(bool has_remote_tracks); + + protected: + ~AudioProcessorInterface() override = default; +}; + +class AudioTrackInterface : public MediaStreamTrackInterface { + public: + // TODO(deadbeef): Figure out if the following interface should be const or + // not. + virtual AudioSourceInterface* GetSource() const = 0; + + // Add/Remove a sink that will receive the audio data from the track. + virtual void AddSink(AudioTrackSinkInterface* sink) = 0; + virtual void RemoveSink(AudioTrackSinkInterface* sink) = 0; + + // Get the signal level from the audio track. 
+ // Return true on success, otherwise false. + // TODO(deadbeef): Change the interface to int GetSignalLevel() and pure + // virtual after it's implemented in chromium. + virtual bool GetSignalLevel(int* level); + + // Get the audio processor used by the audio track. Return null if the track + // does not have any processor. + // TODO(deadbeef): Make the interface pure virtual. + virtual rtc::scoped_refptr GetAudioProcessor(); + + protected: + ~AudioTrackInterface() override = default; +}; + +typedef std::vector > + AudioTrackVector; +typedef std::vector > + VideoTrackVector; + +// C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream. +// +// A major difference is that remote audio/video tracks (received by a +// PeerConnection/RtpReceiver) are not synchronized simply by adding them to +// the same stream; a session description with the correct "a=msid" attributes +// must be pushed down. +// +// Thus, this interface acts as simply a container for tracks. +class MediaStreamInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + virtual std::string id() const = 0; + + virtual AudioTrackVector GetAudioTracks() = 0; + virtual VideoTrackVector GetVideoTracks() = 0; + virtual rtc::scoped_refptr + FindAudioTrack(const std::string& track_id) = 0; + virtual rtc::scoped_refptr + FindVideoTrack(const std::string& track_id) = 0; + + virtual bool AddTrack(AudioTrackInterface* track) = 0; + virtual bool AddTrack(VideoTrackInterface* track) = 0; + virtual bool RemoveTrack(AudioTrackInterface* track) = 0; + virtual bool RemoveTrack(VideoTrackInterface* track) = 0; + + protected: + ~MediaStreamInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_MEDIASTREAMINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamproxy.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamproxy.h new file mode 100644 index 000000000000..3f261db0a891 --- 
/dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamproxy.h @@ -0,0 +1,44 @@ +/* + * Copyright 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_MEDIASTREAMPROXY_H_ +#define API_MEDIASTREAMPROXY_H_ + +#include + +#include "api/mediastreaminterface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(MediaStream) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_CONSTMETHOD0(std::string, id) + PROXY_METHOD0(AudioTrackVector, GetAudioTracks) + PROXY_METHOD0(VideoTrackVector, GetVideoTracks) + PROXY_METHOD1(rtc::scoped_refptr, + FindAudioTrack, + const std::string&) + PROXY_METHOD1(rtc::scoped_refptr, + FindVideoTrack, + const std::string&) + PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*) + PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*) + PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*) + PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*) + PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) + PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_MEDIASTREAMPROXY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamtrackproxy.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamtrackproxy.h new file mode 100644 index 000000000000..57a769559980 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediastreamtrackproxy.h @@ -0,0 +1,65 @@ +/* 
+ * Copyright 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file includes proxy classes for tracks. The purpose is +// to make sure tracks are only accessed from the signaling thread. + +#ifndef API_MEDIASTREAMTRACKPROXY_H_ +#define API_MEDIASTREAMTRACKPROXY_H_ + +#include + +#include "api/mediastreaminterface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. + +BEGIN_SIGNALING_PROXY_MAP(AudioTrack) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_CONSTMETHOD0(std::string, kind) + PROXY_CONSTMETHOD0(std::string, id) + PROXY_CONSTMETHOD0(TrackState, state) + PROXY_CONSTMETHOD0(bool, enabled) + PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource) + PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*) + PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*) + PROXY_METHOD1(bool, GetSignalLevel, int*) + PROXY_METHOD0(rtc::scoped_refptr, GetAudioProcessor) + PROXY_METHOD1(bool, set_enabled, bool) + PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) + PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +BEGIN_PROXY_MAP(VideoTrack) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_CONSTMETHOD0(std::string, kind) + PROXY_CONSTMETHOD0(std::string, id) + PROXY_CONSTMETHOD0(TrackState, state) + PROXY_CONSTMETHOD0(bool, enabled) + PROXY_METHOD1(bool, set_enabled, bool) + PROXY_CONSTMETHOD0(ContentHint, content_hint) + PROXY_METHOD1(void, set_content_hint, ContentHint) + PROXY_WORKER_METHOD2(void, + AddOrUpdateSink, + rtc::VideoSinkInterface*, + const 
rtc::VideoSinkWants&) + PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) + PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) + + PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) + PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_MEDIASTREAMTRACKPROXY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediatypes.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediatypes.h new file mode 100644 index 000000000000..93ce1a231ddd --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/mediatypes.h @@ -0,0 +1,31 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_MEDIATYPES_H_ +#define API_MEDIATYPES_H_ + +#include + +namespace cricket { + +enum MediaType { + MEDIA_TYPE_AUDIO, + MEDIA_TYPE_VIDEO, + MEDIA_TYPE_DATA +}; + +std::string MediaTypeToString(MediaType type); +// Aborts on invalid string. Only expected to be used on strings that are +// guaranteed to be valid, such as MediaStreamTrackInterface::kind(). +MediaType MediaTypeFromString(const std::string& type_str); + +} // namespace cricket + +#endif // API_MEDIATYPES_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/notifier.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/notifier.h new file mode 100644 index 000000000000..ceeda4de557d --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/notifier.h @@ -0,0 +1,61 @@ +/* + * Copyright 2011 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NOTIFIER_H_ +#define API_NOTIFIER_H_ + +#include + +#include "api/mediastreaminterface.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +// Implements a template version of a notifier. +// TODO(deadbeef): This is an implementation detail; move out of api/. +template +class Notifier : public T { + public: + Notifier() { + } + + virtual void RegisterObserver(ObserverInterface* observer) { + RTC_DCHECK(observer != nullptr); + observers_.push_back(observer); + } + + virtual void UnregisterObserver(ObserverInterface* observer) { + for (std::list::iterator it = observers_.begin(); + it != observers_.end(); it++) { + if (*it == observer) { + observers_.erase(it); + break; + } + } + } + + void FireOnChanged() { + // Copy the list of observers to avoid a crash if the observer object + // unregisters as a result of the OnChanged() call. If the same list is used + // UnregisterObserver will affect the list make the iterator invalid. + std::list observers = observers_; + for (std::list::iterator it = observers.begin(); + it != observers.end(); ++it) { + (*it)->OnChanged(); + } + } + + protected: + std::list observers_; +}; + +} // namespace webrtc + +#endif // API_NOTIFIER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/optional.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/optional.h new file mode 100644 index 000000000000..ba06831a29f8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/optional.h @@ -0,0 +1,443 @@ +/* + * Copyright 2015 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_OPTIONAL_H_ +#define API_OPTIONAL_H_ + +#include +#include +#include + +#ifdef UNIT_TEST +#include +#include +#endif // UNIT_TEST + +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/sanitizer.h" + +namespace rtc { + +namespace optional_internal { + +#if RTC_HAS_ASAN + +// This is a non-inlined function. The optimizer can't see inside it. It +// prevents the compiler from generating optimized code that reads value_ even +// if it is unset. Although safe, this causes memory sanitizers to complain. +const void* FunctionThatDoesNothingImpl(const void*); + +template +inline const T* FunctionThatDoesNothing(T* x) { + return reinterpret_cast( + FunctionThatDoesNothingImpl(reinterpret_cast(x))); +} + +#else + +template +inline const T* FunctionThatDoesNothing(T* x) { + return x; +} + +#endif + +struct NulloptArg; + +} // namespace optional_internal + +// nullopt_t must be a non-aggregate literal type with a constexpr constructor +// that takes some implementation-defined literal type. It mustn't have a +// default constructor nor an initializer-list constructor. +// See: +// http://en.cppreference.com/w/cpp/utility/optional/nullopt_t +// That page uses int, though this seems to confuse older versions of GCC. +struct nullopt_t { + constexpr explicit nullopt_t(rtc::optional_internal::NulloptArg&) {} +}; + +// Specification: +// http://en.cppreference.com/w/cpp/utility/optional/nullopt +extern const nullopt_t nullopt; + +// Simple std::optional-wannabe. It either contains a T or not. 
+// +// A moved-from Optional may only be destroyed, and assigned to if T allows +// being assigned to after having been moved from. Specifically, you may not +// assume that it just doesn't contain a value anymore. +// +// Examples of good places to use Optional: +// +// - As a class or struct member, when the member doesn't always have a value: +// struct Prisoner { +// std::string name; +// Optional cell_number; // Empty if not currently incarcerated. +// }; +// +// - As a return value for functions that may fail to return a value on all +// allowed inputs. For example, a function that searches an array might +// return an Optional (the index where it found the element, or +// nothing if it didn't find it); and a function that parses numbers might +// return Optional (the parsed number, or nothing if parsing failed). +// +// Examples of bad places to use Optional: +// +// - As a return value for functions that may fail because of disallowed +// inputs. For example, a string length function should not return +// Optional so that it can return nothing in case the caller passed +// it a null pointer; the function should probably use RTC_[D]CHECK instead, +// and return plain size_t. +// +// - As a return value for functions that may fail to return a value on all +// allowed inputs, but need to tell the caller what went wrong. Returning +// Optional when parsing a single number as in the example above +// might make sense, but any larger parse job is probably going to need to +// tell the caller what the problem was, not just that there was one. +// +// - As a non-mutable function argument. When you want to pass a value of a +// type T that can fail to be there, const T* is almost always both fastest +// and cleanest. (If you're *sure* that the the caller will always already +// have an Optional, const Optional& is slightly faster than const T*, +// but this is a micro-optimization. In general, stick to const T*.) 
+// +// TODO(kwiberg): Get rid of this class when the standard library has +// std::optional (and we're allowed to use it). +template +class Optional final { + public: + // Construct an empty Optional. + Optional() : has_value_(false), empty_('\0') { PoisonValue(); } + + Optional(rtc::nullopt_t) // NOLINT(runtime/explicit) + : Optional() {} + + // Construct an Optional that contains a value. + Optional(const T& value) // NOLINT(runtime/explicit) + : has_value_(true) { + new (&value_) T(value); + } + Optional(T&& value) // NOLINT(runtime/explicit) + : has_value_(true) { + new (&value_) T(std::move(value)); + } + + // Copy constructor: copies the value from m if it has one. + Optional(const Optional& m) : has_value_(m.has_value_) { + if (has_value_) + new (&value_) T(m.value_); + else + PoisonValue(); + } + + // Move constructor: if m has a value, moves the value from m, leaving m + // still in a state where it has a value, but a moved-from one (the + // properties of which depends on T; the only general guarantee is that we + // can destroy m). + Optional(Optional&& m) : has_value_(m.has_value_) { + if (has_value_) + new (&value_) T(std::move(m.value_)); + else + PoisonValue(); + } + + ~Optional() { + if (has_value_) + value_.~T(); + else + UnpoisonValue(); + } + + Optional& operator=(rtc::nullopt_t) { + reset(); + return *this; + } + + // Copy assignment. Uses T's copy assignment if both sides have a value, T's + // copy constructor if only the right-hand side has a value. + Optional& operator=(const Optional& m) { + if (m.has_value_) { + if (has_value_) { + value_ = m.value_; // T's copy assignment. + } else { + UnpoisonValue(); + new (&value_) T(m.value_); // T's copy constructor. + has_value_ = true; + } + } else { + reset(); + } + return *this; + } + + // Move assignment. Uses T's move assignment if both sides have a value, T's + // move constructor if only the right-hand side has a value. 
The state of m + // after it's been moved from is as for the move constructor. + Optional& operator=(Optional&& m) { + if (m.has_value_) { + if (has_value_) { + value_ = std::move(m.value_); // T's move assignment. + } else { + UnpoisonValue(); + new (&value_) T(std::move(m.value_)); // T's move constructor. + has_value_ = true; + } + } else { + reset(); + } + return *this; + } + + // Swap the values if both m1 and m2 have values; move the value if only one + // of them has one. + friend void swap(Optional& m1, Optional& m2) { + if (m1.has_value_) { + if (m2.has_value_) { + // Both have values: swap. + using std::swap; + swap(m1.value_, m2.value_); + } else { + // Only m1 has a value: move it to m2. + m2.UnpoisonValue(); + new (&m2.value_) T(std::move(m1.value_)); + m1.value_.~T(); // Destroy the moved-from value. + m1.has_value_ = false; + m2.has_value_ = true; + m1.PoisonValue(); + } + } else if (m2.has_value_) { + // Only m2 has a value: move it to m1. + m1.UnpoisonValue(); + new (&m1.value_) T(std::move(m2.value_)); + m2.value_.~T(); // Destroy the moved-from value. + m1.has_value_ = true; + m2.has_value_ = false; + m2.PoisonValue(); + } + } + + // Destroy any contained value. Has no effect if we have no value. + void reset() { + if (!has_value_) + return; + value_.~T(); + has_value_ = false; + PoisonValue(); + } + + template + void emplace(Args&&... args) { + if (has_value_) + value_.~T(); + else + UnpoisonValue(); + new (&value_) T(std::forward(args)...); + has_value_ = true; + } + + // Conversion to bool to test if we have a value. + explicit operator bool() const { return has_value_; } + bool has_value() const { return has_value_; } + + // Dereferencing. Only allowed if we have a value. 
+ const T* operator->() const { + RTC_DCHECK(has_value_); + return &value_; + } + T* operator->() { + RTC_DCHECK(has_value_); + return &value_; + } + const T& operator*() const { + RTC_DCHECK(has_value_); + return value_; + } + T& operator*() { + RTC_DCHECK(has_value_); + return value_; + } + const T& value() const { + RTC_DCHECK(has_value_); + return value_; + } + T& value() { + RTC_DCHECK(has_value_); + return value_; + } + + // Dereference with a default value in case we don't have a value. + const T& value_or(const T& default_val) const { + // The no-op call prevents the compiler from generating optimized code that + // reads value_ even if !has_value_, but only if FunctionThatDoesNothing is + // not completely inlined; see its declaration.). + return has_value_ ? *optional_internal::FunctionThatDoesNothing(&value_) + : default_val; + } + + // Equality tests. Two Optionals are equal if they contain equivalent values, + // or if they're both empty. + friend bool operator==(const Optional& m1, const Optional& m2) { + return m1.has_value_ && m2.has_value_ ? m1.value_ == m2.value_ + : m1.has_value_ == m2.has_value_; + } + friend bool operator==(const Optional& opt, const T& value) { + return opt.has_value_ && opt.value_ == value; + } + friend bool operator==(const T& value, const Optional& opt) { + return opt.has_value_ && value == opt.value_; + } + + friend bool operator==(const Optional& opt, rtc::nullopt_t) { + return !opt.has_value_; + } + + friend bool operator==(rtc::nullopt_t, const Optional& opt) { + return !opt.has_value_; + } + + friend bool operator!=(const Optional& m1, const Optional& m2) { + return m1.has_value_ && m2.has_value_ ? 
m1.value_ != m2.value_ + : m1.has_value_ != m2.has_value_; + } + friend bool operator!=(const Optional& opt, const T& value) { + return !opt.has_value_ || opt.value_ != value; + } + friend bool operator!=(const T& value, const Optional& opt) { + return !opt.has_value_ || value != opt.value_; + } + + friend bool operator!=(const Optional& opt, rtc::nullopt_t) { + return opt.has_value_; + } + + friend bool operator!=(rtc::nullopt_t, const Optional& opt) { + return opt.has_value_; + } + + private: + // Tell sanitizers that value_ shouldn't be touched. + void PoisonValue() { + rtc::AsanPoison(rtc::MakeArrayView(&value_, 1)); + rtc::MsanMarkUninitialized(rtc::MakeArrayView(&value_, 1)); + } + + // Tell sanitizers that value_ is OK to touch again. + void UnpoisonValue() { rtc::AsanUnpoison(rtc::MakeArrayView(&value_, 1)); } + + bool has_value_; // True iff value_ contains a live value. + union { + // empty_ exists only to make it possible to initialize the union, even when + // it doesn't contain any data. If the union goes uninitialized, it may + // trigger compiler warnings. + char empty_; + // By placing value_ in a union, we get to manage its construction and + // destruction manually: the Optional constructors won't automatically + // construct it, and the Optional destructor won't automatically destroy + // it. Basically, this just allocates a properly sized and aligned block of + // memory in which we can manually put a T with placement new. + T value_; + }; +}; + +#ifdef UNIT_TEST +namespace optional_internal { + +// Checks if there's a valid PrintTo(const T&, std::ostream*) call for T. +template +struct HasPrintTo { + private: + struct No {}; + + template + static auto Test(const T2& obj) + -> decltype(PrintTo(obj, std::declval())); + + template + static No Test(...); + + public: + static constexpr bool value = + !std::is_same(std::declval())), No>::value; +}; + +// Checks if there's a valid operator<<(std::ostream&, const T&) call for T. 
+template +struct HasOstreamOperator { + private: + struct No {}; + + template + static auto Test(const T2& obj) + -> decltype(std::declval() << obj); + + template + static No Test(...); + + public: + static constexpr bool value = + !std::is_same(std::declval())), No>::value; +}; + +// Prefer using PrintTo to print the object. +template +typename std::enable_if::value, void>::type OptionalPrintToHelper( + const T& value, + std::ostream* os) { + PrintTo(value, os); +} + +// Fall back to operator<<(std::ostream&, ...) if it exists. +template +typename std::enable_if::value && !HasPrintTo::value, + void>::type +OptionalPrintToHelper(const T& value, std::ostream* os) { + *os << value; +} + +inline void OptionalPrintObjectBytes(const unsigned char* bytes, + size_t size, + std::ostream* os) { + *os << "(bytes[i]); + } + *os << "]>"; +} + +// As a final back-up, just print the contents of the objcets byte-wise. +template +typename std::enable_if::value && !HasPrintTo::value, + void>::type +OptionalPrintToHelper(const T& value, std::ostream* os) { + OptionalPrintObjectBytes(reinterpret_cast(&value), + sizeof(value), os); +} + +} // namespace optional_internal + +// PrintTo is used by gtest to print out the results of tests. We want to ensure +// the object contained in an Optional can be printed out if it's set, while +// avoiding touching the object's storage if it is undefined. 
+template +void PrintTo(const rtc::Optional& opt, std::ostream* os) { + if (opt) { + optional_internal::OptionalPrintToHelper(*opt, os); + } else { + *os << ""; + } +} + +#endif // UNIT_TEST + +} // namespace rtc + +#endif // API_OPTIONAL_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/mediadescription.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/mediadescription.h new file mode 100644 index 000000000000..1a6d0e9037c2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/mediadescription.h @@ -0,0 +1,53 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_MEDIADESCRIPTION_H_ +#define API_ORTC_MEDIADESCRIPTION_H_ + +#include +#include +#include + +#include "api/cryptoparams.h" +#include "api/optional.h" + +namespace webrtc { + +// A structured representation of a media description within an SDP session +// description. +class MediaDescription { + public: + explicit MediaDescription(std::string mid) : mid_(std::move(mid)) {} + + ~MediaDescription() {} + + // The mid(media stream identification) is used for identifying media streams + // within a session description. + // https://tools.ietf.org/html/rfc5888#section-6 + rtc::Optional mid() const { return mid_; } + void set_mid(std::string mid) { mid_.emplace(std::move(mid)); } + + // Security keys and parameters for this media stream. Can be used to + // negotiate parameters for SRTP. 
+ // https://tools.ietf.org/html/rfc4568#page-5 + std::vector& sdes_params() { return sdes_params_; } + const std::vector& sdes_params() const { + return sdes_params_; + } + + private: + rtc::Optional mid_; + + std::vector sdes_params_; +}; + +} // namespace webrtc + +#endif // API_ORTC_MEDIADESCRIPTION_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcfactoryinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcfactoryinterface.h new file mode 100644 index 000000000000..ea25c8a3cbcd --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcfactoryinterface.h @@ -0,0 +1,232 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_ORTCFACTORYINTERFACE_H_ +#define API_ORTC_ORTCFACTORYINTERFACE_H_ + +#include +#include +#include // For std::move. 
+ +#include "api/mediaconstraintsinterface.h" +#include "api/mediastreaminterface.h" +#include "api/mediatypes.h" +#include "api/ortc/ortcrtpreceiverinterface.h" +#include "api/ortc/ortcrtpsenderinterface.h" +#include "api/ortc/packettransportinterface.h" +#include "api/ortc/rtptransportcontrollerinterface.h" +#include "api/ortc/rtptransportinterface.h" +#include "api/ortc/srtptransportinterface.h" +#include "api/ortc/udptransportinterface.h" +#include "api/rtcerror.h" +#include "api/rtpparameters.h" +#include "rtc_base/network.h" +#include "rtc_base/scoped_ref_ptr.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// TODO(deadbeef): This should be part of /api/, but currently it's not and +// including its header violates checkdeps rules. +class AudioDeviceModule; + +// WARNING: This is experimental/under development, so use at your own risk; no +// guarantee about API stability is guaranteed here yet. +// +// This class is the ORTC analog of PeerConnectionFactory. It acts as a factory +// for ORTC objects that can be connected to each other. +// +// Some of these objects may not be represented by the ORTC specification, but +// follow the same general principles. +// +// If one of the factory methods takes another object as an argument, it MUST +// have been created by the same OrtcFactory. +// +// On object lifetimes: objects should be destroyed in this order: +// 1. Objects created by the factory. +// 2. The factory itself. +// 3. Objects passed into OrtcFactoryInterface::Create. +class OrtcFactoryInterface { + public: + // |network_thread| is the thread on which packets are sent and received. + // If null, a new rtc::Thread with a default socket server is created. + // + // |signaling_thread| is used for callbacks to the consumer of the API. 
If + // null, the current thread will be used, which assumes that the API consumer + // is running a message loop on this thread (either using an existing + // rtc::Thread, or by calling rtc::Thread::Current()->ProcessMessages). + // + // |network_manager| is used to determine which network interfaces are + // available. This is used for ICE, for example. If null, a default + // implementation will be used. Only accessed on |network_thread|. + // + // |socket_factory| is used (on the network thread) for creating sockets. If + // it's null, a default implementation will be used, which assumes + // |network_thread| is a normal rtc::Thread. + // + // |adm| is optional, and allows a different audio device implementation to + // be injected; otherwise a platform-specific module will be used that will + // use the default audio input. + // + // |audio_encoder_factory| and |audio_decoder_factory| are used to + // instantiate audio codecs; they determine what codecs are supported. + // + // Note that the OrtcFactoryInterface does not take ownership of any of the + // objects passed in by raw pointer, and as previously stated, these objects + // can't be destroyed before the factory is. + static RTCErrorOr> Create( + rtc::Thread* network_thread, + rtc::Thread* signaling_thread, + rtc::NetworkManager* network_manager, + rtc::PacketSocketFactory* socket_factory, + AudioDeviceModule* adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory); + + // Constructor for convenience which uses default implementations where + // possible (though does still require that the current thread runs a message + // loop; see above). 
+ static RTCErrorOr> Create( + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory) { + return Create(nullptr, nullptr, nullptr, nullptr, nullptr, + audio_encoder_factory, audio_decoder_factory); + } + + virtual ~OrtcFactoryInterface() {} + + // Creates an RTP transport controller, which is used in calls to + // CreateRtpTransport methods. If your application has some notion of a + // "call", you should create one transport controller per call. + // + // However, if you only are using one RtpTransport object, this doesn't need + // to be called explicitly; CreateRtpTransport will create one automatically + // if |rtp_transport_controller| is null. See below. + // + // TODO(deadbeef): Add MediaConfig and RtcEventLog arguments? + virtual RTCErrorOr> + CreateRtpTransportController() = 0; + + // Creates an RTP transport using the provided packet transports and + // transport controller. + // + // |rtp| will be used for sending RTP packets, and |rtcp| for RTCP packets. + // + // |rtp| can't be null. |rtcp| must be non-null if and only if + // |rtp_parameters.rtcp.mux| is false, indicating that RTCP muxing isn't used. + // Note that if RTCP muxing isn't enabled initially, it can still enabled + // later through SetParameters. + // + // If |transport_controller| is null, one will automatically be created, and + // its lifetime managed by the returned RtpTransport. This should only be + // done if a single RtpTransport is being used to communicate with the remote + // endpoint. + virtual RTCErrorOr> CreateRtpTransport( + const RtpTransportParameters& rtp_parameters, + PacketTransportInterface* rtp, + PacketTransportInterface* rtcp, + RtpTransportControllerInterface* transport_controller) = 0; + + // Creates an SrtpTransport which is an RTP transport that uses SRTP. 
+ virtual RTCErrorOr> + CreateSrtpTransport( + const RtpTransportParameters& rtp_parameters, + PacketTransportInterface* rtp, + PacketTransportInterface* rtcp, + RtpTransportControllerInterface* transport_controller) = 0; + + // Returns the capabilities of an RTP sender of type |kind|. These + // capabilities can be used to determine what RtpParameters to use to create + // an RtpSender. + // + // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. + virtual RtpCapabilities GetRtpSenderCapabilities( + cricket::MediaType kind) const = 0; + + // Creates an RTP sender with |track|. Will not start sending until Send is + // called. This is provided as a convenience; it's equivalent to calling + // CreateRtpSender with a kind (see below), followed by SetTrack. + // + // |track| and |transport| must not be null. + virtual RTCErrorOr> CreateRtpSender( + rtc::scoped_refptr track, + RtpTransportInterface* transport) = 0; + + // Overload of CreateRtpSender allows creating the sender without a track. + // + // |kind| must be MEDIA_TYPE_AUDIO or MEDIA_TYPE_VIDEO. + virtual RTCErrorOr> CreateRtpSender( + cricket::MediaType kind, + RtpTransportInterface* transport) = 0; + + // Returns the capabilities of an RTP receiver of type |kind|. These + // capabilities can be used to determine what RtpParameters to use to create + // an RtpReceiver. + // + // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. + virtual RtpCapabilities GetRtpReceiverCapabilities( + cricket::MediaType kind) const = 0; + + // Creates an RTP receiver of type |kind|. Will not start receiving media + // until Receive is called. + // + // |kind| must be MEDIA_TYPE_AUDIO or MEDIA_TYPE_VIDEO. + // + // |transport| must not be null. + virtual RTCErrorOr> + CreateRtpReceiver(cricket::MediaType kind, + RtpTransportInterface* transport) = 0; + + // Create a UDP transport with IP address family |family|, using a port + // within the specified range. 
+ // + // |family| must be AF_INET or AF_INET6. + // + // |min_port|/|max_port| values of 0 indicate no range restriction. + // + // Returns an error if the transport wasn't successfully created. + virtual RTCErrorOr> + CreateUdpTransport(int family, uint16_t min_port, uint16_t max_port) = 0; + + // Method for convenience that has no port range restrictions. + RTCErrorOr> CreateUdpTransport( + int family) { + return CreateUdpTransport(family, 0, 0); + } + + // NOTE: The methods below to create tracks/sources return scoped_refptrs + // rather than unique_ptrs, because these interfaces are also used with + // PeerConnection, where everything is ref-counted. + + // Creates a audio source representing the default microphone input. + // |options| decides audio processing settings. + virtual rtc::scoped_refptr CreateAudioSource( + const cricket::AudioOptions& options) = 0; + + // Version of the above method that uses default options. + rtc::scoped_refptr CreateAudioSource() { + return CreateAudioSource(cricket::AudioOptions()); + } + + // Creates a new local video track wrapping |source|. The same |source| can + // be used in several tracks. + virtual rtc::scoped_refptr CreateVideoTrack( + const std::string& id, + VideoTrackSourceInterface* source) = 0; + + // Creates an new local audio track wrapping |source|. + virtual rtc::scoped_refptr CreateAudioTrack( + const std::string& id, + AudioSourceInterface* source) = 0; +}; + +} // namespace webrtc + +#endif // API_ORTC_ORTCFACTORYINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpreceiverinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpreceiverinterface.h new file mode 100644 index 000000000000..59ff97762135 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpreceiverinterface.h @@ -0,0 +1,84 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for RtpReceivers: +// http://publications.ortc.org/2016/20161202/#rtcrtpreceiver* +// +// However, underneath the RtpReceiver is an RtpTransport, rather than a +// DtlsTransport. This is to allow different types of RTP transports (besides +// DTLS-SRTP) to be used. + +#ifndef API_ORTC_ORTCRTPRECEIVERINTERFACE_H_ +#define API_ORTC_ORTCRTPRECEIVERINTERFACE_H_ + +#include "api/mediastreaminterface.h" +#include "api/mediatypes.h" +#include "api/ortc/rtptransportinterface.h" +#include "api/rtcerror.h" +#include "api/rtpparameters.h" + +namespace webrtc { + +// Note: Since receiver capabilities may depend on how the OrtcFactory was +// created, instead of a static "GetCapabilities" method on this interface, +// there is a "GetRtpReceiverCapabilities" method on the OrtcFactory. +class OrtcRtpReceiverInterface { + public: + virtual ~OrtcRtpReceiverInterface() {} + + // Returns a track representing the media received by this receiver. + // + // Currently, this will return null until Receive has been successfully + // called. Also, a new track will be created every time the primary SSRC + // changes. + // + // If encodings are removed, GetTrack will return null. Though deactivating + // an encoding (setting |active| to false) will not do this. + // + // In the future, these limitations will be fixed, and GetTrack will return + // the same track for the lifetime of the RtpReceiver. So it's not + // recommended to write code that depends on this non-standard behavior. + virtual rtc::scoped_refptr GetTrack() const = 0; + + // Once supported, will switch to receiving media on a new transport. 
+ // However, this is not currently supported and will always return an error. + virtual RTCError SetTransport(RtpTransportInterface* transport) = 0; + // Returns previously set (or constructed-with) transport. + virtual RtpTransportInterface* GetTransport() const = 0; + + // Start receiving media with |parameters| (if |parameters| contains an + // active encoding). + // + // There are no limitations to how the parameters can be changed after the + // initial call to Receive, as long as they're valid (for example, they can't + // use the same payload type for two codecs). + virtual RTCError Receive(const RtpParameters& parameters) = 0; + // Returns parameters that were last successfully passed into Receive, or + // empty parameters if that hasn't yet occurred. + // + // Note that for parameters that are described as having an "implementation + // default" value chosen, GetParameters() will return those chosen defaults, + // with the exception of SSRCs which have special behavior. See + // rtpparameters.h for more details. + virtual RtpParameters GetParameters() const = 0; + + // Audio or video receiver? + // + // Once GetTrack() starts always returning a track, this method will be + // redundant, as one can call "GetTrack()->kind()". However, it's still a + // nice convenience, and is symmetric with OrtcRtpSenderInterface::GetKind. + virtual cricket::MediaType GetKind() const = 0; + + // TODO(deadbeef): GetContributingSources +}; + +} // namespace webrtc + +#endif // API_ORTC_ORTCRTPRECEIVERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpsenderinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpsenderinterface.h new file mode 100644 index 000000000000..fd4dfaa7907f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/ortcrtpsenderinterface.h @@ -0,0 +1,77 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for RtpSenders: +// http://publications.ortc.org/2016/20161202/#rtcrtpsender* +// +// However, underneath the RtpSender is an RtpTransport, rather than a +// DtlsTransport. This is to allow different types of RTP transports (besides +// DTLS-SRTP) to be used. + +#ifndef API_ORTC_ORTCRTPSENDERINTERFACE_H_ +#define API_ORTC_ORTCRTPSENDERINTERFACE_H_ + +#include "api/mediastreaminterface.h" +#include "api/mediatypes.h" +#include "api/ortc/rtptransportinterface.h" +#include "api/rtcerror.h" +#include "api/rtpparameters.h" + +namespace webrtc { + +// Note: Since sender capabilities may depend on how the OrtcFactory was +// created, instead of a static "GetCapabilities" method on this interface, +// there is a "GetRtpSenderCapabilities" method on the OrtcFactory. +class OrtcRtpSenderInterface { + public: + virtual ~OrtcRtpSenderInterface() {} + + // Sets the source of media that will be sent by this sender. + // + // If Send has already been called, will immediately switch to sending this + // track. If |track| is null, will stop sending media. + // + // Returns INVALID_PARAMETER error if an audio track is set on a video + // RtpSender, or vice-versa. + virtual RTCError SetTrack(MediaStreamTrackInterface* track) = 0; + // Returns previously set (or constructed-with) track. + virtual rtc::scoped_refptr GetTrack() const = 0; + + // Once supported, will switch to sending media on a new transport. However, + // this is not currently supported and will always return an error. + virtual RTCError SetTransport(RtpTransportInterface* transport) = 0; + // Returns previously set (or constructed-with) transport. 
+ virtual RtpTransportInterface* GetTransport() const = 0; + + // Start sending media with |parameters| (if |parameters| contains an active + // encoding). + // + // There are no limitations to how the parameters can be changed after the + // initial call to Send, as long as they're valid (for example, they can't + // use the same payload type for two codecs). + virtual RTCError Send(const RtpParameters& parameters) = 0; + // Returns parameters that were last successfully passed into Send, or empty + // parameters if that hasn't yet occurred. + // + // Note that for parameters that are described as having an "implementation + // default" value chosen, GetParameters() will return those chosen defaults, + // with the exception of SSRCs which have special behavior. See + // rtpparameters.h for more details. + virtual RtpParameters GetParameters() const = 0; + + // Audio or video sender? + virtual cricket::MediaType GetKind() const = 0; + + // TODO(deadbeef): SSRC conflict signal. +}; + +} // namespace webrtc + +#endif // API_ORTC_ORTCRTPSENDERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/packettransportinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/packettransportinterface.h new file mode 100644 index 000000000000..f357f8af63a5 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/packettransportinterface.h @@ -0,0 +1,39 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_ORTC_PACKETTRANSPORTINTERFACE_H_ +#define API_ORTC_PACKETTRANSPORTINTERFACE_H_ + +namespace rtc { + +class PacketTransportInternal; + +} // namespace rtc + +namespace webrtc { + +// Base class for different packet-based transports. +class PacketTransportInterface { + public: + virtual ~PacketTransportInterface() {} + + protected: + // Only for internal use. Returns a pointer to an internal interface, for use + // by the implementation. + virtual rtc::PacketTransportInternal* GetInternal() = 0; + + // Classes that can use this internal interface. + friend class RtpTransportControllerAdapter; + friend class RtpTransportAdapter; +}; + +} // namespace webrtc + +#endif // API_ORTC_PACKETTRANSPORTINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportcontrollerinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportcontrollerinterface.h new file mode 100644 index 000000000000..85f37fa7a0d5 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportcontrollerinterface.h @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_RTPTRANSPORTCONTROLLERINTERFACE_H_ +#define API_ORTC_RTPTRANSPORTCONTROLLERINTERFACE_H_ + +#include + +#include "api/ortc/rtptransportinterface.h" + +namespace webrtc { + +class RtpTransportControllerAdapter; + +// Used to group RTP transports between a local endpoint and the same remote +// endpoint, for the purpose of sharing bandwidth estimation and other things. 
+// +// Comparing this to the PeerConnection model, non-budled audio/video would use +// two RtpTransports with a single RtpTransportController, whereas bundled +// media would use a single RtpTransport, and two PeerConnections would use +// independent RtpTransportControllers. +// +// RtpTransports are associated with this controller when they're created, by +// passing the controller into OrtcFactory's relevant "CreateRtpTransport" +// method. When a transport is destroyed, it's automatically disassociated. +// GetTransports returns all currently associated transports. +// +// This is the RTP equivalent of "IceTransportController" in ORTC; RtpTransport +// is to RtpTransportController as IceTransport is to IceTransportController. +class RtpTransportControllerInterface { + public: + virtual ~RtpTransportControllerInterface() {} + + // Returns all transports associated with this controller (see explanation + // above). No ordering is guaranteed. + virtual std::vector GetTransports() const = 0; + + protected: + // Only for internal use. Returns a pointer to an internal interface, for use + // by the implementation. + virtual RtpTransportControllerAdapter* GetInternal() = 0; + + // Classes that can use this internal interface. + friend class OrtcFactory; + friend class RtpTransportAdapter; +}; + +} // namespace webrtc + +#endif // API_ORTC_RTPTRANSPORTCONTROLLERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportinterface.h new file mode 100644 index 000000000000..716a297c54eb --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/rtptransportinterface.h @@ -0,0 +1,124 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_RTPTRANSPORTINTERFACE_H_ +#define API_ORTC_RTPTRANSPORTINTERFACE_H_ + +#include + +#include "api/optional.h" +#include "api/ortc/packettransportinterface.h" +#include "api/rtcerror.h" +#include "api/rtp_headers.h" +#include "common_types.h" // NOLINT(build/include) + +namespace webrtc { + +class RtpTransportAdapter; + +struct RtcpParameters final { + // The SSRC to be used in the "SSRC of packet sender" field. If not set, one + // will be chosen by the implementation. + // TODO(deadbeef): Not implemented. + rtc::Optional ssrc; + + // The Canonical Name (CNAME) used by RTCP (e.g. in SDES messages). + // + // If empty in the construction of the RtpTransport, one will be generated by + // the implementation, and returned in GetRtcpParameters. Multiple + // RtpTransports created by the same OrtcFactory will use the same generated + // CNAME. + // + // If empty when passed into SetParameters, the CNAME simply won't be + // modified. + std::string cname; + + // Send reduced-size RTCP? + bool reduced_size = false; + + // Send RTCP multiplexed on the RTP transport? + bool mux = true; + + bool operator==(const RtcpParameters& o) const { + return ssrc == o.ssrc && cname == o.cname && + reduced_size == o.reduced_size && mux == o.mux; + } + bool operator!=(const RtcpParameters& o) const { return !(*this == o); } +}; + +struct RtpTransportParameters final { + RtcpParameters rtcp; + + // Enabled periodic sending of keep-alive packets, that help prevent timeouts + // on the network level, such as NAT bindings. See RFC6263 section 4.6. 
+ RtpKeepAliveConfig keepalive; + + bool operator==(const RtpTransportParameters& o) const { + return rtcp == o.rtcp && keepalive == o.keepalive; + } + bool operator!=(const RtpTransportParameters& o) const { + return !(*this == o); + } +}; + +// Base class for different types of RTP transports that can be created by an +// OrtcFactory. Used by RtpSenders/RtpReceivers. +// +// This is not present in the standard ORTC API, but exists here for a few +// reasons. Firstly, it allows different types of RTP transports to be used: +// DTLS-SRTP (which is required for the web), but also SDES-SRTP and +// unencrypted RTP. It also simplifies the handling of RTCP muxing, and +// provides a better API point for it. +// +// Note that Edge's implementation of ORTC provides a similar API point, called +// RTCSrtpSdesTransport: +// https://msdn.microsoft.com/en-us/library/mt502527(v=vs.85).aspx +class RtpTransportInterface { + public: + virtual ~RtpTransportInterface() {} + + // Returns packet transport that's used to send RTP packets. + virtual PacketTransportInterface* GetRtpPacketTransport() const = 0; + + // Returns separate packet transport that's used to send RTCP packets. If + // RTCP multiplexing is being used, returns null. + virtual PacketTransportInterface* GetRtcpPacketTransport() const = 0; + + // Set/get RTP/RTCP transport params. Can be used to enable RTCP muxing or + // reduced-size RTCP if initially not enabled. + // + // Changing |mux| from "true" to "false" is not allowed, and changing the + // CNAME is currently unsupported. + // RTP keep-alive settings need to be set before before an RtpSender has + // started sending, altering the payload type or timeout interval after this + // point is not supported. The parameters must also match across all RTP + // transports for a given RTP transport controller. + virtual RTCError SetParameters(const RtpTransportParameters& parameters) = 0; + // Returns last set or constructed-with parameters. 
If |cname| was empty in + // construction, the generated CNAME will be present in the returned + // parameters (see above). + virtual RtpTransportParameters GetParameters() const = 0; + + protected: + // Only for internal use. Returns a pointer to an internal interface, for use + // by the implementation. + virtual RtpTransportAdapter* GetInternal() = 0; + + // Classes that can use this internal interface. + friend class OrtcFactory; + friend class OrtcRtpSenderAdapter; + friend class OrtcRtpReceiverAdapter; + friend class RtpTransportControllerAdapter; + friend class RtpTransportAdapter; +}; + +} // namespace webrtc + +#endif // API_ORTC_RTPTRANSPORTINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/sessiondescription.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/sessiondescription.h new file mode 100644 index 000000000000..ebbaa27d6ffc --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/sessiondescription.h @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_SESSIONDESCRIPTION_H_ +#define API_ORTC_SESSIONDESCRIPTION_H_ + +#include +#include + +namespace webrtc { + +// A structured representation of an SDP session description. +class SessionDescription { + public: + SessionDescription(int64_t session_id, std::string session_version) + : session_id_(session_id), session_version_(std::move(session_version)) {} + + // https://tools.ietf.org/html/rfc4566#section-5.2 + // o= + // + // session_id_ is the "sess-id" field. 
+ // session_version_ is the "sess-version" field. + int64_t session_id() { return session_id_; } + void set_session_id(int64_t session_id) { session_id_ = session_id; } + + const std::string& session_version() const { return session_version_; } + void set_session_version(std::string session_version) { + session_version_ = std::move(session_version); + } + + private: + int64_t session_id_; + std::string session_version_; +}; + +} // namespace webrtc + +#endif // API_ORTC_SESSIONDESCRIPTION_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/srtptransportinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/srtptransportinterface.h new file mode 100644 index 000000000000..41c8ccc9c149 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/srtptransportinterface.h @@ -0,0 +1,48 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ORTC_SRTPTRANSPORTINTERFACE_H_ +#define API_ORTC_SRTPTRANSPORTINTERFACE_H_ + +#include "api/ortc/rtptransportinterface.h" +#include "api/rtcerror.h" +#include "api/cryptoparams.h" + +namespace webrtc { + +// The subclass of the RtpTransport which uses SRTP. The keying information +// is explicitly passed in from the application. +// +// If using SDP and SDES (RFC4568) for signaling, then after applying the +// answer, the negotiated keying information from the offer and answer would be +// set and the SRTP would be active. 
+// +// Note that Edge's implementation of ORTC provides a similar API point, called +// RTCSrtpSdesTransport: +// https://msdn.microsoft.com/en-us/library/mt502527(v=vs.85).aspx +class SrtpTransportInterface : public RtpTransportInterface { + public: + virtual ~SrtpTransportInterface() {} + + // There are some limitations of the current implementation: + // 1. Send and receive keys must use the same crypto suite. + // 2. The keys can't be changed after initially set. + // 3. The keys must be set before creating a sender/receiver using the SRTP + // transport. + // Set the SRTP keying material for sending RTP and RTCP. + virtual RTCError SetSrtpSendKey(const cricket::CryptoParams& params) = 0; + + // Set the SRTP keying material for receiving RTP and RTCP. + virtual RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params) = 0; +}; + +} // namespace webrtc + +#endif // API_ORTC_SRTPTRANSPORTINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/udptransportinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/udptransportinterface.h new file mode 100644 index 000000000000..f246a25e9ddd --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/ortc/udptransportinterface.h @@ -0,0 +1,49 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_ORTC_UDPTRANSPORTINTERFACE_H_ +#define API_ORTC_UDPTRANSPORTINTERFACE_H_ + +#include "api/ortc/packettransportinterface.h" +#include "api/proxy.h" +#include "rtc_base/socketaddress.h" + +namespace webrtc { + +// Interface for a raw UDP transport (not using ICE), meaning a combination of +// a local/remote IP address/port. +// +// An instance can be instantiated using OrtcFactory. +// +// Each instance reserves a UDP port, which will be freed when the +// UdpTransportInterface destructor is called. +// +// Calling SetRemoteAddress sets the destination of outgoing packets; without a +// destination, packets can't be sent, but they can be received. +class UdpTransportInterface : public virtual PacketTransportInterface { + public: + // Get the address of the socket allocated for this transport. + virtual rtc::SocketAddress GetLocalAddress() const = 0; + + // Sets the address to which packets will be delivered. + // + // Calling with a "nil" (default-constructed) address is legal, and unsets + // any previously set destination. + // + // However, calling with an incomplete address (port or IP not set) will + // fail. + virtual bool SetRemoteAddress(const rtc::SocketAddress& dest) = 0; + // Simple getter. If never set, returns nil address. + virtual rtc::SocketAddress GetRemoteAddress() const = 0; +}; + +} // namespace webrtc + +#endif // API_ORTC_UDPTRANSPORTINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionfactoryproxy.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionfactoryproxy.h new file mode 100644 index 000000000000..77778092b711 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionfactoryproxy.h @@ -0,0 +1,77 @@ +/* + * Copyright 2014 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_PEERCONNECTIONFACTORYPROXY_H_ +#define API_PEERCONNECTIONFACTORYPROXY_H_ + +#include +#include +#include + +#include "api/peerconnectioninterface.h" +#include "api/proxy.h" +#include "rtc_base/bind.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + // Use the overloads of CreateVideoSource that take raw VideoCapturer + // pointers from PeerConnectionFactoryInterface. + // TODO(deadbeef): Remove this using statement once those overloads are + // removed. 
+ using PeerConnectionFactoryInterface::CreateVideoSource; + PROXY_METHOD1(void, SetOptions, const Options&) + PROXY_METHOD5(rtc::scoped_refptr, + CreatePeerConnection, + const PeerConnectionInterface::RTCConfiguration&, + const MediaConstraintsInterface*, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*); + PROXY_METHOD4(rtc::scoped_refptr, + CreatePeerConnection, + const PeerConnectionInterface::RTCConfiguration&, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*); + PROXY_METHOD2(rtc::scoped_refptr, + CreatePeerConnection, + const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies); + PROXY_METHOD1(rtc::scoped_refptr, + CreateLocalMediaStream, const std::string&) + PROXY_METHOD1(rtc::scoped_refptr, + CreateAudioSource, const MediaConstraintsInterface*) + PROXY_METHOD1(rtc::scoped_refptr, + CreateAudioSource, + const cricket::AudioOptions&) + PROXY_METHOD2(rtc::scoped_refptr, + CreateVideoSource, + std::unique_ptr, + const MediaConstraintsInterface*) + PROXY_METHOD1(rtc::scoped_refptr, + CreateVideoSource, + std::unique_ptr) + PROXY_METHOD2(rtc::scoped_refptr, + CreateVideoTrack, + const std::string&, + VideoTrackSourceInterface*) + PROXY_METHOD2(rtc::scoped_refptr, + CreateAudioTrack, const std::string&, AudioSourceInterface*) + PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t) + PROXY_METHOD0(void, StopAecDump) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_PEERCONNECTIONFACTORYPROXY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectioninterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectioninterface.h new file mode 100644 index 000000000000..8c0602945b1d --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectioninterface.h @@ -0,0 +1,1550 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains the PeerConnection interface as defined in +// https://w3c.github.io/webrtc-pc/#peer-to-peer-connections +// +// The PeerConnectionFactory class provides factory methods to create +// PeerConnection, MediaStream and MediaStreamTrack objects. +// +// The following steps are needed to setup a typical call using WebRTC: +// +// 1. Create a PeerConnectionFactoryInterface. Check constructors for more +// information about input parameters. +// +// 2. Create a PeerConnection object. Provide a configuration struct which +// points to STUN and/or TURN servers used to generate ICE candidates, and +// provide an object that implements the PeerConnectionObserver interface, +// which is used to receive callbacks from the PeerConnection. +// +// 3. Create local MediaStreamTracks using the PeerConnectionFactory and add +// them to PeerConnection by calling AddTrack (or legacy method, AddStream). +// +// 4. Create an offer, call SetLocalDescription with it, serialize it, and send +// it to the remote peer +// +// 5. Once an ICE candidate has been gathered, the PeerConnection will call the +// observer function OnIceCandidate. The candidates must also be serialized and +// sent to the remote peer. +// +// 6. Once an answer is received from the remote peer, call +// SetRemoteDescription with the remote answer. +// +// 7. Once a remote candidate is received from the remote peer, provide it to +// the PeerConnection by calling AddIceCandidate. 
+// +// The receiver of a call (assuming the application is "call"-based) can decide +// to accept or reject the call; this decision will be taken by the application, +// not the PeerConnection. +// +// If the application decides to accept the call, it should: +// +// 1. Create PeerConnectionFactoryInterface if it doesn't exist. +// +// 2. Create a new PeerConnection. +// +// 3. Provide the remote offer to the new PeerConnection object by calling +// SetRemoteDescription. +// +// 4. Generate an answer to the remote offer by calling CreateAnswer and send it +// back to the remote peer. +// +// 5. Provide the local answer to the new PeerConnection by calling +// SetLocalDescription with the answer. +// +// 6. Provide the remote ICE candidates by calling AddIceCandidate. +// +// 7. Once a candidate has been gathered, the PeerConnection will call the +// observer function OnIceCandidate. Send these candidates to the remote peer. + +#ifndef API_PEERCONNECTIONINTERFACE_H_ +#define API_PEERCONNECTIONINTERFACE_H_ + +#include +#include +#include +#include + +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_options.h" +#include "api/call/callfactoryinterface.h" +#include "api/datachannelinterface.h" +#include "api/dtmfsenderinterface.h" +#include "api/fec_controller.h" +#include "api/jsep.h" +#include "api/mediastreaminterface.h" +#include "api/rtcerror.h" +#include "api/rtceventlogoutput.h" +#include "api/rtpreceiverinterface.h" +#include "api/rtpsenderinterface.h" +#include "api/rtptransceiverinterface.h" +#include "api/setremotedescriptionobserverinterface.h" +#include "api/stats/rtcstatscollectorcallback.h" +#include "api/statstypes.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "api/turncustomizer.h" +#include "api/umametrics.h" +#include "logging/rtc_event_log/rtc_event_log_factory_interface.h" +#include 
"media/base/mediaconfig.h" +// TODO(bugs.webrtc.org/6353): cricket::VideoCapturer is deprecated and should +// be deleted from the PeerConnection api. +#include "media/base/videocapturer.h" // nogncheck +// TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications +// inject a PacketSocketFactory and/or NetworkManager, and not expose +// PortAllocator in the PeerConnection api. +#include "p2p/base/portallocator.h" // nogncheck +// TODO(nisse): The interface for bitrate allocation strategy belongs in api/. +#include "rtc_base/bitrateallocationstrategy.h" +#include "rtc_base/network.h" +#include "rtc_base/platform_file.h" +#include "rtc_base/rtccertificate.h" +#include "rtc_base/rtccertificategenerator.h" +#include "rtc_base/socketaddress.h" +#include "rtc_base/sslcertificate.h" +#include "rtc_base/sslstreamadapter.h" + +namespace rtc { +class SSLIdentity; +class Thread; +} + +namespace cricket { +class MediaEngineInterface; +class WebRtcVideoDecoderFactory; +class WebRtcVideoEncoderFactory; +} + +namespace webrtc { +class AudioDeviceModule; +class AudioMixer; +class AudioProcessing; +class MediaConstraintsInterface; +class VideoDecoderFactory; +class VideoEncoderFactory; + +// MediaStream container interface. +class StreamCollectionInterface : public rtc::RefCountInterface { + public: + // TODO(ronghuawu): Update the function names to c++ style, e.g. find -> Find. + virtual size_t count() = 0; + virtual MediaStreamInterface* at(size_t index) = 0; + virtual MediaStreamInterface* find(const std::string& label) = 0; + virtual MediaStreamTrackInterface* FindAudioTrack( + const std::string& id) = 0; + virtual MediaStreamTrackInterface* FindVideoTrack( + const std::string& id) = 0; + + protected: + // Dtor protected as objects shouldn't be deleted via this interface. 
+ ~StreamCollectionInterface() {} +}; + +class StatsObserver : public rtc::RefCountInterface { + public: + virtual void OnComplete(const StatsReports& reports) = 0; + + protected: + virtual ~StatsObserver() {} +}; + +enum class SdpSemantics { kPlanB, kUnifiedPlan }; + +class PeerConnectionInterface : public rtc::RefCountInterface { + public: + // See https://w3c.github.io/webrtc-pc/#state-definitions + enum SignalingState { + kStable, + kHaveLocalOffer, + kHaveLocalPrAnswer, + kHaveRemoteOffer, + kHaveRemotePrAnswer, + kClosed, + }; + + enum IceGatheringState { + kIceGatheringNew, + kIceGatheringGathering, + kIceGatheringComplete + }; + + enum IceConnectionState { + kIceConnectionNew, + kIceConnectionChecking, + kIceConnectionConnected, + kIceConnectionCompleted, + kIceConnectionFailed, + kIceConnectionDisconnected, + kIceConnectionClosed, + kIceConnectionMax, + }; + + // TLS certificate policy. + enum TlsCertPolicy { + // For TLS based protocols, ensure the connection is secure by not + // circumventing certificate validation. + kTlsCertPolicySecure, + // For TLS based protocols, disregard security completely by skipping + // certificate validation. This is insecure and should never be used unless + // security is irrelevant in that particular context. + kTlsCertPolicyInsecureNoCheck, + }; + + struct IceServer { + // TODO(jbauch): Remove uri when all code using it has switched to urls. + // List of URIs associated with this server. Valid formats are described + // in RFC7064 and RFC7065, and more may be added in the future. The "host" + // part of the URI may contain either an IP address or a hostname. + std::string uri; + std::vector urls; + std::string username; + std::string password; + TlsCertPolicy tls_cert_policy = kTlsCertPolicySecure; + // If the URIs in |urls| only contain IP addresses, this field can be used + // to indicate the hostname, which may be necessary for TLS (using the SNI + // extension). 
If |urls| itself contains the hostname, this isn't + // necessary. + std::string hostname; + // List of protocols to be used in the TLS ALPN extension. + std::vector tls_alpn_protocols; + // List of elliptic curves to be used in the TLS elliptic curves extension. + std::vector tls_elliptic_curves; + + bool operator==(const IceServer& o) const { + return uri == o.uri && urls == o.urls && username == o.username && + password == o.password && tls_cert_policy == o.tls_cert_policy && + hostname == o.hostname && + tls_alpn_protocols == o.tls_alpn_protocols && + tls_elliptic_curves == o.tls_elliptic_curves; + } + bool operator!=(const IceServer& o) const { return !(*this == o); } + }; + typedef std::vector IceServers; + + enum IceTransportsType { + // TODO(pthatcher): Rename these kTransporTypeXXX, but update + // Chromium at the same time. + kNone, + kRelay, + kNoHost, + kAll + }; + + // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + enum BundlePolicy { + kBundlePolicyBalanced, + kBundlePolicyMaxBundle, + kBundlePolicyMaxCompat + }; + + // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + enum RtcpMuxPolicy { + kRtcpMuxPolicyNegotiate, + kRtcpMuxPolicyRequire, + }; + + enum TcpCandidatePolicy { + kTcpCandidatePolicyEnabled, + kTcpCandidatePolicyDisabled + }; + + enum CandidateNetworkPolicy { + kCandidateNetworkPolicyAll, + kCandidateNetworkPolicyLowCost + }; + + enum ContinualGatheringPolicy { + GATHER_ONCE, + GATHER_CONTINUALLY + }; + + enum class RTCConfigurationType { + // A configuration that is safer to use, despite not having the best + // performance. Currently this is the default configuration. + kSafe, + // An aggressive configuration that has better performance, although it + // may be riskier and may need extra support in the application. + kAggressive + }; + + // TODO(hbos): Change into class with private data and public getters. 
+ // TODO(nisse): In particular, accessing fields directly from an + // application is brittle, since the organization mirrors the + // organization of the implementation, which isn't stable. So we + // need getters and setters at least for fields which applications + // are interested in. + struct RTCConfiguration { + // This struct is subject to reorganization, both for naming + // consistency, and to group settings to match where they are used + // in the implementation. To do that, we need getter and setter + // methods for all settings which are of interest to applications, + // Chrome in particular. + + RTCConfiguration() = default; + explicit RTCConfiguration(RTCConfigurationType type) { + if (type == RTCConfigurationType::kAggressive) { + // These parameters are also defined in Java and IOS configurations, + // so their values may be overwritten by the Java or IOS configuration. + bundle_policy = kBundlePolicyMaxBundle; + rtcp_mux_policy = kRtcpMuxPolicyRequire; + ice_connection_receiving_timeout = + kAggressiveIceConnectionReceivingTimeout; + + // These parameters are not defined in Java or IOS configuration, + // so their values will not be overwritten. 
+ enable_ice_renomination = true; + redetermine_role_on_ice_restart = false; + } + } + + bool operator==(const RTCConfiguration& o) const; + bool operator!=(const RTCConfiguration& o) const; + + bool dscp() const { return media_config.enable_dscp; } + void set_dscp(bool enable) { media_config.enable_dscp = enable; } + + bool cpu_adaptation() const { + return media_config.video.enable_cpu_adaptation; + } + void set_cpu_adaptation(bool enable) { + media_config.video.enable_cpu_adaptation = enable; + } + + bool suspend_below_min_bitrate() const { + return media_config.video.suspend_below_min_bitrate; + } + void set_suspend_below_min_bitrate(bool enable) { + media_config.video.suspend_below_min_bitrate = enable; + } + + bool prerenderer_smoothing() const { + return media_config.video.enable_prerenderer_smoothing; + } + void set_prerenderer_smoothing(bool enable) { + media_config.video.enable_prerenderer_smoothing = enable; + } + + bool experiment_cpu_load_estimator() const { + return media_config.video.experiment_cpu_load_estimator; + } + void set_experiment_cpu_load_estimator(bool enable) { + media_config.video.experiment_cpu_load_estimator = enable; + } + static const int kUndefined = -1; + // Default maximum number of packets in the audio jitter buffer. + static const int kAudioJitterBufferMaxPackets = 50; + // ICE connection receiving timeout for aggressive configuration. + static const int kAggressiveIceConnectionReceivingTimeout = 1000; + + //////////////////////////////////////////////////////////////////////// + // The below few fields mirror the standard RTCConfiguration dictionary: + // https://w3c.github.io/webrtc-pc/#rtcconfiguration-dictionary + //////////////////////////////////////////////////////////////////////// + + // TODO(pthatcher): Rename this ice_servers, but update Chromium + // at the same time. + IceServers servers; + // TODO(pthatcher): Rename this ice_transport_type, but update + // Chromium at the same time. 
+ IceTransportsType type = kAll; + BundlePolicy bundle_policy = kBundlePolicyBalanced; + RtcpMuxPolicy rtcp_mux_policy = kRtcpMuxPolicyRequire; + std::vector> certificates; + int ice_candidate_pool_size = 0; + + ////////////////////////////////////////////////////////////////////////// + // The below fields correspond to constraints from the deprecated + // constraints interface for constructing a PeerConnection. + // + // rtc::Optional fields can be "missing", in which case the implementation + // default will be used. + ////////////////////////////////////////////////////////////////////////// + + // If set to true, don't gather IPv6 ICE candidates. + // TODO(deadbeef): Remove this? IPv6 support has long stopped being + // experimental + bool disable_ipv6 = false; + + // If set to true, don't gather IPv6 ICE candidates on Wi-Fi. + // Only intended to be used on specific devices. Certain phones disable IPv6 + // when the screen is turned off and it would be better to just disable the + // IPv6 ICE candidates on Wi-Fi in those cases. + bool disable_ipv6_on_wifi = false; + + // By default, the PeerConnection will use a limited number of IPv6 network + // interfaces, in order to avoid too many ICE candidate pairs being created + // and delaying ICE completion. + // + // Can be set to INT_MAX to effectively disable the limit. + int max_ipv6_networks = cricket::kDefaultMaxIPv6Networks; + + // Exclude link-local network interfaces + // from considertaion for gathering ICE candidates. + bool disable_link_local_networks = false; + + // If set to true, use RTP data channels instead of SCTP. + // TODO(deadbeef): Remove this. We no longer commit to supporting RTP data + // channels, though some applications are still working on moving off of + // them. + bool enable_rtp_data_channel = false; + + // Minimum bitrate at which screencast video tracks will be encoded at. 
+ // This means adding padding bits up to this bitrate, which can help + // when switching from a static scene to one with motion. + rtc::Optional screencast_min_bitrate; + + // Use new combined audio/video bandwidth estimation? + rtc::Optional combined_audio_video_bwe; + + // Can be used to disable DTLS-SRTP. This should never be done, but can be + // useful for testing purposes, for example in setting up a loopback call + // with a single PeerConnection. + rtc::Optional enable_dtls_srtp; + + ///////////////////////////////////////////////// + // The below fields are not part of the standard. + ///////////////////////////////////////////////// + + // Can be used to disable TCP candidate generation. + TcpCandidatePolicy tcp_candidate_policy = kTcpCandidatePolicyEnabled; + + // Can be used to avoid gathering candidates for a "higher cost" network, + // if a lower cost one exists. For example, if both Wi-Fi and cellular + // interfaces are available, this could be used to avoid using the cellular + // interface. + CandidateNetworkPolicy candidate_network_policy = + kCandidateNetworkPolicyAll; + + // The maximum number of packets that can be stored in the NetEq audio + // jitter buffer. Can be reduced to lower tolerated audio latency. + int audio_jitter_buffer_max_packets = kAudioJitterBufferMaxPackets; + + // Whether to use the NetEq "fast mode" which will accelerate audio quicker + // if it falls behind. + bool audio_jitter_buffer_fast_accelerate = false; + + // Timeout in milliseconds before an ICE candidate pair is considered to be + // "not receiving", after which a lower priority candidate pair may be + // selected. + int ice_connection_receiving_timeout = kUndefined; + + // Interval in milliseconds at which an ICE "backup" candidate pair will be + // pinged. This is a candidate pair which is not actively in use, but may + // be switched to if the active candidate pair becomes unusable. 
+ // + // This is relevant mainly to Wi-Fi/cell handoff; the application may not + // want this backup cellular candidate pair pinged frequently, since it + // consumes data/battery. + int ice_backup_candidate_pair_ping_interval = kUndefined; + + // Can be used to enable continual gathering, which means new candidates + // will be gathered as network interfaces change. Note that if continual + // gathering is used, the candidate removal API should also be used, to + // avoid an ever-growing list of candidates. + ContinualGatheringPolicy continual_gathering_policy = GATHER_ONCE; + + // If set to true, candidate pairs will be pinged in order of most likely + // to work (which means using a TURN server, generally), rather than in + // standard priority order. + bool prioritize_most_likely_ice_candidate_pairs = false; + + // Implementation defined settings. A public member only for the benefit of + // the implementation. Applications must not access it directly, and should + // instead use provided accessor methods, e.g., set_cpu_adaptation. + struct cricket::MediaConfig media_config; + + // If set to true, only one preferred TURN allocation will be used per + // network interface. UDP is preferred over TCP and IPv6 over IPv4. This + // can be used to cut down on the number of candidate pairings. + bool prune_turn_ports = false; + + // If set to true, this means the ICE transport should presume TURN-to-TURN + // candidate pairs will succeed, even before a binding response is received. + // This can be used to optimize the initial connection time, since the DTLS + // handshake can begin immediately. + bool presume_writable_when_fully_relayed = false; + + // If true, "renomination" will be added to the ice options in the transport + // description. 
+ // See: https://tools.ietf.org/html/draft-thatcher-ice-renomination-00 + bool enable_ice_renomination = false; + + // If true, the ICE role is re-determined when the PeerConnection sets a + // local transport description that indicates an ICE restart. + // + // This is standard RFC5245 ICE behavior, but causes unnecessary role + // thrashing, so an application may wish to avoid it. This role + // re-determining was removed in ICEbis (ICE v2). + bool redetermine_role_on_ice_restart = true; + + // The following fields define intervals in milliseconds at which ICE + // connectivity checks are sent. + // + // We consider ICE is "strongly connected" for an agent when there is at + // least one candidate pair that currently succeeds in connectivity check + // from its direction i.e. sending a STUN ping and receives a STUN ping + // response, AND all candidate pairs have sent a minimum number of pings for + // connectivity (this number is implementation-specific). Otherwise, ICE is + // considered in "weak connectivity". + // + // Note that the above notion of strong and weak connectivity is not defined + // in RFC 5245, and they apply to our current ICE implementation only. + // + // 1) ice_check_interval_strong_connectivity defines the interval applied to + // ALL candidate pairs when ICE is strongly connected, and it overrides the + // default value of this interval in the ICE implementation; + // 2) ice_check_interval_weak_connectivity defines the counterpart for ALL + // pairs when ICE is weakly connected, and it overrides the default value of + // this interval in the ICE implementation; + // 3) ice_check_min_interval defines the minimal interval (equivalently the + // maximum rate) that overrides the above two intervals when either of them + // is less. 
+ rtc::Optional ice_check_interval_strong_connectivity; + rtc::Optional ice_check_interval_weak_connectivity; + rtc::Optional ice_check_min_interval; + + // The min time period for which a candidate pair must wait for response to + // connectivity checks before it becomes unwritable. This parameter + // overrides the default value in the ICE implementation if set. + rtc::Optional ice_unwritable_timeout; + + // The min number of connectivity checks that a candidate pair must sent + // without receiving response before it becomes unwritable. This parameter + // overrides the default value in the ICE implementation if set. + rtc::Optional ice_unwritable_min_checks; + + // The interval in milliseconds at which STUN candidates will resend STUN + // binding requests to keep NAT bindings open. + rtc::Optional stun_candidate_keepalive_interval; + + // ICE Periodic Regathering + // If set, WebRTC will periodically create and propose candidates without + // starting a new ICE generation. The regathering happens continuously with + // interval specified in milliseconds by the uniform distribution [a, b]. + rtc::Optional ice_regather_interval_range; + + // Optional TurnCustomizer. + // With this class one can modify outgoing TURN messages. + // The object passed in must remain valid until PeerConnection::Close() is + // called. + webrtc::TurnCustomizer* turn_customizer = nullptr; + + // Preferred network interface. + // A candidate pair on a preferred network has a higher precedence in ICE + // than one on an un-preferred network, regardless of priority or network + // cost. + rtc::Optional network_preference; + + // Configure the SDP semantics used by this PeerConnection. Note that the + // WebRTC 1.0 specification requires kUnifiedPlan semantics. The + // RtpTransceiver API is only available with kUnifiedPlan semantics. 
+ // + // kPlanB will cause PeerConnection to create offers and answers with at + // most one audio and one video m= section with multiple RtpSenders and + // RtpReceivers specified as multiple a=ssrc lines within the section. This + // will also cause PeerConnection to ignore all but the first m= section of + // the same media type. + // + // kUnifiedPlan will cause PeerConnection to create offers and answers with + // multiple m= sections where each m= section maps to one RtpSender and one + // RtpReceiver (an RtpTransceiver), either both audio or both video. This + // will also cause PeerConnection to ignore all but the first a=ssrc lines + // that form a Plan B stream. + // + // For users who wish to send multiple audio/video streams and need to stay + // interoperable with legacy WebRTC implementations or use legacy APIs, + // specify kPlanB. + // + // For all other users, specify kUnifiedPlan. + SdpSemantics sdp_semantics = SdpSemantics::kPlanB; + + // + // Don't forget to update operator== if adding something. + // + }; + + // See: https://www.w3.org/TR/webrtc/#idl-def-rtcofferansweroptions + struct RTCOfferAnswerOptions { + static const int kUndefined = -1; + static const int kMaxOfferToReceiveMedia = 1; + + // The default value for constraint offerToReceiveX:true. + static const int kOfferToReceiveMediaTrue = 1; + + // These options are left as backwards compatibility for clients who need + // "Plan B" semantics. Clients who have switched to "Unified Plan" semantics + // should use the RtpTransceiver API (AddTransceiver) instead. + // + // offer_to_receive_X set to 1 will cause a media description to be + // generated in the offer, even if no tracks of that type have been added. + // Values greater than 1 are treated the same. + // + // If set to 0, the generated directional attribute will not include the + // "recv" direction (meaning it will be "sendonly" or "inactive". 
+ int offer_to_receive_video = kUndefined; + int offer_to_receive_audio = kUndefined; + + bool voice_activity_detection = true; + bool ice_restart = false; + + // If true, will offer to BUNDLE audio/video/data together. Not to be + // confused with RTCP mux (multiplexing RTP and RTCP together). + bool use_rtp_mux = true; + + RTCOfferAnswerOptions() = default; + + RTCOfferAnswerOptions(int offer_to_receive_video, + int offer_to_receive_audio, + bool voice_activity_detection, + bool ice_restart, + bool use_rtp_mux) + : offer_to_receive_video(offer_to_receive_video), + offer_to_receive_audio(offer_to_receive_audio), + voice_activity_detection(voice_activity_detection), + ice_restart(ice_restart), + use_rtp_mux(use_rtp_mux) {} + }; + + // Used by GetStats to decide which stats to include in the stats reports. + // |kStatsOutputLevelStandard| includes the standard stats for Javascript API; + // |kStatsOutputLevelDebug| includes both the standard stats and additional + // stats for debugging purposes. + enum StatsOutputLevel { + kStatsOutputLevelStandard, + kStatsOutputLevelDebug, + }; + + // Accessor methods to active local streams. + // This method is not supported with kUnifiedPlan semantics. Please use + // GetSenders() instead. + virtual rtc::scoped_refptr + local_streams() = 0; + + // Accessor methods to remote streams. + // This method is not supported with kUnifiedPlan semantics. Please use + // GetReceivers() instead. + virtual rtc::scoped_refptr + remote_streams() = 0; + + // Add a new MediaStream to be sent on this PeerConnection. + // Note that a SessionDescription negotiation is needed before the + // remote peer can receive the stream. + // + // This has been removed from the standard in favor of a track-based API. So, + // this is equivalent to simply calling AddTrack for each track within the + // stream, with the one difference that if "stream->AddTrack(...)" is called + // later, the PeerConnection will automatically pick up the new track. 
Though + // this functionality will be deprecated in the future. + // + // This method is not supported with kUnifiedPlan semantics. Please use + // AddTrack instead. + virtual bool AddStream(MediaStreamInterface* stream) = 0; + + // Remove a MediaStream from this PeerConnection. + // Note that a SessionDescription negotiation is needed before the + // remote peer is notified. + // + // This method is not supported with kUnifiedPlan semantics. Please use + // RemoveTrack instead. + virtual void RemoveStream(MediaStreamInterface* stream) = 0; + + // Add a new MediaStreamTrack to be sent on this PeerConnection, and return + // the newly created RtpSender. The RtpSender will be associated with the + // streams specified in the |stream_ids| list. + // + // Errors: + // - INVALID_PARAMETER: |track| is null, has a kind other than audio or video, + // or a sender already exists for the track. + // - INVALID_STATE: The PeerConnection is closed. + // TODO(steveanton): Remove default implementation once downstream + // implementations have been updated. + virtual RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids) { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + // |streams| indicates which stream ids the track should be associated + // with. + // TODO(steveanton): Remove this overload once callers have moved to the + // signature with stream ids. + virtual rtc::scoped_refptr AddTrack( + MediaStreamTrackInterface* track, + std::vector streams) { + // Default implementation provided so downstream implementations can remove + // this. + return nullptr; + } + + // Remove an RtpSender from this PeerConnection. + // Returns true on success. + virtual bool RemoveTrack(RtpSenderInterface* sender) = 0; + + // AddTransceiver creates a new RtpTransceiver and adds it to the set of + // transceivers. 
Adding a transceiver will cause future calls to CreateOffer + // to add a media description for the corresponding transceiver. + // + // The initial value of |mid| in the returned transceiver is null. Setting a + // new session description may change it to a non-null value. + // + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver + // + // Optionally, an RtpTransceiverInit structure can be specified to configure + // the transceiver from construction. If not specified, the transceiver will + // default to having a direction of kSendRecv and not be part of any streams. + // + // These methods are only available when Unified Plan is enabled (see + // RTCConfiguration). + // + // Common errors: + // - INTERNAL_ERROR: The configuration does not have Unified Plan enabled. + // TODO(steveanton): Make these pure virtual once downstream projects have + // updated. + + // Adds a transceiver with a sender set to transmit the given track. The kind + // of the transceiver (and sender/receiver) will be derived from the kind of + // the track. + // Errors: + // - INVALID_PARAMETER: |track| is null. + virtual RTCErrorOr> + AddTransceiver(rtc::scoped_refptr track) { + return RTCError(RTCErrorType::INTERNAL_ERROR, "not implemented"); + } + virtual RTCErrorOr> + AddTransceiver(rtc::scoped_refptr track, + const RtpTransceiverInit& init) { + return RTCError(RTCErrorType::INTERNAL_ERROR, "not implemented"); + } + + // Adds a transceiver with the given kind. Can either be MEDIA_TYPE_AUDIO or + // MEDIA_TYPE_VIDEO. + // Errors: + // - INVALID_PARAMETER: |media_type| is not MEDIA_TYPE_AUDIO or + // MEDIA_TYPE_VIDEO. 
+ virtual RTCErrorOr> + AddTransceiver(cricket::MediaType media_type) { + return RTCError(RTCErrorType::INTERNAL_ERROR, "not implemented"); + } + virtual RTCErrorOr> + AddTransceiver(cricket::MediaType media_type, + const RtpTransceiverInit& init) { + return RTCError(RTCErrorType::INTERNAL_ERROR, "not implemented"); + } + + // Returns pointer to a DtmfSender on success. Otherwise returns null. + // + // This API is no longer part of the standard; instead DtmfSenders are + // obtained from RtpSenders. Which is what the implementation does; it finds + // an RtpSender for |track| and just returns its DtmfSender. + virtual rtc::scoped_refptr CreateDtmfSender( + AudioTrackInterface* track) = 0; + + // TODO(deadbeef): Make these pure virtual once all subclasses implement them. + + // Creates a sender without a track. Can be used for "early media"/"warmup" + // use cases, where the application may want to negotiate video attributes + // before a track is available to send. + // + // The standard way to do this would be through "addTransceiver", but we + // don't support that API yet. + // + // |kind| must be "audio" or "video". + // + // |stream_id| is used to populate the msid attribute; if empty, one will + // be generated automatically. + // + // This method is not supported with kUnifiedPlan semantics. Please use + // AddTransceiver instead. + virtual rtc::scoped_refptr CreateSender( + const std::string& kind, + const std::string& stream_id) { + return rtc::scoped_refptr(); + } + + // If Plan B semantics are specified, gets all RtpSenders, created either + // through AddStream, AddTrack, or CreateSender. All senders of a specific + // media type share the same media description. + // + // If Unified Plan semantics are specified, gets the RtpSender for each + // RtpTransceiver. + virtual std::vector> GetSenders() + const { + return std::vector>(); + } + + // If Plan B semantics are specified, gets all RtpReceivers created when a + // remote description is applied. 
All receivers of a specific media type share + // the same media description. It is also possible to have a media description + // with no associated RtpReceivers, if the directional attribute does not + // indicate that the remote peer is sending any media. + // + // If Unified Plan semantics are specified, gets the RtpReceiver for each + // RtpTransceiver. + virtual std::vector> GetReceivers() + const { + return std::vector>(); + } + + // Get all RtpTransceivers, created either through AddTransceiver, AddTrack or + // by a remote description applied with SetRemoteDescription. + // + // Note: This method is only available when Unified Plan is enabled (see + // RTCConfiguration). + virtual std::vector> + GetTransceivers() const { + return {}; + } + + // The legacy non-compliant GetStats() API. This correspond to the + // callback-based version of getStats() in JavaScript. The returned metrics + // are UNDOCUMENTED and many of them rely on implementation-specific details. + // The goal is to DELETE THIS VERSION but we can't today because it is heavily + // relied upon by third parties. See https://crbug.com/822696. + // + // This version is wired up into Chrome. Any stats implemented are + // automatically exposed to the Web Platform. This has BYPASSED the Chrome + // release processes for years and lead to cross-browser incompatibility + // issues and web application reliance on Chrome-only behavior. + // + // This API is in "maintenance mode", serious regressions should be fixed but + // adding new stats is highly discouraged. + // + // TODO(hbos): Deprecate and remove this when third parties have migrated to + // the spec-compliant GetStats() API. https://crbug.com/822696 + virtual bool GetStats(StatsObserver* observer, + MediaStreamTrackInterface* track, // Optional + StatsOutputLevel level) = 0; + // The spec-compliant GetStats() API. This correspond to the promise-based + // version of getStats() in JavaScript. 
Implementation status is described in + // api/stats/rtcstats_objects.h. For more details on stats, see spec: + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-getstats + // TODO(hbos): Takes shared ownership, use rtc::scoped_refptr<> instead. This + // requires stop overriding the current version in third party or making third + // party calls explicit to avoid ambiguity during switch. Make the future + // version abstract as soon as third party projects implement it. + virtual void GetStats(RTCStatsCollectorCallback* callback) {} + // Spec-compliant getStats() performing the stats selection algorithm with the + // sender. https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-getstats + // TODO(hbos): Make abstract as soon as third party projects implement it. + virtual void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) {} + // Spec-compliant getStats() performing the stats selection algorithm with the + // receiver. https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getstats + // TODO(hbos): Make abstract as soon as third party projects implement it. + virtual void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) {} + // Clear cached stats in the RTCStatsCollector. + // Exposed for testing while waiting for automatic cache clear to work. + // https://bugs.webrtc.org/8693 + virtual void ClearStatsCache() {} + + // Create a data channel with the provided config, or default config if none + // is provided. Note that an offer/answer negotiation is still necessary + // before the data channel can be used. + // + // Also, calling CreateDataChannel is the only way to get a data "m=" section + // in SDP, so it should be done before CreateOffer is called, if the + // application plans to use data channels. 
+ virtual rtc::scoped_refptr CreateDataChannel( + const std::string& label, + const DataChannelInit* config) = 0; + + // Returns the more recently applied description; "pending" if it exists, and + // otherwise "current". See below. + virtual const SessionDescriptionInterface* local_description() const = 0; + virtual const SessionDescriptionInterface* remote_description() const = 0; + + // A "current" description the one currently negotiated from a complete + // offer/answer exchange. + virtual const SessionDescriptionInterface* current_local_description() const { + return nullptr; + } + virtual const SessionDescriptionInterface* current_remote_description() + const { + return nullptr; + } + + // A "pending" description is one that's part of an incomplete offer/answer + // exchange (thus, either an offer or a pranswer). Once the offer/answer + // exchange is finished, the "pending" description will become "current". + virtual const SessionDescriptionInterface* pending_local_description() const { + return nullptr; + } + virtual const SessionDescriptionInterface* pending_remote_description() + const { + return nullptr; + } + + // Create a new offer. + // The CreateSessionDescriptionObserver callback will be called when done. + virtual void CreateOffer(CreateSessionDescriptionObserver* observer, + const MediaConstraintsInterface* constraints) {} + + // TODO(jiayl): remove the default impl and the old interface when chromium + // code is updated. + virtual void CreateOffer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) {} + + // Create an answer to an offer. + // The CreateSessionDescriptionObserver callback will be called when done. + virtual void CreateAnswer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) {} + // Deprecated - use version above. + // TODO(hta): Remove and remove default implementations when all callers + // are updated. 
+ virtual void CreateAnswer(CreateSessionDescriptionObserver* observer, + const MediaConstraintsInterface* constraints) {} + + // Sets the local session description. + // The PeerConnection takes the ownership of |desc| even if it fails. + // The |observer| callback will be called when done. + // TODO(deadbeef): Change |desc| to be a unique_ptr, to make it clear + // that this method always takes ownership of it. + virtual void SetLocalDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) = 0; + // Sets the remote session description. + // The PeerConnection takes the ownership of |desc| even if it fails. + // The |observer| callback will be called when done. + // TODO(hbos): Remove when Chrome implements the new signature. + virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) {} + // TODO(hbos): Make pure virtual when Chrome has updated its signature. + virtual void SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) {} + + // TODO(deadbeef): Make this pure virtual once all Chrome subclasses of + // PeerConnectionInterface implement it. + virtual PeerConnectionInterface::RTCConfiguration GetConfiguration() { + return PeerConnectionInterface::RTCConfiguration(); + } + + // Sets the PeerConnection's global configuration to |config|. + // + // The members of |config| that may be changed are |type|, |servers|, + // |ice_candidate_pool_size| and |prune_turn_ports| (though the candidate + // pool size can't be changed after the first call to SetLocalDescription). + // Note that this means the BUNDLE and RTCP-multiplexing policies cannot be + // changed with this method. + // + // Any changes to STUN/TURN servers or ICE candidate policy will affect the + // next gathering phase, and cause the next call to createOffer to generate + // new ICE credentials, as described in JSEP. 
This also occurs when + // |prune_turn_ports| changes, for the same reasoning. + // + // If an error occurs, returns false and populates |error| if non-null: + // - INVALID_MODIFICATION if |config| contains a modified parameter other + // than one of the parameters listed above. + // - INVALID_RANGE if |ice_candidate_pool_size| is out of range. + // - SYNTAX_ERROR if parsing an ICE server URL failed. + // - INVALID_PARAMETER if a TURN server is missing |username| or |password|. + // - INTERNAL_ERROR if an unexpected error occurred. + // + // TODO(deadbeef): Make this pure virtual once all Chrome subclasses of + // PeerConnectionInterface implement it. + virtual bool SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& config, + RTCError* error) { + return false; + } + // Version without error output param for backwards compatibility. + // TODO(deadbeef): Remove once chromium is updated. + virtual bool SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& config) { + return false; + } + + // Provides a remote candidate to the ICE Agent. + // A copy of the |candidate| will be created and added to the remote + // description. So the caller of this method still has the ownership of the + // |candidate|. + virtual bool AddIceCandidate(const IceCandidateInterface* candidate) = 0; + + // Removes a group of remote candidates from the ICE agent. Needed mainly for + // continual gathering, to avoid an ever-growing list of candidates as + // networks come and go. + virtual bool RemoveIceCandidates( + const std::vector& candidates) { + return false; + } + + // Register a metric observer (used by chromium). It's reference counted, and + // this method takes a reference. RegisterUMAObserver(nullptr) will release + // the reference. + // TODO(deadbeef): Take argument as scoped_refptr? + virtual void RegisterUMAObserver(UMAObserver* observer) = 0; + + // 0 <= min <= current <= max should hold for set parameters. 
+ struct BitrateParameters { + rtc::Optional min_bitrate_bps; + rtc::Optional current_bitrate_bps; + rtc::Optional max_bitrate_bps; + }; + + // SetBitrate limits the bandwidth allocated for all RTP streams sent by + // this PeerConnection. Other limitations might affect these limits and + // are respected (for example "b=AS" in SDP). + // + // Setting |current_bitrate_bps| will reset the current bitrate estimate + // to the provided value. + virtual RTCError SetBitrate(const BitrateSettings& bitrate) { + BitrateParameters bitrate_parameters; + bitrate_parameters.min_bitrate_bps = bitrate.min_bitrate_bps; + bitrate_parameters.current_bitrate_bps = bitrate.start_bitrate_bps; + bitrate_parameters.max_bitrate_bps = bitrate.max_bitrate_bps; + return SetBitrate(bitrate_parameters); + } + + // TODO(nisse): Deprecated - use version above. These two default + // implementations require subclasses to implement one or the other + // of the methods. + virtual RTCError SetBitrate(const BitrateParameters& bitrate_parameters) { + BitrateSettings bitrate; + bitrate.min_bitrate_bps = bitrate_parameters.min_bitrate_bps; + bitrate.start_bitrate_bps = bitrate_parameters.current_bitrate_bps; + bitrate.max_bitrate_bps = bitrate_parameters.max_bitrate_bps; + return SetBitrate(bitrate); + } + + // Sets current strategy. If not set default WebRTC allocator will be used. + // May be changed during an active session. The strategy + // ownership is passed with std::unique_ptr + // TODO(alexnarest): Make this pure virtual when tests will be updated + virtual void SetBitrateAllocationStrategy( + std::unique_ptr + bitrate_allocation_strategy) {} + + // Enable/disable playout of received audio streams. Enabled by default. Note + // that even if playout is enabled, streams will only be played out if the + // appropriate SDP is also applied. 
Setting |playout| to false will stop + // playout of the underlying audio device but starts a task which will poll + // for audio data every 10ms to ensure that audio processing happens and the + // audio statistics are updated. + // TODO(henrika): deprecate and remove this. + virtual void SetAudioPlayout(bool playout) {} + + // Enable/disable recording of transmitted audio streams. Enabled by default. + // Note that even if recording is enabled, streams will only be recorded if + // the appropriate SDP is also applied. + // TODO(henrika): deprecate and remove this. + virtual void SetAudioRecording(bool recording) {} + + // Returns the current SignalingState. + virtual SignalingState signaling_state() = 0; + + // Returns the aggregate state of all ICE *and* DTLS transports. + // TODO(deadbeef): Implement "PeerConnectionState" according to the standard, + // to aggregate ICE+DTLS state, and change the scope of IceConnectionState to + // be just the ICE layer. See: crbug.com/webrtc/6145 + virtual IceConnectionState ice_connection_state() = 0; + + virtual IceGatheringState ice_gathering_state() = 0; + + // Starts RtcEventLog using existing file. Takes ownership of |file| and + // passes it on to Call, which will take the ownership. If the + // operation fails the file will be closed. The logging will stop + // automatically after 10 minutes have passed, or when the StopRtcEventLog + // function is called. + // TODO(eladalon): Deprecate and remove this. + virtual bool StartRtcEventLog(rtc::PlatformFile file, + int64_t max_size_bytes) { + return false; + } + + // Start RtcEventLog using an existing output-sink. Takes ownership of + // |output| and passes it on to Call, which will take the ownership. If the + // operation fails the output will be closed and deallocated. The event log + // will send serialized events to the output object every |output_period_ms|. 
+ virtual bool StartRtcEventLog(std::unique_ptr output, + int64_t output_period_ms) { + return false; + } + + // Stops logging the RtcEventLog. + // TODO(ivoc): Make this pure virtual when Chrome is updated. + virtual void StopRtcEventLog() {} + + // Terminates all media, closes the transports, and in general releases any + // resources used by the PeerConnection. This is an irreversible operation. + // + // Note that after this method completes, the PeerConnection will no longer + // use the PeerConnectionObserver interface passed in on construction, and + // thus the observer object can be safely destroyed. + virtual void Close() = 0; + + protected: + // Dtor protected as objects shouldn't be deleted via this interface. + ~PeerConnectionInterface() {} +}; + +// PeerConnection callback interface, used for RTCPeerConnection events. +// Application should implement these methods. +class PeerConnectionObserver { + public: + virtual ~PeerConnectionObserver() = default; + + // Triggered when the SignalingState changed. + virtual void OnSignalingChange( + PeerConnectionInterface::SignalingState new_state) = 0; + + // Triggered when media is received on a new stream from remote peer. + virtual void OnAddStream(rtc::scoped_refptr stream) {} + + // Triggered when a remote peer closes a stream. + virtual void OnRemoveStream(rtc::scoped_refptr stream) { + } + + // Triggered when a remote peer opens a data channel. + virtual void OnDataChannel( + rtc::scoped_refptr data_channel) = 0; + + // Triggered when renegotiation is needed. For example, an ICE restart + // has begun. + virtual void OnRenegotiationNeeded() = 0; + + // Called any time the IceConnectionState changes. + // + // Note that our ICE states lag behind the standard slightly. The most + // notable differences include the fact that "failed" occurs after 15 + // seconds, not 30, and this actually represents a combination ICE + DTLS + // state, so it may be "failed" if DTLS fails while ICE succeeds. 
+ virtual void OnIceConnectionChange( + PeerConnectionInterface::IceConnectionState new_state) = 0; + + // Called any time the IceGatheringState changes. + virtual void OnIceGatheringChange( + PeerConnectionInterface::IceGatheringState new_state) = 0; + + // A new ICE candidate has been gathered. + virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0; + + // Ice candidates have been removed. + // TODO(honghaiz): Make this a pure virtual method when all its subclasses + // implement it. + virtual void OnIceCandidatesRemoved( + const std::vector& candidates) {} + + // Called when the ICE connection receiving status changes. + virtual void OnIceConnectionReceivingChange(bool receiving) {} + + // This is called when a receiver and its track are created. + // TODO(zhihuang): Make this pure virtual when all subclasses implement it. + // Note: This is called with both Plan B and Unified Plan semantics. Unified + // Plan users should prefer OnTrack, OnAddTrack is only called as backwards + // compatibility (and is called in the exact same situations as OnTrack). + virtual void OnAddTrack( + rtc::scoped_refptr receiver, + const std::vector>& streams) {} + + // This is called when signaling indicates a transceiver will be receiving + // media from the remote endpoint. This is fired during a call to + // SetRemoteDescription. The receiving track can be accessed by: + // |transceiver->receiver()->track()| and its associated streams by + // |transceiver->receiver()->streams()|. + // Note: This will only be called if Unified Plan semantics are specified. + // This behavior is specified in section 2.2.8.2.5 of the "Set the + // RTCSessionDescription" algorithm: + // https://w3c.github.io/webrtc-pc/#set-description + virtual void OnTrack( + rtc::scoped_refptr transceiver) {} + + // Called when signaling indicates that media will no longer be received on a + // track. 
+ // With Plan B semantics, the given receiver will have been removed from the + // PeerConnection and the track muted. + // With Unified Plan semantics, the receiver will remain but the transceiver + // will have changed direction to either sendonly or inactive. + // https://w3c.github.io/webrtc-pc/#process-remote-track-removal + // TODO(hbos,deadbeef): Make pure virtual when all subclasses implement it. + virtual void OnRemoveTrack( + rtc::scoped_refptr receiver) {} +}; + +// PeerConnectionDependencies holds all of PeerConnections dependencies. +// A dependency is distinct from a configuration as it defines significant +// executable code that can be provided by a user of the API. +// +// All new dependencies should be added as a unique_ptr to allow the +// PeerConnection object to be the definitive owner of the dependencies +// lifetime making injection safer. +struct PeerConnectionDependencies final { + explicit PeerConnectionDependencies(PeerConnectionObserver* observer_in) + : observer(observer_in) {} + // This object is not copyable or assignable. + PeerConnectionDependencies(const PeerConnectionDependencies&) = delete; + PeerConnectionDependencies& operator=(const PeerConnectionDependencies&) = + delete; + // This object is only moveable. + PeerConnectionDependencies(PeerConnectionDependencies&&) = default; + PeerConnectionDependencies& operator=(PeerConnectionDependencies&&) = default; + // Mandatory dependencies + PeerConnectionObserver* observer = nullptr; + // Optional dependencies + std::unique_ptr allocator; + std::unique_ptr cert_generator; + std::unique_ptr tls_cert_verifier; +}; + +// PeerConnectionFactoryInterface is the factory interface used for creating +// PeerConnection, MediaStream and MediaStreamTrack objects. 
+// +// The simplest method for obtaiing one, CreatePeerConnectionFactory will +// create the required libjingle threads, socket and network manager factory +// classes for networking if none are provided, though it requires that the +// application runs a message loop on the thread that called the method (see +// explanation below) +// +// If an application decides to provide its own threads and/or implementation +// of networking classes, it should use the alternate +// CreatePeerConnectionFactory method which accepts threads as input, and use +// the CreatePeerConnection version that takes a PortAllocator as an argument. +class PeerConnectionFactoryInterface : public rtc::RefCountInterface { + public: + class Options { + public: + Options() : crypto_options(rtc::CryptoOptions::NoGcm()) {} + + // If set to true, created PeerConnections won't enforce any SRTP + // requirement, allowing unsecured media. Should only be used for + // testing/debugging. + bool disable_encryption = false; + + // Deprecated. The only effect of setting this to true is that + // CreateDataChannel will fail, which is not that useful. + bool disable_sctp_data_channels = false; + + // If set to true, any platform-supported network monitoring capability + // won't be used, and instead networks will only be updated via polling. + // + // This only has an effect if a PeerConnection is created with the default + // PortAllocator implementation. + bool disable_network_monitor = false; + + // Sets the network types to ignore. For instance, calling this with + // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and + // loopback interfaces. + int network_ignore_mask = rtc::kDefaultNetworkIgnoreMask; + + // Sets the maximum supported protocol version. The highest version + // supported by both ends will be used for the connection, i.e. if one + // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used. 
+ rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + + // Sets crypto related options, e.g. enabled cipher suites. + rtc::CryptoOptions crypto_options; + }; + + // Set the options to be used for subsequently created PeerConnections. + virtual void SetOptions(const Options& options) = 0; + + // The preferred way to create a new peer connection. Simply provide the + // configuration and a PeerConnectionDependencies structure. + // TODO(benwright): Make pure virtual once downstream mock PC factory classes + // are updated. + virtual rtc::scoped_refptr CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies) { + return nullptr; + } + + // Deprecated; |allocator| and |cert_generator| may be null, in which case + // default implementations will be used. + // + // |observer| must not be null. + // + // Note that this method does not take ownership of |observer|; it's the + // responsibility of the caller to delete it. It can be safely deleted after + // Close has been called on the returned PeerConnection, which ensures no + // more observer callbacks will be invoked. + virtual rtc::scoped_refptr CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + std::unique_ptr allocator, + std::unique_ptr cert_generator, + PeerConnectionObserver* observer) { + return nullptr; + } + // Deprecated; should use RTCConfiguration for everything that previously + // used constraints. + virtual rtc::scoped_refptr CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + const MediaConstraintsInterface* constraints, + std::unique_ptr allocator, + std::unique_ptr cert_generator, + PeerConnectionObserver* observer) { + return nullptr; + } + + virtual rtc::scoped_refptr CreateLocalMediaStream( + const std::string& stream_id) = 0; + + // Creates an AudioSourceInterface. + // |options| decides audio processing settings. 
+ virtual rtc::scoped_refptr CreateAudioSource( + const cricket::AudioOptions& options) = 0; + // Deprecated - use version above. + // Can use CopyConstraintsIntoAudioOptions to bridge the gap. + virtual rtc::scoped_refptr CreateAudioSource( + const MediaConstraintsInterface* constraints) { + return nullptr; + } + + // Creates a VideoTrackSourceInterface from |capturer|. + // TODO(deadbeef): We should aim to remove cricket::VideoCapturer from the + // API. It's mainly used as a wrapper around webrtc's provided + // platform-specific capturers, but these should be refactored to use + // VideoTrackSourceInterface directly. + // TODO(deadbeef): Make pure virtual once downstream mock PC factory classes + // are updated. + virtual rtc::scoped_refptr CreateVideoSource( + std::unique_ptr capturer) { + return nullptr; + } + + // A video source creator that allows selection of resolution and frame rate. + // |constraints| decides video resolution and frame rate but can be null. + // In the null case, use the version above. + // + // |constraints| is only used for the invocation of this method, and can + // safely be destroyed afterwards. + virtual rtc::scoped_refptr CreateVideoSource( + std::unique_ptr capturer, + const MediaConstraintsInterface* constraints) { + return nullptr; + } + + // Deprecated; please use the versions that take unique_ptrs above. + // TODO(deadbeef): Remove these once safe to do so. + virtual rtc::scoped_refptr CreateVideoSource( + cricket::VideoCapturer* capturer) { + return CreateVideoSource(std::unique_ptr(capturer)); + } + virtual rtc::scoped_refptr CreateVideoSource( + cricket::VideoCapturer* capturer, + const MediaConstraintsInterface* constraints) { + return CreateVideoSource(std::unique_ptr(capturer), + constraints); + } + + // Creates a new local VideoTrack. The same |source| can be used in several + // tracks. 
+ virtual rtc::scoped_refptr CreateVideoTrack( + const std::string& label, + VideoTrackSourceInterface* source) = 0; + + // Creates an new AudioTrack. At the moment |source| can be null. + virtual rtc::scoped_refptr + CreateAudioTrack(const std::string& label, + AudioSourceInterface* source) = 0; + + // Starts AEC dump using existing file. Takes ownership of |file| and passes + // it on to VoiceEngine (via other objects) immediately, which will take + // the ownerhip. If the operation fails, the file will be closed. + // A maximum file size in bytes can be specified. When the file size limit is + // reached, logging is stopped automatically. If max_size_bytes is set to a + // value <= 0, no limit will be used, and logging will continue until the + // StopAecDump function is called. + virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) = 0; + + // Stops logging the AEC dump. + virtual void StopAecDump() = 0; + + protected: + // Dtor and ctor protected as objects shouldn't be created or deleted via + // this interface. + PeerConnectionFactoryInterface() {} + ~PeerConnectionFactoryInterface() {} // NOLINT +}; + +// Create a new instance of PeerConnectionFactoryInterface. +// +// This method relies on the thread it's called on as the "signaling thread" +// for the PeerConnectionFactory it creates. +// +// As such, if the current thread is not already running an rtc::Thread message +// loop, an application using this method must eventually either call +// rtc::Thread::Current()->Run(), or call +// rtc::Thread::Current()->ProcessMessages() within the application's own +// message loop. +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory); + +// Create a new instance of PeerConnectionFactoryInterface. +// +// |network_thread|, |worker_thread| and |signaling_thread| are +// the only mandatory parameters. 
+// +// If non-null, a reference is added to |default_adm|, and ownership of +// |video_encoder_factory| and |video_decoder_factory| is transferred to the +// returned factory. +// TODO(deadbeef): Use rtc::scoped_refptr<> and std::unique_ptr<> to make this +// ownership transfer and ref counting more obvious. +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + AudioDeviceModule* default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + cricket::WebRtcVideoEncoderFactory* video_encoder_factory, + cricket::WebRtcVideoDecoderFactory* video_decoder_factory); + +// Create a new instance of PeerConnectionFactoryInterface with optional +// external audio mixed and audio processing modules. +// +// If |audio_mixer| is null, an internal audio mixer will be created and used. +// If |audio_processing| is null, an internal audio processing module will be +// created and used. +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + AudioDeviceModule* default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + cricket::WebRtcVideoEncoderFactory* video_encoder_factory, + cricket::WebRtcVideoDecoderFactory* video_decoder_factory, + rtc::scoped_refptr audio_mixer, + rtc::scoped_refptr audio_processing); + +// Create a new instance of PeerConnectionFactoryInterface with optional +// external audio mixer, audio processing, and fec controller modules. +// +// If |audio_mixer| is null, an internal audio mixer will be created and used. +// If |audio_processing| is null, an internal audio processing module will be +// created and used. +// If |fec_controller_factory| is null, an internal fec controller module will +// be created and used. 
+// If |network_controller_factory| is provided, it will be used if enabled via +// field trial. +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + AudioDeviceModule* default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + cricket::WebRtcVideoEncoderFactory* video_encoder_factory, + cricket::WebRtcVideoDecoderFactory* video_decoder_factory, + rtc::scoped_refptr audio_mixer, + rtc::scoped_refptr audio_processing, + std::unique_ptr fec_controller_factory, + std::unique_ptr + network_controller_factory = nullptr); + +// Create a new instance of PeerConnectionFactoryInterface with optional video +// codec factories. These video factories represents all video codecs, i.e. no +// extra internal video codecs will be added. +// When building WebRTC with rtc_use_builtin_sw_codecs = false, this is the +// only available CreatePeerConnectionFactory overload. +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + rtc::scoped_refptr default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + rtc::scoped_refptr audio_mixer, + rtc::scoped_refptr audio_processing); + +// Create a new instance of PeerConnectionFactoryInterface with external audio +// mixer. +// +// If |audio_mixer| is null, an internal audio mixer will be created and used. 
+rtc::scoped_refptr +CreatePeerConnectionFactoryWithAudioMixer( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + AudioDeviceModule* default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + cricket::WebRtcVideoEncoderFactory* video_encoder_factory, + cricket::WebRtcVideoDecoderFactory* video_decoder_factory, + rtc::scoped_refptr audio_mixer); + +// Create a new instance of PeerConnectionFactoryInterface. +// Same thread is used as worker and network thread. +inline rtc::scoped_refptr +CreatePeerConnectionFactory( + rtc::Thread* worker_and_network_thread, + rtc::Thread* signaling_thread, + AudioDeviceModule* default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + cricket::WebRtcVideoEncoderFactory* video_encoder_factory, + cricket::WebRtcVideoDecoderFactory* video_decoder_factory) { + return CreatePeerConnectionFactory( + worker_and_network_thread, worker_and_network_thread, signaling_thread, + default_adm, audio_encoder_factory, audio_decoder_factory, + video_encoder_factory, video_decoder_factory); +} + +// This is a lower-level version of the CreatePeerConnectionFactory functions +// above. It's implemented in the "peerconnection" build target, whereas the +// above methods are only implemented in the broader "libjingle_peerconnection" +// build target, which pulls in the implementations of every module webrtc may +// use. +// +// If an application knows it will only require certain modules, it can reduce +// webrtc's impact on its binary size by depending only on the "peerconnection" +// target and the modules the application requires, using +// CreateModularPeerConnectionFactory instead of one of the +// CreatePeerConnectionFactory methods above. 
For example, if an application +// only uses WebRTC for audio, it can pass in null pointers for the +// video-specific interfaces, and omit the corresponding modules from its +// build. +// +// If |network_thread| or |worker_thread| are null, the PeerConnectionFactory +// will create the necessary thread internally. If |signaling_thread| is null, +// the PeerConnectionFactory will use the thread on which this method is called +// as the signaling thread, wrapping it in an rtc::Thread object if needed. +// +// If non-null, a reference is added to |default_adm|, and ownership of +// |video_encoder_factory| and |video_decoder_factory| is transferred to the +// returned factory. +// +// If |audio_mixer| is null, an internal audio mixer will be created and used. +// +// TODO(deadbeef): Use rtc::scoped_refptr<> and std::unique_ptr<> to make this +// ownership transfer and ref counting more obvious. +// +// TODO(deadbeef): Encapsulate these modules in a struct, so that when a new +// module is inevitably exposed, we can just add a field to the struct instead +// of adding a whole new CreateModularPeerConnectionFactory overload. 
+rtc::scoped_refptr +CreateModularPeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + std::unique_ptr media_engine, + std::unique_ptr call_factory, + std::unique_ptr event_log_factory); + +rtc::scoped_refptr +CreateModularPeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + std::unique_ptr media_engine, + std::unique_ptr call_factory, + std::unique_ptr event_log_factory, + std::unique_ptr fec_controller_factory, + std::unique_ptr + network_controller_factory = nullptr); + +} // namespace webrtc + +#endif // API_PEERCONNECTIONINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionproxy.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionproxy.h new file mode 100644 index 000000000000..3abcf96f6076 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/peerconnectionproxy.h @@ -0,0 +1,156 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_PEERCONNECTIONPROXY_H_ +#define API_PEERCONNECTIONPROXY_H_ + +#include +#include +#include + +#include "api/peerconnectioninterface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. 
+BEGIN_SIGNALING_PROXY_MAP(PeerConnection) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_METHOD0(rtc::scoped_refptr, local_streams) + PROXY_METHOD0(rtc::scoped_refptr, remote_streams) + PROXY_METHOD1(bool, AddStream, MediaStreamInterface*) + PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*) + PROXY_METHOD2(RTCErrorOr>, + AddTrack, + rtc::scoped_refptr, + const std::vector&); + PROXY_METHOD2(rtc::scoped_refptr, + AddTrack, + MediaStreamTrackInterface*, + std::vector) + PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*) + PROXY_METHOD1(RTCErrorOr>, + AddTransceiver, + rtc::scoped_refptr) + PROXY_METHOD2(RTCErrorOr>, + AddTransceiver, + rtc::scoped_refptr, + const RtpTransceiverInit&) + PROXY_METHOD1(RTCErrorOr>, + AddTransceiver, + cricket::MediaType) + PROXY_METHOD2(RTCErrorOr>, + AddTransceiver, + cricket::MediaType, + const RtpTransceiverInit&) + PROXY_METHOD1(rtc::scoped_refptr, + CreateDtmfSender, + AudioTrackInterface*) + PROXY_METHOD2(rtc::scoped_refptr, + CreateSender, + const std::string&, + const std::string&) + PROXY_CONSTMETHOD0(std::vector>, + GetSenders) + PROXY_CONSTMETHOD0(std::vector>, + GetReceivers) + PROXY_CONSTMETHOD0(std::vector>, + GetTransceivers) + PROXY_METHOD3(bool, + GetStats, + StatsObserver*, + MediaStreamTrackInterface*, + StatsOutputLevel) + PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*) + PROXY_METHOD2(void, + GetStats, + rtc::scoped_refptr, + rtc::scoped_refptr); + PROXY_METHOD2(void, + GetStats, + rtc::scoped_refptr, + rtc::scoped_refptr); + PROXY_METHOD2(rtc::scoped_refptr, + CreateDataChannel, + const std::string&, + const DataChannelInit*) + PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description) + PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description) + PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + pending_local_description) + PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + pending_remote_description) + PROXY_CONSTMETHOD0(const 
SessionDescriptionInterface*, + current_local_description) + PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + current_remote_description) + PROXY_METHOD2(void, + CreateOffer, + CreateSessionDescriptionObserver*, + const MediaConstraintsInterface*) + PROXY_METHOD2(void, + CreateAnswer, + CreateSessionDescriptionObserver*, + const MediaConstraintsInterface*) + PROXY_METHOD2(void, + CreateOffer, + CreateSessionDescriptionObserver*, + const RTCOfferAnswerOptions&) + PROXY_METHOD2(void, + CreateAnswer, + CreateSessionDescriptionObserver*, + const RTCOfferAnswerOptions&) + PROXY_METHOD2(void, + SetLocalDescription, + SetSessionDescriptionObserver*, + SessionDescriptionInterface*) + PROXY_METHOD2(void, + SetRemoteDescription, + SetSessionDescriptionObserver*, + SessionDescriptionInterface*) + PROXY_METHOD2(void, + SetRemoteDescription, + std::unique_ptr, + rtc::scoped_refptr); + PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration); + PROXY_METHOD2(bool, + SetConfiguration, + const PeerConnectionInterface::RTCConfiguration&, + RTCError*); + PROXY_METHOD1(bool, + SetConfiguration, + const PeerConnectionInterface::RTCConfiguration&); + PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*) + PROXY_METHOD1(bool, + RemoveIceCandidates, + const std::vector&); + PROXY_METHOD1(void, SetAudioPlayout, bool) + PROXY_METHOD1(void, SetAudioRecording, bool) + PROXY_METHOD1(void, RegisterUMAObserver, UMAObserver*) + PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&); + PROXY_METHOD1(void, + SetBitrateAllocationStrategy, + std::unique_ptr); + PROXY_METHOD0(SignalingState, signaling_state) + PROXY_METHOD0(IceConnectionState, ice_connection_state) + PROXY_METHOD0(IceGatheringState, ice_gathering_state) + PROXY_METHOD2(bool, StartRtcEventLog, rtc::PlatformFile, int64_t) + PROXY_METHOD2(bool, + StartRtcEventLog, + std::unique_ptr, + int64_t); + PROXY_METHOD0(void, StopRtcEventLog) + PROXY_METHOD0(void, Close) +END_PROXY_MAP() + +} // 
namespace webrtc + +#endif // API_PEERCONNECTIONPROXY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/proxy.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/proxy.h new file mode 100644 index 000000000000..dd7182e55460 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/proxy.h @@ -0,0 +1,572 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains Macros for creating proxies for webrtc MediaStream and +// PeerConnection classes. +// TODO(deadbeef): Move this to pc/; this is part of the implementation. + +// +// Example usage: +// +// class TestInterface : public rtc::RefCountInterface { +// public: +// std::string FooA() = 0; +// std::string FooB(bool arg1) const = 0; +// std::string FooC(bool arg1) = 0; +// }; +// +// Note that return types can not be a const reference. +// +// class Test : public TestInterface { +// ... implementation of the interface. +// }; +// +// BEGIN_PROXY_MAP(Test) +// PROXY_SIGNALING_THREAD_DESTRUCTOR() +// PROXY_METHOD0(std::string, FooA) +// PROXY_CONSTMETHOD1(std::string, FooB, arg1) +// PROXY_WORKER_METHOD1(std::string, FooC, arg1) +// END_PROXY_MAP() +// +// Where the destructor and first two methods are invoked on the signaling +// thread, and the third is invoked on the worker thread. +// +// The proxy can be created using +// +// TestProxy::Create(Thread* signaling_thread, Thread* worker_thread, +// TestInterface*). +// +// The variant defined with BEGIN_SIGNALING_PROXY_MAP is unaware of +// the worker thread, and invokes all methods on the signaling thread. 
+// +// The variant defined with BEGIN_OWNED_PROXY_MAP does not use +// refcounting, and instead just takes ownership of the object being proxied. + +#ifndef API_PROXY_H_ +#define API_PROXY_H_ + +#include +#include + +#include "rtc_base/event.h" +#include "rtc_base/refcountedobject.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +template +class ReturnType { + public: + template + void Invoke(C* c, M m) { r_ = (c->*m)(); } + template + void Invoke(C* c, M m, T1 a1) { + r_ = (c->*m)(std::move(a1)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2) { + r_ = (c->*m)(std::move(a1), std::move(a2)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { + r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) { + r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) { + r_ = (c->*m)(std::move(a1), std::move(a2), std::move(a3), std::move(a4), + std::move(a5)); + } + + R moved_result() { return std::move(r_); } + + private: + R r_; +}; + +template <> +class ReturnType { + public: + template + void Invoke(C* c, M m) { (c->*m)(); } + template + void Invoke(C* c, M m, T1 a1) { + (c->*m)(std::move(a1)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2) { + (c->*m)(std::move(a1), std::move(a2)); + } + template + void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { + (c->*m)(std::move(a1), std::move(a2), std::move(a3)); + } + + void moved_result() {} +}; + +namespace internal { + +class SynchronousMethodCall + : public rtc::MessageData, + public rtc::MessageHandler { + public: + explicit SynchronousMethodCall(rtc::MessageHandler* proxy); + ~SynchronousMethodCall() override; + + void Invoke(const rtc::Location& posted_from, rtc::Thread* t); + + private: + void OnMessage(rtc::Message*) override; + + std::unique_ptr e_; + rtc::MessageHandler* proxy_; +}; + +} // namespace internal + +template +class 
MethodCall0 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(); + MethodCall0(C* c, Method m) : c_(c), m_(m) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); } + + C* c_; + Method m_; + ReturnType r_; +}; + +template +class ConstMethodCall0 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)() const; + ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); } + + C* c_; + Method m_; + ReturnType r_; +}; + +template +class MethodCall1 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(T1 a1); + MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, std::move(a1_)); } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; +}; + +template +class ConstMethodCall1 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(T1 a1) const; + ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(std::move(a1)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, std::move(a1_)); } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; +}; + +template +class MethodCall2 : public rtc::Message, + public rtc::MessageHandler { + 
public: + typedef R (C::*Method)(T1 a1, T2 a2); + MethodCall2(C* c, Method m, T1 a1, T2 a2) + : c_(c), m_(m), a1_(std::move(a1)), a2_(std::move(a2)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { + r_.Invoke(c_, m_, std::move(a1_), std::move(a2_)); + } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; + T2 a2_; +}; + +template +class MethodCall3 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(T1 a1, T2 a2, T3 a3); + MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3) + : c_(c), + m_(m), + a1_(std::move(a1)), + a2_(std::move(a2)), + a3_(std::move(a3)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { + r_.Invoke(c_, m_, std::move(a1_), std::move(a2_), std::move(a3_)); + } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; + T2 a2_; + T3 a3_; +}; + +template +class MethodCall4 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4); + MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4) + : c_(c), + m_(m), + a1_(std::move(a1)), + a2_(std::move(a2)), + a3_(std::move(a3)), + a4_(std::move(a4)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { + r_.Invoke(c_, m_, std::move(a1_), std::move(a2_), std::move(a3_), + std::move(a4_)); + } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; + T2 a2_; + T3 a3_; + T4 a4_; +}; + +template +class MethodCall5 : public rtc::Message, + public rtc::MessageHandler { + public: + typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5); + MethodCall5(C* c, 
Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) + : c_(c), + m_(m), + a1_(std::move(a1)), + a2_(std::move(a2)), + a3_(std::move(a3)), + a4_(std::move(a4)), + a5_(std::move(a5)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { + r_.Invoke(c_, m_, std::move(a1_), std::move(a2_), std::move(a3_), + std::move(a4_), std::move(a5_)); + } + + C* c_; + Method m_; + ReturnType r_; + T1 a1_; + T2 a2_; + T3 a3_; + T4 a4_; + T5 a5_; +}; + + +// Helper macros to reduce code duplication. +#define PROXY_MAP_BOILERPLATE(c) \ + template \ + class c##ProxyWithInternal; \ + typedef c##ProxyWithInternal c##Proxy; \ + template \ + class c##ProxyWithInternal : public c##Interface { \ + protected: \ + typedef c##Interface C; \ + \ + public: \ + const INTERNAL_CLASS* internal() const { return c_; } \ + INTERNAL_CLASS* internal() { return c_; } + +#define END_PROXY_MAP() \ + }; + +#define SIGNALING_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* signaling_thread, INTERNAL_CLASS* c) \ + : signaling_thread_(signaling_thread), c_(c) {} \ + \ + private: \ + mutable rtc::Thread* signaling_thread_; + +#define WORKER_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* signaling_thread, \ + rtc::Thread* worker_thread, INTERNAL_CLASS* c) \ + : signaling_thread_(signaling_thread), \ + worker_thread_(worker_thread), \ + c_(c) {} \ + \ + private: \ + mutable rtc::Thread* signaling_thread_; \ + mutable rtc::Thread* worker_thread_; + +// Note that the destructor is protected so that the proxy can only be +// destroyed via RefCountInterface. 
+#define REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + ~c##ProxyWithInternal() { \ + MethodCall0 call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { c_ = nullptr; } \ + rtc::scoped_refptr c_; + +// Note: This doesn't use a unique_ptr, because it intends to handle a corner +// case where an object's deletion triggers a callback that calls back into +// this proxy object. If relying on a unique_ptr to delete the object, its +// inner pointer would be set to null before this reentrant callback would have +// a chance to run, resulting in a segfault. +#define OWNED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + ~c##ProxyWithInternal() { \ + MethodCall0 call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { delete c_; } \ + INTERNAL_CLASS* c_; + +#define BEGIN_SIGNALING_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + SIGNALING_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* signaling_thread, INTERNAL_CLASS* c) { \ + return new rtc::RefCountedObject(signaling_thread, \ + c); \ + } + +#define BEGIN_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + WORKER_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ + INTERNAL_CLASS* c) { \ + return new rtc::RefCountedObject(signaling_thread, \ + worker_thread, c); \ + } + +#define BEGIN_OWNED_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + WORKER_PROXY_MAP_BOILERPLATE(c) \ + OWNED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static std::unique_ptr Create( \ + rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ + std::unique_ptr c) { \ + return std::unique_ptr(new c##ProxyWithInternal( \ + signaling_thread, 
worker_thread, c.release())); \ + } + +#define PROXY_SIGNALING_THREAD_DESTRUCTOR() \ + private: \ + rtc::Thread* destructor_thread() const { return signaling_thread_; } \ + \ + public: // NOLINTNEXTLINE + +#define PROXY_WORKER_THREAD_DESTRUCTOR() \ + private: \ + rtc::Thread* destructor_thread() const { return worker_thread_; } \ + \ + public: // NOLINTNEXTLINE + +#define PROXY_METHOD0(r, method) \ + r method() override { \ + MethodCall0 call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall0 call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + MethodCall1 call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + ConstMethodCall1 call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + MethodCall2 call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + MethodCall3 call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ + MethodCall4 call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ + MethodCall5 call(c_, &C::method, 
std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4), std::move(a5)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +// Define methods which should be invoked on the worker thread. +#define PROXY_WORKER_METHOD0(r, method) \ + r method() override { \ + MethodCall0 call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall0 call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + MethodCall1 call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + ConstMethodCall1 call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + MethodCall2 call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) const override { \ + ConstMethodCall2 call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + MethodCall3 call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD3(r, method, t1, t2) \ + r method(t1 a1, t2 a2, t3 a3) const override { \ + ConstMethodCall3 call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +} // namespace webrtc + +#endif // API_PROXY_H_ diff 
--git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/refcountedbase.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/refcountedbase.h new file mode 100644 index 000000000000..8c26efd3e3bb --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/refcountedbase.h @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_REFCOUNTEDBASE_H_ +#define API_REFCOUNTEDBASE_H_ + +#include "rtc_base/constructormagic.h" +#include "rtc_base/refcount.h" +#include "rtc_base/refcounter.h" + +namespace rtc { + +class RefCountedBase { + public: + RefCountedBase() = default; + + void AddRef() const { ref_count_.IncRef(); } + RefCountReleaseStatus Release() const { + const auto status = ref_count_.DecRef(); + if (status == RefCountReleaseStatus::kDroppedLastRef) { + delete this; + } + return status; + } + + protected: + virtual ~RefCountedBase() = default; + + private: + mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase); +}; + +} // namespace rtc + +#endif // API_REFCOUNTEDBASE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtcerror.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtcerror.h new file mode 100644 index 000000000000..c87ce916501f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtcerror.h @@ -0,0 +1,310 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTCERROR_H_ +#define API_RTCERROR_H_ + +#ifdef UNIT_TEST +#include +#endif // UNIT_TEST +#include +#include // For std::move. + +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +// Enumeration to represent distinct classes of errors that an application +// may wish to act upon differently. These roughly map to DOMExceptions or +// RTCError "errorDetailEnum" values in the web API, as described in the +// comments below. +enum class RTCErrorType { + // No error. + NONE, + + // An operation is valid, but currently unsupported. + // Maps to OperationError DOMException. + UNSUPPORTED_OPERATION, + + // A supplied parameter is valid, but currently unsupported. + // Maps to OperationError DOMException. + UNSUPPORTED_PARAMETER, + + // General error indicating that a supplied parameter is invalid. + // Maps to InvalidAccessError or TypeError DOMException depending on context. + INVALID_PARAMETER, + + // Slightly more specific than INVALID_PARAMETER; a parameter's value was + // outside the allowed range. + // Maps to RangeError DOMException. + INVALID_RANGE, + + // Slightly more specific than INVALID_PARAMETER; an error occurred while + // parsing string input. + // Maps to SyntaxError DOMException. + SYNTAX_ERROR, + + // The object does not support this operation in its current state. + // Maps to InvalidStateError DOMException. + INVALID_STATE, + + // An attempt was made to modify the object in an invalid way. + // Maps to InvalidModificationError DOMException. + INVALID_MODIFICATION, + + // An error occurred within an underlying network protocol. + // Maps to NetworkError DOMException. 
+ NETWORK_ERROR, + + // Some resource has been exhausted; file handles, hardware resources, ports, + // etc. + // Maps to OperationError DOMException. + RESOURCE_EXHAUSTED, + + // The operation failed due to an internal error. + // Maps to OperationError DOMException. + INTERNAL_ERROR, +}; + +// Roughly corresponds to RTCError in the web api. Holds an error type, a +// message, and possibly additional information specific to that error. +// +// Doesn't contain anything beyond a type and message now, but will in the +// future as more errors are implemented. +class RTCError { + public: + // Constructors. + + // Creates a "no error" error. + RTCError() {} + explicit RTCError(RTCErrorType type) : type_(type) {} + // For performance, prefer using the constructor that takes a const char* if + // the message is a static string. + RTCError(RTCErrorType type, const char* message) + : type_(type), static_message_(message), have_string_message_(false) {} + RTCError(RTCErrorType type, std::string&& message) + : type_(type), string_message_(message), have_string_message_(true) {} + + // Delete the copy constructor and assignment operator; there aren't any use + // cases where you should need to copy an RTCError, as opposed to moving it. + // Can revisit this decision if use cases arise in the future. + RTCError(const RTCError& other) = delete; + RTCError& operator=(const RTCError& other) = delete; + + // Move constructor and move-assignment operator. + RTCError(RTCError&& other); + RTCError& operator=(RTCError&& other); + + ~RTCError(); + + // Identical to default constructed error. + // + // Preferred over the default constructor for code readability. + static RTCError OK(); + + // Error type. + RTCErrorType type() const { return type_; } + void set_type(RTCErrorType type) { type_ = type; } + + // Human-readable message describing the error. Shouldn't be used for + // anything but logging/diagnostics, since messages are not guaranteed to be + // stable. 
+ const char* message() const; + // For performance, prefer using the method that takes a const char* if the + // message is a static string. + void set_message(const char* message); + void set_message(std::string&& message); + + // Convenience method for situations where you only care whether or not an + // error occurred. + bool ok() const { return type_ == RTCErrorType::NONE; } + + private: + RTCErrorType type_ = RTCErrorType::NONE; + // For performance, we use static strings wherever possible. But in some + // cases the error string may need to be constructed, in which case an + // std::string is used. + union { + const char* static_message_ = ""; + std::string string_message_; + }; + // Whether or not |static_message_| or |string_message_| is being used in the + // above union. + bool have_string_message_ = false; +}; + +// Outputs the error as a friendly string. Update this method when adding a new +// error type. +// +// Only intended to be used for logging/disagnostics. +std::string ToString(RTCErrorType error); + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + RTCErrorType error) { + return stream << ToString(error); +} +#endif // UNIT_TEST + +// Helper macro that can be used by implementations to create an error with a +// message and log it. |message| should be a string literal or movable +// std::string. +#define LOG_AND_RETURN_ERROR_EX(type, message, severity) \ + { \ + RTC_DCHECK(type != RTCErrorType::NONE); \ + RTC_LOG(severity) << message << " (" << ToString(type) << ")"; \ + return webrtc::RTCError(type, message); \ + } + +#define LOG_AND_RETURN_ERROR(type, message) \ + LOG_AND_RETURN_ERROR_EX(type, message, LS_ERROR) + +// RTCErrorOr is the union of an RTCError object and a T object. RTCErrorOr +// models the concept of an object that is either a usable value, or an error +// Status explaining why such a value is not present. 
To this end RTCErrorOr +// does not allow its RTCErrorType value to be RTCErrorType::NONE. This is +// enforced by a debug check in most cases. +// +// The primary use-case for RTCErrorOr is as the return value of a function +// which may fail. For example, CreateRtpSender will fail if the parameters +// could not be successfully applied at the media engine level, but if +// successful will return a unique_ptr to an RtpSender. +// +// Example client usage for a RTCErrorOr>: +// +// RTCErrorOr> result = FooFactory::MakeNewFoo(arg); +// if (result.ok()) { +// std::unique_ptr foo = result.ConsumeValue(); +// foo->DoSomethingCool(); +// } else { +// RTC_LOG(LS_ERROR) << result.error(); +// } +// +// Example factory implementation returning RTCErrorOr>: +// +// RTCErrorOr> FooFactory::MakeNewFoo(int arg) { +// if (arg <= 0) { +// return RTCError(RTCErrorType::INVALID_RANGE, "Arg must be positive"); +// } else { +// return std::unique_ptr(new Foo(arg)); +// } +// } +// +template +class RTCErrorOr { + // Used to convert between RTCErrorOr/RtcErrorOr, when an implicit + // conversion from Foo to Bar exists. + template + friend class RTCErrorOr; + + public: + typedef T element_type; + + // Constructs a new RTCErrorOr with RTCErrorType::INTERNAL_ERROR error. This + // is marked 'explicit' to try to catch cases like 'return {};', where people + // think RTCErrorOr> will be initialized with an empty + // vector, instead of a RTCErrorType::INTERNAL_ERROR error. + RTCErrorOr() : error_(RTCErrorType::INTERNAL_ERROR) {} + + // Constructs a new RTCErrorOr with the given non-ok error. After calling + // this constructor, calls to value() will DCHECK-fail. + // + // NOTE: Not explicit - we want to use RTCErrorOr as a return + // value, so it is convenient and sensible to be able to do 'return + // RTCError(...)' when the return type is RTCErrorOr. + // + // REQUIRES: !error.ok(). This requirement is DCHECKed. 
+ RTCErrorOr(RTCError&& error) : error_(std::move(error)) { // NOLINT + RTC_DCHECK(!error.ok()); + } + + // Constructs a new RTCErrorOr with the given value. After calling this + // constructor, calls to value() will succeed, and calls to error() will + // return a default-constructed RTCError. + // + // NOTE: Not explicit - we want to use RTCErrorOr as a return type + // so it is convenient and sensible to be able to do 'return T()' + // when the return type is RTCErrorOr. + RTCErrorOr(T&& value) : value_(std::move(value)) {} // NOLINT + + // Delete the copy constructor and assignment operator; there aren't any use + // cases where you should need to copy an RTCErrorOr, as opposed to moving + // it. Can revisit this decision if use cases arise in the future. + RTCErrorOr(const RTCErrorOr& other) = delete; + RTCErrorOr& operator=(const RTCErrorOr& other) = delete; + + // Move constructor and move-assignment operator. + // + // Visual Studio doesn't support "= default" with move constructors or + // assignment operators (even though they compile, they segfault), so define + // them explicitly. + RTCErrorOr(RTCErrorOr&& other) + : error_(std::move(other.error_)), value_(std::move(other.value_)) {} + RTCErrorOr& operator=(RTCErrorOr&& other) { + error_ = std::move(other.error_); + value_ = std::move(other.value_); + return *this; + } + + // Conversion constructor and assignment operator; T must be copy or move + // constructible from U. + template + RTCErrorOr(RTCErrorOr other) // NOLINT + : error_(std::move(other.error_)), value_(std::move(other.value_)) {} + template + RTCErrorOr& operator=(RTCErrorOr other) { + error_ = std::move(other.error_); + value_ = std::move(other.value_); + return *this; + } + + // Returns a reference to our error. If this contains a T, then returns + // default-constructed RTCError. + const RTCError& error() const { return error_; } + + // Moves the error. 
Can be useful if, say "CreateFoo" returns an + // RTCErrorOr, and internally calls "CreateBar" which returns an + // RTCErrorOr, and wants to forward the error up the stack. + RTCError MoveError() { return std::move(error_); } + + // Returns this->error().ok() + bool ok() const { return error_.ok(); } + + // Returns a reference to our current value, or DCHECK-fails if !this->ok(). + // + // Can be convenient for the implementation; for example, a method may want + // to access the value in some way before returning it to the next method on + // the stack. + const T& value() const { + RTC_DCHECK(ok()); + return value_; + } + T& value() { + RTC_DCHECK(ok()); + return value_; + } + + // Moves our current value out of this object and returns it, or DCHECK-fails + // if !this->ok(). + T MoveValue() { + RTC_DCHECK(ok()); + return std::move(value_); + } + + private: + RTCError error_; + T value_; +}; + +} // namespace webrtc + +#endif // API_RTCERROR_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtceventlogoutput.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtceventlogoutput.h new file mode 100644 index 000000000000..67e408d648a7 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtceventlogoutput.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTCEVENTLOGOUTPUT_H_ +#define API_RTCEVENTLOGOUTPUT_H_ + +#include + +namespace webrtc { + +// NOTE: This class is still under development and may change without notice. 
+class RtcEventLogOutput { + public: + virtual ~RtcEventLogOutput() = default; + + // An output normally starts out active, though that might not always be + // the case (e.g. failed to open a file for writing). + // Once an output has become inactive (e.g. maximum file size reached), it can + // never become active again. + virtual bool IsActive() const = 0; + + // Write encoded events to an output. Returns true if the output was + // successfully written in its entirety. Otherwise, no guarantee is given + // about how much data was written, if any. The output sink becomes inactive + // after the first time |false| is returned. Write() may not be called on + // an inactive output sink. + virtual bool Write(const std::string& output) = 0; +}; + +} // namespace webrtc + +#endif // API_RTCEVENTLOGOUTPUT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtp_headers.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtp_headers.h new file mode 100644 index 000000000000..e82d121040d6 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtp_headers.h @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_RTP_HEADERS_H_ +#define API_RTP_HEADERS_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/optional.h" +#include "api/video/video_content_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" + +#include "rtc_base/checks.h" +#include "rtc_base/deprecation.h" +#include "common_types.h" // NOLINT(build/include) +#include "typedefs.h" // NOLINT(build/include) + +namespace webrtc { + +// Class to represent the value of RTP header extensions that are +// variable-length strings (e.g., RtpStreamId and RtpMid). +// Unlike std::string, it can be copied with memcpy and cleared with memset. +// +// Empty value represents unset header extension (use empty() to query). +class StringRtpHeaderExtension { + public: + // String RTP header extensions are limited to 16 bytes because it is the + // maximum length that can be encoded with one-byte header extensions. + static constexpr size_t kMaxSize = 16; + + static bool IsLegalName(rtc::ArrayView name); + + StringRtpHeaderExtension() { value_[0] = 0; } + explicit StringRtpHeaderExtension(rtc::ArrayView value) { + Set(value.data(), value.size()); + } + StringRtpHeaderExtension(const StringRtpHeaderExtension&) = default; + StringRtpHeaderExtension& operator=(const StringRtpHeaderExtension&) = + default; + + bool empty() const { return value_[0] == 0; } + const char* data() const { return value_; } + size_t size() const { return strnlen(value_, kMaxSize); } + + void Set(rtc::ArrayView value) { + Set(reinterpret_cast(value.data()), value.size()); + } + void Set(const char* data, size_t size); + + friend bool operator==(const StringRtpHeaderExtension& lhs, + const StringRtpHeaderExtension& rhs) { + return strncmp(lhs.value_, rhs.value_, kMaxSize) == 0; + } + friend bool operator!=(const StringRtpHeaderExtension& lhs, + const StringRtpHeaderExtension& rhs) { + return !(lhs == rhs); + } + + private: + char value_[kMaxSize]; +}; + +// StreamId 
represents RtpStreamId which is a string. +typedef StringRtpHeaderExtension StreamId; + +// Mid represents RtpMid which is a string. +typedef StringRtpHeaderExtension Mid; + +struct RTPHeaderExtension { + RTPHeaderExtension(); + RTPHeaderExtension(const RTPHeaderExtension& other); + RTPHeaderExtension& operator=(const RTPHeaderExtension& other); + + bool hasTransmissionTimeOffset; + int32_t transmissionTimeOffset; + bool hasAbsoluteSendTime; + uint32_t absoluteSendTime; + bool hasTransportSequenceNumber; + uint16_t transportSequenceNumber; + + // Audio Level includes both level in dBov and voiced/unvoiced bit. See: + // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/ + bool hasAudioLevel; + bool voiceActivity; + uint8_t audioLevel; + + // For Coordination of Video Orientation. See + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ + // ts_126114v120700p.pdf + bool hasVideoRotation; + VideoRotation videoRotation; + + // TODO(ilnik): Refactor this and one above to be rtc::Optional() and remove + // a corresponding bool flag. + bool hasVideoContentType; + VideoContentType videoContentType; + + bool has_video_timing; + VideoSendTiming video_timing; + + PlayoutDelay playout_delay = {-1, -1}; + + // For identification of a stream when ssrc is not signaled. See + // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 + // TODO(danilchap): Update url from draft to release version. + StreamId stream_id; + StreamId repaired_stream_id; + + // For identifying the media section used to interpret this RTP packet. 
See + // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-38 + Mid mid; +}; + +struct RTPHeader { + RTPHeader(); + RTPHeader(const RTPHeader& other); + RTPHeader& operator=(const RTPHeader& other); + + bool markerBit; + uint8_t payloadType; + uint16_t sequenceNumber; + uint32_t timestamp; + uint32_t ssrc; + uint8_t numCSRCs; + uint32_t arrOfCSRCs[kRtpCsrcSize]; + size_t paddingLength; + size_t headerLength; + int payload_type_frequency; + RTPHeaderExtension extension; +}; + +// RTCP mode to use. Compound mode is described by RFC 4585 and reduced-size +// RTCP mode is described by RFC 5506. +enum class RtcpMode { kOff, kCompound, kReducedSize }; + +enum NetworkState { + kNetworkUp, + kNetworkDown, +}; + +struct RtpKeepAliveConfig final { + // If no packet has been sent for |timeout_interval_ms|, send a keep-alive + // packet. The keep-alive packet is an empty (no payload) RTP packet with a + // payload type of 20 as long as the other end has not negotiated the use of + // this value. If this value has already been negotiated, then some other + // unused static payload type from table 5 of RFC 3551 shall be used and set + // in |payload_type|. + int64_t timeout_interval_ms = -1; + uint8_t payload_type = 20; + + bool operator==(const RtpKeepAliveConfig& o) const { + return timeout_interval_ms == o.timeout_interval_ms && + payload_type == o.payload_type; + } + bool operator!=(const RtpKeepAliveConfig& o) const { return !(*this == o); } +}; + +} // namespace webrtc + +#endif // API_RTP_HEADERS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpparameters.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpparameters.h new file mode 100644 index 000000000000..96df9ce8855d --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpparameters.h @@ -0,0 +1,589 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTPPARAMETERS_H_ +#define API_RTPPARAMETERS_H_ + +#include +#include +#include + +#include "api/mediatypes.h" +#include "api/optional.h" + +namespace webrtc { + +// These structures are intended to mirror those defined by: +// http://draft.ortc.org/#rtcrtpdictionaries* +// Contains everything specified as of 2017 Jan 24. +// +// They are used when retrieving or modifying the parameters of an +// RtpSender/RtpReceiver, or retrieving capabilities. +// +// Note on conventions: Where ORTC may use "octet", "short" and "unsigned" +// types, we typically use "int", in keeping with our style guidelines. The +// parameter's actual valid range will be enforced when the parameters are set, +// rather than when the parameters struct is built. An exception is made for +// SSRCs, since they use the full unsigned 32-bit range, and aren't expected to +// be used for any numeric comparisons/operations. +// +// Additionally, where ORTC uses strings, we may use enums for things that have +// a fixed number of supported values. However, for things that can be extended +// (such as codecs, by providing an external encoder factory), a string +// identifier is used. + +enum class FecMechanism { + RED, + RED_AND_ULPFEC, + FLEXFEC, +}; + +// Used in RtcpFeedback struct. +enum class RtcpFeedbackType { + CCM, + NACK, + REMB, // "goog-remb" + TRANSPORT_CC, +}; + +// Used in RtcpFeedback struct when type is NACK or CCM. +enum class RtcpFeedbackMessageType { + // Equivalent to {type: "nack", parameter: undefined} in ORTC. + GENERIC_NACK, + PLI, // Usable with NACK. + FIR, // Usable with CCM. 
+}; + +enum class DtxStatus { + DISABLED, + ENABLED, +}; + +// Based on the spec in +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference. +// These options are enforced on a best-effort basis. For instance, all of +// these options may suffer some frame drops in order to avoid queuing. +// TODO(sprang): Look into possibility of more strictly enforcing the +// maintain-framerate option. +// TODO(deadbeef): Default to "balanced", as the spec indicates? +enum class DegradationPreference { + // Don't take any actions based on over-utilization signals. Not part of the + // web API. + DISABLED, + // On over-use, request lower frame rate, possibly causing frame drops. + MAINTAIN_FRAMERATE, + // On over-use, request lower resolution, possibly causing down-scaling. + MAINTAIN_RESOLUTION, + // Try to strike a "pleasing" balance between frame rate or resolution. + BALANCED, +}; + +extern const double kDefaultBitratePriority; + +struct RtcpFeedback { + RtcpFeedbackType type = RtcpFeedbackType::CCM; + + // Equivalent to ORTC "parameter" field with slight differences: + // 1. It's an enum instead of a string. + // 2. Generic NACK feedback is represented by a GENERIC_NACK message type, + // rather than an unset "parameter" value. + rtc::Optional message_type; + + // Constructors for convenience. + RtcpFeedback(); + explicit RtcpFeedback(RtcpFeedbackType type); + RtcpFeedback(RtcpFeedbackType type, RtcpFeedbackMessageType message_type); + ~RtcpFeedback(); + + bool operator==(const RtcpFeedback& o) const { + return type == o.type && message_type == o.message_type; + } + bool operator!=(const RtcpFeedback& o) const { return !(*this == o); } +}; + +// RtpCodecCapability is to RtpCodecParameters as RtpCapabilities is to +// RtpParameters. This represents the static capabilities of an endpoint's +// implementation of a codec. +struct RtpCodecCapability { + RtpCodecCapability(); + ~RtpCodecCapability(); + + // Build MIME "type/subtype" string from |name| and |kind|. 
+ std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; } + + // Used to identify the codec. Equivalent to MIME subtype. + std::string name; + + // The media type of this codec. Equivalent to MIME top-level type. + cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO; + + // Clock rate in Hertz. If unset, the codec is applicable to any clock rate. + rtc::Optional clock_rate; + + // Default payload type for this codec. Mainly needed for codecs that use + // that have statically assigned payload types. + rtc::Optional preferred_payload_type; + + // Maximum packetization time supported by an RtpReceiver for this codec. + // TODO(deadbeef): Not implemented. + rtc::Optional max_ptime; + + // Preferred packetization time for an RtpReceiver or RtpSender of this + // codec. + // TODO(deadbeef): Not implemented. + rtc::Optional ptime; + + // The number of audio channels supported. Unused for video codecs. + rtc::Optional num_channels; + + // Feedback mechanisms supported for this codec. + std::vector rtcp_feedback; + + // Codec-specific parameters that must be signaled to the remote party. + // + // Corresponds to "a=fmtp" parameters in SDP. + // + // Contrary to ORTC, these parameters are named using all lowercase strings. + // This helps make the mapping to SDP simpler, if an application is using + // SDP. Boolean values are represented by the string "1". + std::unordered_map parameters; + + // Codec-specific parameters that may optionally be signaled to the remote + // party. + // TODO(deadbeef): Not implemented. + std::unordered_map options; + + // Maximum number of temporal layer extensions supported by this codec. + // For example, a value of 1 indicates that 2 total layers are supported. + // TODO(deadbeef): Not implemented. + int max_temporal_layer_extensions = 0; + + // Maximum number of spatial layer extensions supported by this codec. + // For example, a value of 1 indicates that 2 total layers are supported. 
+ // TODO(deadbeef): Not implemented. + int max_spatial_layer_extensions = 0; + + // Whether the implementation can send/receive SVC layers with distinct + // SSRCs. Always false for audio codecs. True for video codecs that support + // scalable video coding with MRST. + // TODO(deadbeef): Not implemented. + bool svc_multi_stream_support = false; + + bool operator==(const RtpCodecCapability& o) const { + return name == o.name && kind == o.kind && clock_rate == o.clock_rate && + preferred_payload_type == o.preferred_payload_type && + max_ptime == o.max_ptime && ptime == o.ptime && + num_channels == o.num_channels && rtcp_feedback == o.rtcp_feedback && + parameters == o.parameters && options == o.options && + max_temporal_layer_extensions == o.max_temporal_layer_extensions && + max_spatial_layer_extensions == o.max_spatial_layer_extensions && + svc_multi_stream_support == o.svc_multi_stream_support; + } + bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); } +}; + +// Used in RtpCapabilities; represents the capabilities/preferences of an +// implementation for a header extension. +// +// Just called "RtpHeaderExtension" in ORTC, but the "Capability" suffix was +// added here for consistency and to avoid confusion with +// RtpHeaderExtensionParameters. +// +// Note that ORTC includes a "kind" field, but we omit this because it's +// redundant; if you call "RtpReceiver::GetCapabilities(MEDIA_TYPE_AUDIO)", +// you know you're getting audio capabilities. +struct RtpHeaderExtensionCapability { + // URI of this extension, as defined in RFC5285. + std::string uri; + + // Preferred value of ID that goes in the packet. + rtc::Optional preferred_id; + + // If true, it's preferred that the value in the header is encrypted. + // TODO(deadbeef): Not implemented. + bool preferred_encrypt = false; + + // Constructors for convenience. 
+ RtpHeaderExtensionCapability(); + explicit RtpHeaderExtensionCapability(const std::string& uri); + RtpHeaderExtensionCapability(const std::string& uri, int preferred_id); + ~RtpHeaderExtensionCapability(); + + bool operator==(const RtpHeaderExtensionCapability& o) const { + return uri == o.uri && preferred_id == o.preferred_id && + preferred_encrypt == o.preferred_encrypt; + } + bool operator!=(const RtpHeaderExtensionCapability& o) const { + return !(*this == o); + } +}; + +// RTP header extension, see RFC 5285. +struct RtpExtension { + RtpExtension(); + RtpExtension(const std::string& uri, int id); + RtpExtension(const std::string& uri, int id, bool encrypt); + ~RtpExtension(); + std::string ToString() const; + bool operator==(const RtpExtension& rhs) const { + return uri == rhs.uri && id == rhs.id && encrypt == rhs.encrypt; + } + static bool IsSupportedForAudio(const std::string& uri); + static bool IsSupportedForVideo(const std::string& uri); + // Return "true" if the given RTP header extension URI may be encrypted. + static bool IsEncryptionSupported(const std::string& uri); + + // Returns the named header extension if found among all extensions, + // nullptr otherwise. + static const RtpExtension* FindHeaderExtensionByUri( + const std::vector& extensions, + const std::string& uri); + + // Return a list of RTP header extensions with the non-encrypted extensions + // removed if both the encrypted and non-encrypted extension is present for + // the same URI. 
+ static std::vector FilterDuplicateNonEncrypted( + const std::vector& extensions); + + // Header extension for audio levels, as defined in: + // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-03 + static const char kAudioLevelUri[]; + static const int kAudioLevelDefaultId; + + // Header extension for RTP timestamp offset, see RFC 5450 for details: + // http://tools.ietf.org/html/rfc5450 + static const char kTimestampOffsetUri[]; + static const int kTimestampOffsetDefaultId; + + // Header extension for absolute send time, see url for details: + // http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time + static const char kAbsSendTimeUri[]; + static const int kAbsSendTimeDefaultId; + + // Header extension for coordination of video orientation, see url for + // details: + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf + static const char kVideoRotationUri[]; + static const int kVideoRotationDefaultId; + + // Header extension for video content type. E.g. default or screenshare. + static const char kVideoContentTypeUri[]; + static const int kVideoContentTypeDefaultId; + + // Header extension for video timing. + static const char kVideoTimingUri[]; + static const int kVideoTimingDefaultId; + + // Header extension for transport sequence number, see url for details: + // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions + static const char kTransportSequenceNumberUri[]; + static const int kTransportSequenceNumberDefaultId; + + static const char kPlayoutDelayUri[]; + static const int kPlayoutDelayDefaultId; + + // Header extension for identifying media section within a transport. 
+ // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-49#section-15 + static const char kMidUri[]; + static const int kMidDefaultId; + + // Encryption of Header Extensions, see RFC 6904 for details: + // https://tools.ietf.org/html/rfc6904 + static const char kEncryptHeaderExtensionsUri[]; + + // Inclusive min and max IDs for one-byte header extensions, per RFC5285. + static const int kMinId; + static const int kMaxId; + + std::string uri; + int id = 0; + bool encrypt = false; +}; + +// TODO(deadbeef): This is missing the "encrypt" flag, which is unimplemented. +typedef RtpExtension RtpHeaderExtensionParameters; + +struct RtpFecParameters { + // If unset, a value is chosen by the implementation. + // Works just like RtpEncodingParameters::ssrc. + rtc::Optional ssrc; + + FecMechanism mechanism = FecMechanism::RED; + + // Constructors for convenience. + RtpFecParameters(); + explicit RtpFecParameters(FecMechanism mechanism); + RtpFecParameters(FecMechanism mechanism, uint32_t ssrc); + ~RtpFecParameters(); + + bool operator==(const RtpFecParameters& o) const { + return ssrc == o.ssrc && mechanism == o.mechanism; + } + bool operator!=(const RtpFecParameters& o) const { return !(*this == o); } +}; + +struct RtpRtxParameters { + // If unset, a value is chosen by the implementation. + // Works just like RtpEncodingParameters::ssrc. + rtc::Optional ssrc; + + // Constructors for convenience. + RtpRtxParameters(); + explicit RtpRtxParameters(uint32_t ssrc); + ~RtpRtxParameters(); + + bool operator==(const RtpRtxParameters& o) const { return ssrc == o.ssrc; } + bool operator!=(const RtpRtxParameters& o) const { return !(*this == o); } +}; + +struct RtpEncodingParameters { + RtpEncodingParameters(); + ~RtpEncodingParameters(); + + // If unset, a value is chosen by the implementation. 
+ // + // Note that the chosen value is NOT returned by GetParameters, because it + // may change due to an SSRC conflict, in which case the conflict is handled + // internally without any event. Another way of looking at this is that an + // unset SSRC acts as a "wildcard" SSRC. + rtc::Optional ssrc; + + // Can be used to reference a codec in the |codecs| member of the + // RtpParameters that contains this RtpEncodingParameters. If unset, the + // implementation will choose the first possible codec (if a sender), or + // prepare to receive any codec (for a receiver). + // TODO(deadbeef): Not implemented. Implementation of RtpSender will always + // choose the first codec from the list. + rtc::Optional codec_payload_type; + + // Specifies the FEC mechanism, if set. + // TODO(deadbeef): Not implemented. Current implementation will use whatever + // FEC codecs are available, including red+ulpfec. + rtc::Optional fec; + + // Specifies the RTX parameters, if set. + // TODO(deadbeef): Not implemented with PeerConnection senders/receivers. + rtc::Optional rtx; + + // Only used for audio. If set, determines whether or not discontinuous + // transmission will be used, if an available codec supports it. If not + // set, the implementation default setting will be used. + // TODO(deadbeef): Not implemented. Current implementation will use a CN + // codec as long as it's present. + rtc::Optional dtx; + + // The relative bitrate priority of this encoding. Currently this is + // implemented for the entire rtp sender by using the value of the first + // encoding parameter. + // TODO(webrtc.bugs.org/8630): Implement this per encoding parameter. + // Currently there is logic for how bitrate is distributed per simulcast layer + // in the VideoBitrateAllocator. This must be updated to incorporate relative + // bitrate priority. 
+ double bitrate_priority = kDefaultBitratePriority; + + // Indicates the preferred duration of media represented by a packet in + // milliseconds for this encoding. If set, this will take precedence over the + // ptime set in the RtpCodecParameters. This could happen if SDP negotiation + // creates a ptime for a specific codec, which is later changed in the + // RtpEncodingParameters by the application. + // TODO(bugs.webrtc.org/8819): Not implemented. + rtc::Optional ptime; + + // If set, this represents the Transport Independent Application Specific + // maximum bandwidth defined in RFC3890. If unset, there is no maximum + // bitrate. Currently this is implemented for the entire rtp sender by using + // the value of the first encoding parameter. + // + // TODO(webrtc.bugs.org/8655): Implement this per encoding parameter. + // Current implementation for a sender: + // The max bitrate is decided by taking the minimum of the first encoding + // parameter's max_bitrate_bps and the max bitrate specified by the sdp with + // the b=AS attribute. In the case of simulcast video, default values are used + // for each simulcast layer, and if there is some bitrate left over from the + // sender's max bitrate then it will roll over into the highest quality layer. + // + // Just called "maxBitrate" in ORTC spec. + // + // TODO(deadbeef): With ORTC RtpSenders, this currently sets the total + // bandwidth for the entire bandwidth estimator (audio and video). This is + // just always how "b=AS" was handled, but it's not correct and should be + // fixed. + rtc::Optional max_bitrate_bps; + + // TODO(deadbeef): Not implemented. + rtc::Optional max_framerate; + + // For video, scale the resolution down by this factor. + // TODO(deadbeef): Not implemented. + rtc::Optional scale_resolution_down_by; + + // Scale the framerate down by this factor. + // TODO(deadbeef): Not implemented. 
+ rtc::Optional scale_framerate_down_by; + + // For an RtpSender, set to true to cause this encoding to be encoded and + // sent, and false for it not to be encoded and sent. This allows control + // across multiple encodings of a sender for turning simulcast layers on and + // off. + // TODO(webrtc.bugs.org/8807): Updating this parameter will trigger an encoder + // reset, but this isn't necessarily required. + bool active = true; + + // Value to use for RID RTP header extension. + // Called "encodingId" in ORTC. + // TODO(deadbeef): Not implemented. + std::string rid; + + // RIDs of encodings on which this layer depends. + // Called "dependencyEncodingIds" in ORTC spec. + // TODO(deadbeef): Not implemented. + std::vector dependency_rids; + + bool operator==(const RtpEncodingParameters& o) const { + return ssrc == o.ssrc && codec_payload_type == o.codec_payload_type && + fec == o.fec && rtx == o.rtx && dtx == o.dtx && + bitrate_priority == o.bitrate_priority && ptime == o.ptime && + max_bitrate_bps == o.max_bitrate_bps && + max_framerate == o.max_framerate && + scale_resolution_down_by == o.scale_resolution_down_by && + scale_framerate_down_by == o.scale_framerate_down_by && + active == o.active && rid == o.rid && + dependency_rids == o.dependency_rids; + } + bool operator!=(const RtpEncodingParameters& o) const { + return !(*this == o); + } +}; + +struct RtpCodecParameters { + RtpCodecParameters(); + ~RtpCodecParameters(); + + // Build MIME "type/subtype" string from |name| and |kind|. + std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; } + + // Used to identify the codec. Equivalent to MIME subtype. + std::string name; + + // The media type of this codec. Equivalent to MIME top-level type. + cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO; + + // Payload type used to identify this codec in RTP packets. + // This must always be present, and must be unique across all codecs using + // the same transport. 
+ int payload_type = 0; + + // If unset, the implementation default is used. + rtc::Optional clock_rate; + + // The number of audio channels used. Unset for video codecs. If unset for + // audio, the implementation default is used. + // TODO(deadbeef): The "implementation default" part isn't fully implemented. + // Only defaults to 1, even though some codecs (such as opus) should really + // default to 2. + rtc::Optional num_channels; + + // The maximum packetization time to be used by an RtpSender. + // If |ptime| is also set, this will be ignored. + // TODO(deadbeef): Not implemented. + rtc::Optional max_ptime; + + // The packetization time to be used by an RtpSender. + // If unset, will use any time up to max_ptime. + // TODO(deadbeef): Not implemented. + rtc::Optional ptime; + + // Feedback mechanisms to be used for this codec. + // TODO(deadbeef): Not implemented with PeerConnection senders/receivers. + std::vector rtcp_feedback; + + // Codec-specific parameters that must be signaled to the remote party. + // + // Corresponds to "a=fmtp" parameters in SDP. + // + // Contrary to ORTC, these parameters are named using all lowercase strings. + // This helps make the mapping to SDP simpler, if an application is using + // SDP. Boolean values are represented by the string "1". + std::unordered_map parameters; + + bool operator==(const RtpCodecParameters& o) const { + return name == o.name && kind == o.kind && payload_type == o.payload_type && + clock_rate == o.clock_rate && num_channels == o.num_channels && + max_ptime == o.max_ptime && ptime == o.ptime && + rtcp_feedback == o.rtcp_feedback && parameters == o.parameters; + } + bool operator!=(const RtpCodecParameters& o) const { return !(*this == o); } +}; + +// RtpCapabilities is used to represent the static capabilities of an +// endpoint. An application can use these capabilities to construct an +// RtpParameters. +struct RtpCapabilities { + RtpCapabilities(); + ~RtpCapabilities(); + + // Supported codecs. 
+ std::vector codecs; + + // Supported RTP header extensions. + std::vector header_extensions; + + // Supported Forward Error Correction (FEC) mechanisms. Note that the RED, + // ulpfec and flexfec codecs used by these mechanisms will still appear in + // |codecs|. + std::vector fec; + + bool operator==(const RtpCapabilities& o) const { + return codecs == o.codecs && header_extensions == o.header_extensions && + fec == o.fec; + } + bool operator!=(const RtpCapabilities& o) const { return !(*this == o); } +}; + +// Note that unlike in ORTC, an RtcpParameters structure is not included in +// RtpParameters, because our API includes an additional "RtpTransport" +// abstraction on which RTCP parameters are set. +struct RtpParameters { + RtpParameters(); + ~RtpParameters(); + + // Used when calling getParameters/setParameters with a PeerConnection + // RtpSender, to ensure that outdated parameters are not unintentionally + // applied successfully. + std::string transaction_id; + + // Value to use for MID RTP header extension. + // Called "muxId" in ORTC. + // TODO(deadbeef): Not implemented. + std::string mid; + + std::vector codecs; + + // TODO(deadbeef): Not implemented with PeerConnection senders/receivers. + std::vector header_extensions; + + std::vector encodings; + + // TODO(deadbeef): Not implemented. 
+ DegradationPreference degradation_preference = + DegradationPreference::BALANCED; + + bool operator==(const RtpParameters& o) const { + return mid == o.mid && codecs == o.codecs && + header_extensions == o.header_extensions && + encodings == o.encodings && + degradation_preference == o.degradation_preference; + } + bool operator!=(const RtpParameters& o) const { return !(*this == o); } +}; + +} // namespace webrtc + +#endif // API_RTPPARAMETERS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpreceiverinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpreceiverinterface.h new file mode 100644 index 000000000000..0e32eae8cb7e --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpreceiverinterface.h @@ -0,0 +1,145 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains interfaces for RtpReceivers +// http://w3c.github.io/webrtc-pc/#rtcrtpreceiver-interface + +#ifndef API_RTPRECEIVERINTERFACE_H_ +#define API_RTPRECEIVERINTERFACE_H_ + +#include +#include + +#include "api/mediastreaminterface.h" +#include "api/mediatypes.h" +#include "api/proxy.h" +#include "api/rtpparameters.h" +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +enum class RtpSourceType { + SSRC, + CSRC, +}; + +class RtpSource { + public: + RtpSource() = delete; + RtpSource(int64_t timestamp_ms, + uint32_t source_id, + RtpSourceType source_type); + RtpSource(int64_t timestamp_ms, + uint32_t source_id, + RtpSourceType source_type, + uint8_t audio_level); + RtpSource(const RtpSource&); + RtpSource& operator=(const RtpSource&); + ~RtpSource(); + + int64_t timestamp_ms() const { return timestamp_ms_; } + void update_timestamp_ms(int64_t timestamp_ms) { + RTC_DCHECK_LE(timestamp_ms_, timestamp_ms); + timestamp_ms_ = timestamp_ms; + } + + // The identifier of the source can be the CSRC or the SSRC. + uint32_t source_id() const { return source_id_; } + + // The source can be either a contributing source or a synchronization source. + RtpSourceType source_type() const { return source_type_; } + + rtc::Optional audio_level() const { return audio_level_; } + void set_audio_level(const rtc::Optional& level) { + audio_level_ = level; + } + + bool operator==(const RtpSource& o) const { + return timestamp_ms_ == o.timestamp_ms() && source_id_ == o.source_id() && + source_type_ == o.source_type() && audio_level_ == o.audio_level_; + } + + private: + int64_t timestamp_ms_; + uint32_t source_id_; + RtpSourceType source_type_; + rtc::Optional audio_level_; +}; + +class RtpReceiverObserverInterface { + public: + // Note: Currently if there are multiple RtpReceivers of the same media type, + // they will all call OnFirstPacketReceived at once. 
+ // + // In the future, it's likely that an RtpReceiver will only call + // OnFirstPacketReceived when a packet is received specifically for its + // SSRC/mid. + virtual void OnFirstPacketReceived(cricket::MediaType media_type) = 0; + + protected: + virtual ~RtpReceiverObserverInterface() {} +}; + +class RtpReceiverInterface : public rtc::RefCountInterface { + public: + virtual rtc::scoped_refptr track() const = 0; + // The list of streams that |track| is associated with. This is the same as + // the [[AssociatedRemoteMediaStreams]] internal slot in the spec. + // https://w3c.github.io/webrtc-pc/#dfn-x%5B%5Bassociatedremotemediastreams%5D%5D + // TODO(hbos): Make pure virtual as soon as Chromium's mock implements this. + virtual std::vector> streams() const; + + // Audio or video receiver? + virtual cricket::MediaType media_type() const = 0; + + // Not to be confused with "mid", this is a field we can temporarily use + // to uniquely identify a receiver until we implement Unified Plan SDP. + virtual std::string id() const = 0; + + // The WebRTC specification only defines RTCRtpParameters in terms of senders, + // but this API also applies them to receivers, similar to ORTC: + // http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. + virtual RtpParameters GetParameters() const = 0; + // Currently, doesn't support changing any parameters, but may in the future. + virtual bool SetParameters(const RtpParameters& parameters) = 0; + + // Does not take ownership of observer. + // Must call SetObserver(nullptr) before the observer is destroyed. + virtual void SetObserver(RtpReceiverObserverInterface* observer) = 0; + + // TODO(zhihuang): Remove the default implementation once the subclasses + // implement this. Currently, the only relevant subclass is the + // content::FakeRtpReceiver in Chromium. + virtual std::vector GetSources() const; + + protected: + ~RtpReceiverInterface() override = default; +}; + +// Define proxy for RtpReceiverInterface. 
+// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(RtpReceiver) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) + PROXY_CONSTMETHOD0(std::vector>, + streams) + PROXY_CONSTMETHOD0(cricket::MediaType, media_type) + PROXY_CONSTMETHOD0(std::string, id) + PROXY_CONSTMETHOD0(RtpParameters, GetParameters); + PROXY_METHOD1(bool, SetParameters, const RtpParameters&) + PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*); + PROXY_CONSTMETHOD0(std::vector, GetSources); + END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_RTPRECEIVERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpsenderinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpsenderinterface.h new file mode 100644 index 000000000000..8c7e751392b2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtpsenderinterface.h @@ -0,0 +1,93 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains interfaces for RtpSenders +// http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface + +#ifndef API_RTPSENDERINTERFACE_H_ +#define API_RTPSENDERINTERFACE_H_ + +#include +#include + +#include "api/dtmfsenderinterface.h" +#include "api/mediastreaminterface.h" +#include "api/mediatypes.h" +#include "api/proxy.h" +#include "api/rtcerror.h" +#include "api/rtpparameters.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +class RtpSenderInterface : public rtc::RefCountInterface { + public: + // Returns true if successful in setting the track. + // Fails if an audio track is set on a video RtpSender, or vice-versa. + virtual bool SetTrack(MediaStreamTrackInterface* track) = 0; + virtual rtc::scoped_refptr track() const = 0; + + // Returns primary SSRC used by this sender for sending media. + // Returns 0 if not yet determined. + // TODO(deadbeef): Change to rtc::Optional. + // TODO(deadbeef): Remove? With GetParameters this should be redundant. + virtual uint32_t ssrc() const = 0; + + // Audio or video sender? + virtual cricket::MediaType media_type() const = 0; + + // Not to be confused with "mid", this is a field we can temporarily use + // to uniquely identify a receiver until we implement Unified Plan SDP. + virtual std::string id() const = 0; + + // Returns a list of media stream ids associated with this sender's track. + // These are signalled in the SDP so that the remote side can associate + // tracks. 
+ virtual std::vector stream_ids() const = 0; + + // TODO(orphis): Transitional implementation + // Remove the const implementation and make the non-const pure virtual once + // when external code depending on this has updated + virtual RtpParameters GetParameters() { return RtpParameters(); } + RTC_DEPRECATED virtual RtpParameters GetParameters() const { + return const_cast(this)->GetParameters(); + } + // Note that only a subset of the parameters can currently be changed. See + // rtpparameters.h + virtual RTCError SetParameters(const RtpParameters& parameters) = 0; + + // Returns null for a video sender. + virtual rtc::scoped_refptr GetDtmfSender() const = 0; + + protected: + virtual ~RtpSenderInterface() {} +}; + +// Define proxy for RtpSenderInterface. +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(RtpSender) + PROXY_SIGNALING_THREAD_DESTRUCTOR() + PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) + PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) + PROXY_CONSTMETHOD0(uint32_t, ssrc) + PROXY_CONSTMETHOD0(cricket::MediaType, media_type) + PROXY_CONSTMETHOD0(std::string, id) + PROXY_CONSTMETHOD0(std::vector, stream_ids) + PROXY_METHOD0(RtpParameters, GetParameters); + PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&) + PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetDtmfSender); + END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_RTPSENDERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtptransceiverinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtptransceiverinterface.h new file mode 100644 index 000000000000..7d2a1dffe20f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/rtptransceiverinterface.h @@ -0,0 +1,128 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTPTRANSCEIVERINTERFACE_H_ +#define API_RTPTRANSCEIVERINTERFACE_H_ + +#include +#include + +#include "api/array_view.h" +#include "api/optional.h" +#include "api/rtpreceiverinterface.h" +#include "api/rtpsenderinterface.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection +enum class RtpTransceiverDirection { + kSendRecv, + kSendOnly, + kRecvOnly, + kInactive +}; + +// Structure for initializing an RtpTransceiver in a call to +// PeerConnectionInterface::AddTransceiver. +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit +struct RtpTransceiverInit final { + // Direction of the RtpTransceiver. See RtpTransceiverInterface::direction(). + RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv; + + // The added RtpTransceiver will be added to these streams. + std::vector stream_ids; + + // TODO(bugs.webrtc.org/7600): Not implemented. + std::vector send_encodings; +}; + +// The RtpTransceiverInterface maps to the RTCRtpTransceiver defined by the +// WebRTC specification. A transceiver represents a combination of an RtpSender +// and an RtpReceiver than share a common mid. As defined in JSEP, an +// RtpTransceiver is said to be associated with a media description if its mid +// property is non-null; otherwise, it is said to be disassociated. +// JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24 +// +// Note that RtpTransceivers are only supported when using PeerConnection with +// Unified Plan SDP. +// +// This class is thread-safe. 
+// +// WebRTC specification for RTCRtpTransceiver, the JavaScript analog: +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver +class RtpTransceiverInterface : public rtc::RefCountInterface { + public: + // Media type of the transceiver. Any sender(s)/receiver(s) will have this + // type as well. + virtual cricket::MediaType media_type() const = 0; + + // The mid attribute is the mid negotiated and present in the local and + // remote descriptions. Before negotiation is complete, the mid value may be + // null. After rollbacks, the value may change from a non-null value to null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid + virtual rtc::Optional mid() const = 0; + + // The sender attribute exposes the RtpSender corresponding to the RTP media + // that may be sent with the transceiver's mid. The sender is always present, + // regardless of the direction of media. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender + virtual rtc::scoped_refptr sender() const = 0; + + // The receiver attribute exposes the RtpReceiver corresponding to the RTP + // media that may be received with the transceiver's mid. The receiver is + // always present, regardless of the direction of media. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver + virtual rtc::scoped_refptr receiver() const = 0; + + // The stopped attribute indicates that the sender of this transceiver will no + // longer send, and that the receiver will no longer receive. It is true if + // either stop has been called or if setting the local or remote description + // has caused the RtpTransceiver to be stopped. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped + virtual bool stopped() const = 0; + + // The direction attribute indicates the preferred direction of this + // transceiver, which will be used in calls to CreateOffer and CreateAnswer. 
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction + virtual RtpTransceiverDirection direction() const = 0; + + // Sets the preferred direction of this transceiver. An update of + // directionality does not take effect immediately. Instead, future calls to + // CreateOffer and CreateAnswer mark the corresponding media descriptions as + // sendrecv, sendonly, recvonly, or inactive. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction + virtual void SetDirection(RtpTransceiverDirection new_direction) = 0; + + // The current_direction attribute indicates the current direction negotiated + // for this transceiver. If this transceiver has never been represented in an + // offer/answer exchange, or if the transceiver is stopped, the value is null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection + virtual rtc::Optional current_direction() const = 0; + + // The Stop method irreversibly stops the RtpTransceiver. The sender of this + // transceiver will no longer send, the receiver will no longer receive. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop + virtual void Stop() = 0; + + // The SetCodecPreferences method overrides the default codec preferences used + // by WebRTC for this transceiver. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-setcodecpreferences + // TODO(steveanton): Not implemented. 
+ virtual void SetCodecPreferences( + rtc::ArrayView codecs) = 0; + + protected: + virtual ~RtpTransceiverInterface() = default; +}; + +} // namespace webrtc + +#endif // API_RTPTRANSCEIVERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/setremotedescriptionobserverinterface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/setremotedescriptionobserverinterface.h new file mode 100644 index 000000000000..bea8b82bd560 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/setremotedescriptionobserverinterface.h @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_SETREMOTEDESCRIPTIONOBSERVERINTERFACE_H_ +#define API_SETREMOTEDESCRIPTIONOBSERVERINTERFACE_H_ + +#include "api/rtcerror.h" +#include "rtc_base/refcount.h" + +namespace webrtc { + +// An observer for PeerConnectionInterface::SetRemoteDescription(). The +// callback is invoked such that the state of the peer connection can be +// examined to accurately reflect the effects of the SetRemoteDescription +// operation. +class SetRemoteDescriptionObserverInterface : public rtc::RefCountInterface { + public: + // On success, |error.ok()| is true. 
+ virtual void OnSetRemoteDescriptionComplete(RTCError error) = 0; +}; + +} // namespace webrtc + +#endif // API_SETREMOTEDESCRIPTIONOBSERVERINTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats.h new file mode 100644 index 000000000000..887d602042cb --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats.h @@ -0,0 +1,332 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTCSTATS_H_ +#define API_STATS_RTCSTATS_H_ + +#include +#include +#include +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +class RTCStatsMemberInterface; + +// Abstract base class for RTCStats-derived dictionaries, see +// https://w3c.github.io/webrtc-stats/. +// +// All derived classes must have the following static variable defined: +// static const char kType[]; +// It is used as a unique class identifier and a string representation of the +// class type, see https://w3c.github.io/webrtc-stats/#rtcstatstype-str*. +// Use the |WEBRTC_RTCSTATS_IMPL| macro when implementing subclasses, see macro +// for details. 
+// +// Derived classes list their dictionary members, RTCStatsMember, as public +// fields, allowing the following: +// +// RTCFooStats foo("fooId", GetCurrentTime()); +// foo.bar = 42; +// foo.baz = std::vector(); +// foo.baz->push_back("hello world"); +// uint32_t x = *foo.bar; +// +// Pointers to all the members are available with |Members|, allowing iteration: +// +// for (const RTCStatsMemberInterface* member : foo.Members()) { +// printf("%s = %s\n", member->name(), member->ValueToString().c_str()); +// } +class RTCStats { + public: + RTCStats(const std::string& id, int64_t timestamp_us) + : id_(id), timestamp_us_(timestamp_us) {} + RTCStats(std::string&& id, int64_t timestamp_us) + : id_(std::move(id)), timestamp_us_(timestamp_us) {} + virtual ~RTCStats() {} + + virtual std::unique_ptr copy() const = 0; + + const std::string& id() const { return id_; } + // Time relative to the UNIX epoch (Jan 1, 1970, UTC), in microseconds. + int64_t timestamp_us() const { return timestamp_us_; } + // Returns the static member variable |kType| of the implementing class. + virtual const char* type() const = 0; + // Returns a vector of pointers to all the |RTCStatsMemberInterface| members + // of this class. This allows for iteration of members. For a given class, + // |Members| always returns the same members in the same order. + std::vector Members() const; + // Checks if the two stats objects are of the same type and have the same + // member values. Timestamps are not compared. These operators are exposed for + // testing. + bool operator==(const RTCStats& other) const; + bool operator!=(const RTCStats& other) const; + + // Creates a JSON readable string representation of the stats + // object, listing all of its members (names and values). + std::string ToJson() const; + + // Downcasts the stats object to an |RTCStats| subclass |T|. DCHECKs that the + // object is of type |T|. 
+ template + const T& cast_to() const { + RTC_DCHECK_EQ(type(), T::kType); + return static_cast(*this); + } + + protected: + // Gets a vector of all members of this |RTCStats| object, including members + // derived from parent classes. |additional_capacity| is how many more members + // shall be reserved in the vector (so that subclasses can allocate a vector + // with room for both parent and child members without it having to resize). + virtual std::vector + MembersOfThisObjectAndAncestors( + size_t additional_capacity) const; + + std::string const id_; + int64_t timestamp_us_; +}; + +// All |RTCStats| classes should use these macros. +// |WEBRTC_RTCSTATS_DECL| is placed in a public section of the class definition. +// |WEBRTC_RTCSTATS_IMPL| is placed outside the class definition (in a .cc). +// +// These macros declare (in _DECL) and define (in _IMPL) the static |kType| and +// overrides methods as required by subclasses of |RTCStats|: |copy|, |type| and +// |MembersOfThisObjectAndAncestors|. The |...| argument is a list of addresses +// to each member defined in the implementing class. The list must have at least +// one member. +// +// (Since class names need to be known to implement these methods this cannot be +// part of the base |RTCStats|. While these methods could be implemented using +// templates, that would only work for immediate subclasses. Subclasses of +// subclasses also have to override these methods, resulting in boilerplate +// code. Using a macro avoids this and works for any |RTCStats| class, including +// grandchildren.) 
+// +// Sample usage: +// +// rtcfoostats.h: +// class RTCFooStats : public RTCStats { +// public: +// WEBRTC_RTCSTATS_DECL(); +// +// RTCFooStats(const std::string& id, int64_t timestamp_us); +// +// RTCStatsMember foo; +// RTCStatsMember bar; +// }; +// +// rtcfoostats.cc: +// WEBRTC_RTCSTATS_IMPL(RTCFooStats, RTCStats, "foo-stats" +// &foo, +// &bar); +// +// RTCFooStats::RTCFooStats(const std::string& id, int64_t timestamp_us) +// : RTCStats(id, timestamp_us), +// foo("foo"), +// bar("bar") { +// } +// +#define WEBRTC_RTCSTATS_DECL() \ + public: \ + static const char kType[]; \ + \ + std::unique_ptr copy() const override; \ + const char* type() const override; \ + \ + protected: \ + std::vector \ + MembersOfThisObjectAndAncestors( \ + size_t local_var_additional_capacity) const override; \ + \ + public: + +#define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) \ + const char this_class::kType[] = type_str; \ + \ + std::unique_ptr this_class::copy() const { \ + return std::unique_ptr(new this_class(*this)); \ + } \ + \ + const char* this_class::type() const { \ + return this_class::kType; \ + } \ + \ + std::vector \ + this_class::MembersOfThisObjectAndAncestors( \ + size_t local_var_additional_capacity) const { \ + const webrtc::RTCStatsMemberInterface* local_var_members[] = { \ + __VA_ARGS__ \ + }; \ + size_t local_var_members_count = \ + sizeof(local_var_members) / sizeof(local_var_members[0]); \ + std::vector local_var_members_vec =\ + parent_class::MembersOfThisObjectAndAncestors( \ + local_var_members_count + local_var_additional_capacity); \ + RTC_DCHECK_GE( \ + local_var_members_vec.capacity() - local_var_members_vec.size(), \ + local_var_members_count + local_var_additional_capacity); \ + local_var_members_vec.insert(local_var_members_vec.end(), \ + &local_var_members[0], \ + &local_var_members[local_var_members_count]); \ + return local_var_members_vec; \ + } + +// Interface for |RTCStats| members, which have a name and a value of a type 
+// defined in a subclass. Only the types listed in |Type| are supported, these +// are implemented by |RTCStatsMember|. The value of a member may be +// undefined, the value can only be read if |is_defined|. +class RTCStatsMemberInterface { + public: + // Member value types. + enum Type { + kBool, // bool + kInt32, // int32_t + kUint32, // uint32_t + kInt64, // int64_t + kUint64, // uint64_t + kDouble, // double + kString, // std::string + + kSequenceBool, // std::vector + kSequenceInt32, // std::vector + kSequenceUint32, // std::vector + kSequenceInt64, // std::vector + kSequenceUint64, // std::vector + kSequenceDouble, // std::vector + kSequenceString, // std::vector + }; + + virtual ~RTCStatsMemberInterface() {} + + const char* name() const { return name_; } + virtual Type type() const = 0; + virtual bool is_sequence() const = 0; + virtual bool is_string() const = 0; + bool is_defined() const { return is_defined_; } + // Type and value comparator. The names are not compared. These operators are + // exposed for testing. + virtual bool operator==(const RTCStatsMemberInterface& other) const = 0; + bool operator!=(const RTCStatsMemberInterface& other) const { + return !(*this == other); + } + virtual std::string ValueToString() const = 0; + // This is the same as ValueToString except for kInt64 and kUint64 types, + // where the value is represented as a double instead of as an integer. + // Since JSON stores numbers as floating point numbers, very large integers + // cannot be accurately represented, so we prefer to display them as doubles + // instead. + virtual std::string ValueToJson() const = 0; + + template + const T& cast_to() const { + RTC_DCHECK_EQ(type(), T::kType); + return static_cast(*this); + } + + protected: + RTCStatsMemberInterface(const char* name, bool is_defined) + : name_(name), is_defined_(is_defined) {} + + const char* const name_; + bool is_defined_; +}; + +// Template implementation of |RTCStatsMemberInterface|. 
Every possible |T| is +// specialized in rtcstats.cc, using a different |T| results in a linker error +// (undefined reference to |kType|). The supported types are the ones described +// by |RTCStatsMemberInterface::Type|. +template +class RTCStatsMember : public RTCStatsMemberInterface { + public: + static const Type kType; + + explicit RTCStatsMember(const char* name) + : RTCStatsMemberInterface(name, false), + value_() {} + RTCStatsMember(const char* name, const T& value) + : RTCStatsMemberInterface(name, true), + value_(value) {} + RTCStatsMember(const char* name, T&& value) + : RTCStatsMemberInterface(name, true), + value_(std::move(value)) {} + explicit RTCStatsMember(const RTCStatsMember& other) + : RTCStatsMemberInterface(other.name_, other.is_defined_), + value_(other.value_) {} + explicit RTCStatsMember(RTCStatsMember&& other) + : RTCStatsMemberInterface(other.name_, other.is_defined_), + value_(std::move(other.value_)) {} + + Type type() const override { return kType; } + bool is_sequence() const override; + bool is_string() const override; + bool operator==(const RTCStatsMemberInterface& other) const override { + if (type() != other.type()) + return false; + const RTCStatsMember& other_t = + static_cast&>(other); + if (!is_defined_) + return !other_t.is_defined(); + if (!other.is_defined()) + return false; + return value_ == other_t.value_; + } + std::string ValueToString() const override; + std::string ValueToJson() const override; + + // Assignment operators. + T& operator=(const T& value) { + value_ = value; + is_defined_ = true; + return value_; + } + T& operator=(const T&& value) { + value_ = std::move(value); + is_defined_ = true; + return value_; + } + T& operator=(const RTCStatsMember& other) { + RTC_DCHECK(other.is_defined_); + value_ = other.is_defined_; + is_defined_ = true; + return value_; + } + + // Value getters. 
+ T& operator*() { + RTC_DCHECK(is_defined_); + return value_; + } + const T& operator*() const { + RTC_DCHECK(is_defined_); + return value_; + } + + // Value getters, arrow operator. + T* operator->() { + RTC_DCHECK(is_defined_); + return &value_; + } + const T* operator->() const { + RTC_DCHECK(is_defined_); + return &value_; + } + + private: + T value_; +}; + +} // namespace webrtc + +#endif // API_STATS_RTCSTATS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats_objects.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats_objects.h new file mode 100644 index 000000000000..842fca8cb2c8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstats_objects.h @@ -0,0 +1,442 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_STATS_RTCSTATS_OBJECTS_H_ +#define API_STATS_RTCSTATS_OBJECTS_H_ + +#include +#include +#include + +#include "api/stats/rtcstats.h" + +namespace webrtc { + +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdatachannelstate +struct RTCDataChannelState { + static const char* const kConnecting; + static const char* const kOpen; + static const char* const kClosing; + static const char* const kClosed; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcstatsicecandidatepairstate +struct RTCStatsIceCandidatePairState { + static const char* const kFrozen; + static const char* const kWaiting; + static const char* const kInProgress; + static const char* const kFailed; + static const char* const kSucceeded; +}; + +// https://w3c.github.io/webrtc-pc/#rtcicecandidatetype-enum +struct RTCIceCandidateType { + static const char* const kHost; + static const char* const kSrflx; + static const char* const kPrflx; + static const char* const kRelay; +}; + +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdtlstransportstate +struct RTCDtlsTransportState { + static const char* const kNew; + static const char* const kConnecting; + static const char* const kConnected; + static const char* const kClosed; + static const char* const kFailed; +}; + +// |RTCMediaStreamTrackStats::kind| is not an enum in the spec but the only +// valid values are "audio" and "video". 
+// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-kind +struct RTCMediaStreamTrackKind { + static const char* const kAudio; + static const char* const kVideo; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcnetworktype +struct RTCNetworkType { + static const char* const kBluetooth; + static const char* const kCellular; + static const char* const kEthernet; + static const char* const kWifi; + static const char* const kWimax; + static const char* const kVpn; + static const char* const kUnknown; +}; + +// https://w3c.github.io/webrtc-stats/#certificatestats-dict* +class RTCCertificateStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCCertificateStats(const std::string& id, int64_t timestamp_us); + RTCCertificateStats(std::string&& id, int64_t timestamp_us); + RTCCertificateStats(const RTCCertificateStats& other); + ~RTCCertificateStats() override; + + RTCStatsMember fingerprint; + RTCStatsMember fingerprint_algorithm; + RTCStatsMember base64_certificate; + RTCStatsMember issuer_certificate_id; +}; + +// https://w3c.github.io/webrtc-stats/#codec-dict* +class RTCCodecStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCCodecStats(const std::string& id, int64_t timestamp_us); + RTCCodecStats(std::string&& id, int64_t timestamp_us); + RTCCodecStats(const RTCCodecStats& other); + ~RTCCodecStats() override; + + RTCStatsMember payload_type; + RTCStatsMember mime_type; + RTCStatsMember clock_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7061 + RTCStatsMember channels; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7061 + RTCStatsMember sdp_fmtp_line; + // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7061 + RTCStatsMember implementation; +}; + +// https://w3c.github.io/webrtc-stats/#dcstats-dict* +class RTCDataChannelStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCDataChannelStats(const std::string& id, int64_t timestamp_us); + RTCDataChannelStats(std::string&& id, int64_t timestamp_us); + RTCDataChannelStats(const RTCDataChannelStats& other); + ~RTCDataChannelStats() override; + + RTCStatsMember label; + RTCStatsMember protocol; + RTCStatsMember datachannelid; + // TODO(hbos): Support enum types? "RTCStatsMember"? + RTCStatsMember state; + RTCStatsMember messages_sent; + RTCStatsMember bytes_sent; + RTCStatsMember messages_received; + RTCStatsMember bytes_received; +}; + +// https://w3c.github.io/webrtc-stats/#candidatepair-dict* +// TODO(hbos): Tracking bug https://bugs.webrtc.org/7062 +class RTCIceCandidatePairStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCIceCandidatePairStats(const std::string& id, int64_t timestamp_us); + RTCIceCandidatePairStats(std::string&& id, int64_t timestamp_us); + RTCIceCandidatePairStats(const RTCIceCandidatePairStats& other); + ~RTCIceCandidatePairStats() override; + + RTCStatsMember transport_id; + RTCStatsMember local_candidate_id; + RTCStatsMember remote_candidate_id; + // TODO(hbos): Support enum types? + // "RTCStatsMember"? + RTCStatsMember state; + RTCStatsMember priority; + RTCStatsMember nominated; + // TODO(hbos): Collect this the way the spec describes it. We have a value for + // it but it is not spec-compliant. https://bugs.webrtc.org/7062 + RTCStatsMember writable; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember readable; + RTCStatsMember bytes_sent; + RTCStatsMember bytes_received; + RTCStatsMember total_round_trip_time; + RTCStatsMember current_round_trip_time; + RTCStatsMember available_outgoing_bitrate; + // TODO(hbos): Populate this value. 
It is wired up and collected the same way + // "VideoBwe.googAvailableReceiveBandwidth" is, but that value is always + // undefined. https://bugs.webrtc.org/7062 + RTCStatsMember available_incoming_bitrate; + RTCStatsMember requests_received; + RTCStatsMember requests_sent; + RTCStatsMember responses_received; + RTCStatsMember responses_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember retransmissions_received; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember retransmissions_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_requests_received; + RTCStatsMember consent_requests_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_responses_received; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_responses_sent; +}; + +// https://w3c.github.io/webrtc-stats/#icecandidate-dict* +// TODO(hbos): |RTCStatsCollector| only collects candidates that are part of +// ice candidate pairs, but there could be candidates not paired with anything. +// crbug.com/632723 +// TODO(qingsi): Add the stats of STUN binding requests (keepalives) and collect +// them in the new PeerConnection::GetStats. +class RTCIceCandidateStats : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCIceCandidateStats(const RTCIceCandidateStats& other); + ~RTCIceCandidateStats() override; + + RTCStatsMember transport_id; + RTCStatsMember is_remote; + RTCStatsMember network_type; + RTCStatsMember ip; + RTCStatsMember port; + RTCStatsMember protocol; + // TODO(hbos): Support enum types? "RTCStatsMember"? + RTCStatsMember candidate_type; + RTCStatsMember priority; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/632723 + RTCStatsMember url; + // TODO(hbos): |deleted = true| case is not supported by |RTCStatsCollector|. 
+ // crbug.com/632723 + RTCStatsMember deleted; // = false + + protected: + RTCIceCandidateStats( + const std::string& id, int64_t timestamp_us, bool is_remote); + RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote); +}; + +// In the spec both local and remote varieties are of type RTCIceCandidateStats. +// But here we define them as subclasses of |RTCIceCandidateStats| because the +// |kType| need to be different ("RTCStatsType type") in the local/remote case. +// https://w3c.github.io/webrtc-stats/#rtcstatstype-str* +// This forces us to have to override copy() and type(). +class RTCLocalIceCandidateStats final : public RTCIceCandidateStats { + public: + static const char kType[]; + RTCLocalIceCandidateStats(const std::string& id, int64_t timestamp_us); + RTCLocalIceCandidateStats(std::string&& id, int64_t timestamp_us); + std::unique_ptr copy() const override; + const char* type() const override; +}; + +class RTCRemoteIceCandidateStats final : public RTCIceCandidateStats { + public: + static const char kType[]; + RTCRemoteIceCandidateStats(const std::string& id, int64_t timestamp_us); + RTCRemoteIceCandidateStats(std::string&& id, int64_t timestamp_us); + std::unique_ptr copy() const override; + const char* type() const override; +}; + +// https://w3c.github.io/webrtc-stats/#msstats-dict* +// TODO(hbos): Tracking bug crbug.com/660827 +class RTCMediaStreamStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); + RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); + RTCMediaStreamStats(const RTCMediaStreamStats& other); + ~RTCMediaStreamStats() override; + + RTCStatsMember stream_identifier; + RTCStatsMember> track_ids; +}; + +// https://w3c.github.io/webrtc-stats/#mststats-dict* +// TODO(hbos): Tracking bug crbug.com/659137 +class RTCMediaStreamTrackStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + 
RTCMediaStreamTrackStats(const std::string& id, int64_t timestamp_us, + const char* kind); + RTCMediaStreamTrackStats(std::string&& id, int64_t timestamp_us, + const char* kind); + RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); + ~RTCMediaStreamTrackStats() override; + + RTCStatsMember track_identifier; + RTCStatsMember remote_source; + RTCStatsMember ended; + // TODO(hbos): |RTCStatsCollector| does not return stats for detached tracks. + // crbug.com/659137 + RTCStatsMember detached; + // See |RTCMediaStreamTrackKind| for valid values. + RTCStatsMember kind; + // TODO(gustaf): Implement jitter_buffer_delay for video (currently + // implemented for audio only). + // https://crbug.com/webrtc/8318 + RTCStatsMember jitter_buffer_delay; + // Video-only members + RTCStatsMember frame_width; + RTCStatsMember frame_height; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember frames_per_second; + RTCStatsMember frames_sent; + RTCStatsMember huge_frames_sent; + RTCStatsMember frames_received; + RTCStatsMember frames_decoded; + RTCStatsMember frames_dropped; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember frames_corrupted; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember partial_frames_lost; + // TODO(hbos): Not collected by |RTCStatsCollector|. 
crbug.com/659137 + RTCStatsMember full_frames_lost; + // Audio-only members + RTCStatsMember audio_level; + RTCStatsMember total_audio_energy; + RTCStatsMember echo_return_loss; + RTCStatsMember echo_return_loss_enhancement; + RTCStatsMember total_samples_received; + RTCStatsMember total_samples_duration; + RTCStatsMember concealed_samples; + RTCStatsMember concealment_events; +}; + +// https://w3c.github.io/webrtc-stats/#pcstats-dict* +class RTCPeerConnectionStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCPeerConnectionStats(const std::string& id, int64_t timestamp_us); + RTCPeerConnectionStats(std::string&& id, int64_t timestamp_us); + RTCPeerConnectionStats(const RTCPeerConnectionStats& other); + ~RTCPeerConnectionStats() override; + + RTCStatsMember data_channels_opened; + RTCStatsMember data_channels_closed; +}; + +// https://w3c.github.io/webrtc-stats/#streamstats-dict* +// TODO(hbos): Tracking bug crbug.com/657854 +class RTCRTPStreamStats : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCRTPStreamStats(const RTCRTPStreamStats& other); + ~RTCRTPStreamStats() override; + + RTCStatsMember ssrc; + // TODO(hbos): When the remote case is supported |RTCStatsCollector| needs to + // set this. crbug.com/657855, 657856 + RTCStatsMember associate_stats_id; + // TODO(hbos): Remote case not supported by |RTCStatsCollector|. + // crbug.com/657855, 657856 + RTCStatsMember is_remote; // = false + RTCStatsMember media_type; + RTCStatsMember track_id; + RTCStatsMember transport_id; + RTCStatsMember codec_id; + // FIR and PLI counts are only defined for |media_type == "video"|. + RTCStatsMember fir_count; + RTCStatsMember pli_count; + // TODO(hbos): NACK count should be collected by |RTCStatsCollector| for both + // audio and video but is only defined in the "video" case. crbug.com/657856 + RTCStatsMember nack_count; + // TODO(hbos): Not collected by |RTCStatsCollector|. 
crbug.com/657854 + // SLI count is only defined for |media_type == "video"|. + RTCStatsMember sli_count; + RTCStatsMember qp_sum; + + protected: + RTCRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCRTPStreamStats(std::string&& id, int64_t timestamp_us); +}; + +// https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* +// TODO(hbos): Support the remote case |is_remote = true|. +// https://bugs.webrtc.org/7065 +class RTCInboundRTPStreamStats final : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCInboundRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCInboundRTPStreamStats(std::string&& id, int64_t timestamp_us); + RTCInboundRTPStreamStats(const RTCInboundRTPStreamStats& other); + ~RTCInboundRTPStreamStats() override; + + RTCStatsMember packets_received; + RTCStatsMember bytes_received; + RTCStatsMember packets_lost; // Signed per RFC 3550 + // TODO(hbos): Collect and populate this value for both "audio" and "video", + // currently not collected for "video". https://bugs.webrtc.org/7065 + RTCStatsMember jitter; + RTCStatsMember fraction_lost; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember round_trip_time; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember packets_discarded; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember packets_repaired; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_packets_lost; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_packets_discarded; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_loss_count; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_discard_count; + // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7065 + RTCStatsMember burst_loss_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_discard_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember gap_loss_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember gap_discard_rate; + RTCStatsMember frames_decoded; +}; + +// https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict* +// TODO(hbos): Support the remote case |is_remote = true|. +// https://bugs.webrtc.org/7066 +class RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCOutboundRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCOutboundRTPStreamStats(std::string&& id, int64_t timestamp_us); + RTCOutboundRTPStreamStats(const RTCOutboundRTPStreamStats& other); + ~RTCOutboundRTPStreamStats() override; + + RTCStatsMember packets_sent; + RTCStatsMember bytes_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7066 + RTCStatsMember target_bitrate; + RTCStatsMember frames_encoded; +}; + +// https://w3c.github.io/webrtc-stats/#transportstats-dict* +class RTCTransportStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCTransportStats(const std::string& id, int64_t timestamp_us); + RTCTransportStats(std::string&& id, int64_t timestamp_us); + RTCTransportStats(const RTCTransportStats& other); + ~RTCTransportStats() override; + + RTCStatsMember bytes_sent; + RTCStatsMember bytes_received; + RTCStatsMember rtcp_transport_stats_id; + // TODO(hbos): Support enum types? "RTCStatsMember"? 
+ RTCStatsMember dtls_state; + RTCStatsMember selected_candidate_pair_id; + RTCStatsMember local_certificate_id; + RTCStatsMember remote_certificate_id; +}; + +} // namespace webrtc + +#endif // API_STATS_RTCSTATS_OBJECTS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatscollectorcallback.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatscollectorcallback.h new file mode 100644 index 000000000000..2c67bb81bbca --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatscollectorcallback.h @@ -0,0 +1,30 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTCSTATSCOLLECTORCALLBACK_H_ +#define API_STATS_RTCSTATSCOLLECTORCALLBACK_H_ + +#include "api/stats/rtcstatsreport.h" +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +class RTCStatsCollectorCallback : public virtual rtc::RefCountInterface { + public: + virtual ~RTCStatsCollectorCallback() {} + + virtual void OnStatsDelivered( + const rtc::scoped_refptr& report) = 0; +}; + +} // namespace webrtc + +#endif // API_STATS_RTCSTATSCOLLECTORCALLBACK_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatsreport.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatsreport.h new file mode 100644 index 000000000000..8485a08e08b2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/stats/rtcstatsreport.h @@ -0,0 +1,104 @@ +/* + * Copyright 2016 The WebRTC Project Authors. 
All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTCSTATSREPORT_H_ +#define API_STATS_RTCSTATSREPORT_H_ + +#include +#include +#include +#include + +#include "api/stats/rtcstats.h" +#include "rtc_base/refcount.h" +#include "rtc_base/refcountedobject.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +// A collection of stats. +// This is accessible as a map from |RTCStats::id| to |RTCStats|. +class RTCStatsReport : public rtc::RefCountInterface { + public: + typedef std::map> StatsMap; + + class ConstIterator { + public: + ConstIterator(const ConstIterator&& other); + ~ConstIterator(); + + ConstIterator& operator++(); + ConstIterator& operator++(int); + const RTCStats& operator*() const; + const RTCStats* operator->() const; + bool operator==(const ConstIterator& other) const; + bool operator!=(const ConstIterator& other) const; + + private: + friend class RTCStatsReport; + ConstIterator(const rtc::scoped_refptr& report, + StatsMap::const_iterator it); + + // Reference report to make sure it is kept alive. + rtc::scoped_refptr report_; + StatsMap::const_iterator it_; + }; + + // TODO(hbos): Remove "= 0" once Chromium unittest has been updated to call + // with a parameter. 
crbug.com/627816 + static rtc::scoped_refptr Create(int64_t timestamp_us = 0); + + explicit RTCStatsReport(int64_t timestamp_us); + RTCStatsReport(const RTCStatsReport& other) = delete; + rtc::scoped_refptr Copy() const; + + int64_t timestamp_us() const { return timestamp_us_; } + void AddStats(std::unique_ptr stats); + const RTCStats* Get(const std::string& id) const; + size_t size() const { return stats_.size(); } + + // Removes the stats object from the report, returning ownership of it or null + // if there is no object with |id|. + std::unique_ptr Take(const std::string& id); + // Takes ownership of all the stats in |victim|, leaving it empty. + void TakeMembersFrom(rtc::scoped_refptr victim); + + // Stats iterators. Stats are ordered lexicographically on |RTCStats::id|. + ConstIterator begin() const; + ConstIterator end() const; + + // Gets the subset of stats that are of type |T|, where |T| is any class + // descending from |RTCStats|. + template + std::vector GetStatsOfType() const { + std::vector stats_of_type; + for (const RTCStats& stats : *this) { + if (stats.type() == T::kType) + stats_of_type.push_back(&stats.cast_to()); + } + return stats_of_type; + } + + // Creates a JSON readable string representation of the report, + // listing all of its stats objects. + std::string ToJson() const; + + friend class rtc::RefCountedObject; + + private: + ~RTCStatsReport() override; + + int64_t timestamp_us_; + StatsMap stats_; +}; + +} // namespace webrtc + +#endif // API_STATS_RTCSTATSREPORT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/statstypes.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/statstypes.h new file mode 100644 index 000000000000..4eac02df80e7 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/statstypes.h @@ -0,0 +1,450 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains structures used for retrieving statistics from an ongoing +// libjingle session. + +#ifndef API_STATSTYPES_H_ +#define API_STATSTYPES_H_ + +#include +#include +#include +#include +#include + +#include "rtc_base/constructormagic.h" +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" +#include "rtc_base/stringencode.h" +#include "rtc_base/thread_checker.h" + +namespace webrtc { + +class StatsReport { + public: + // Indicates whether a track is for sending or receiving. + // Used in reports for audio/video tracks. + enum Direction { + kSend = 0, + kReceive, + }; + + enum StatsType { + // StatsReport types. + // A StatsReport of |type| = "googSession" contains overall information + // about the thing libjingle calls a session (which may contain one + // or more RTP sessions. + kStatsReportTypeSession, + + // A StatsReport of |type| = "googTransport" contains information + // about a libjingle "transport". + kStatsReportTypeTransport, + + // A StatsReport of |type| = "googComponent" contains information + // about a libjingle "channel" (typically, RTP or RTCP for a transport). + // This is intended to be the same thing as an ICE "Component". + kStatsReportTypeComponent, + + // A StatsReport of |type| = "googCandidatePair" contains information + // about a libjingle "connection" - a single source/destination port pair. + // This is intended to be the same thing as an ICE "candidate pair". + kStatsReportTypeCandidatePair, + + // A StatsReport of |type| = "VideoBWE" is statistics for video Bandwidth + // Estimation, which is global per-session. 
The |id| field is "bweforvideo" + // (will probably change in the future). + kStatsReportTypeBwe, + + // A StatsReport of |type| = "ssrc" is statistics for a specific rtp stream. + // The |id| field is the SSRC in decimal form of the rtp stream. + kStatsReportTypeSsrc, + + // A StatsReport of |type| = "remoteSsrc" is statistics for a specific + // rtp stream, generated by the remote end of the connection. + kStatsReportTypeRemoteSsrc, + + // A StatsReport of |type| = "googTrack" is statistics for a specific media + // track. The |id| field is the track id. + kStatsReportTypeTrack, + + // A StatsReport of |type| = "localcandidate" or "remotecandidate" is + // attributes on a specific ICE Candidate. It links to its connection pair + // by candidate id. The string value is taken from + // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*. + kStatsReportTypeIceLocalCandidate, + kStatsReportTypeIceRemoteCandidate, + + // A StatsReport of |type| = "googCertificate" contains an SSL certificate + // transmitted by one of the endpoints of this connection. The |id| is + // controlled by the fingerprint, and is used to identify the certificate in + // the Channel stats (as "googLocalCertificateId" or + // "googRemoteCertificateId") and in any child certificates (as + // "googIssuerId"). + kStatsReportTypeCertificate, + + // A StatsReport of |type| = "datachannel" with statistics for a + // particular DataChannel. 
+ kStatsReportTypeDataChannel, + }; + + enum StatsValueName { + kStatsValueNameActiveConnection, + kStatsValueNameAecDivergentFilterFraction, + kStatsValueNameAudioInputLevel, + kStatsValueNameAudioOutputLevel, + kStatsValueNameBytesReceived, + kStatsValueNameBytesSent, + kStatsValueNameCodecImplementationName, + kStatsValueNameConcealedSamples, + kStatsValueNameConcealmentEvents, + kStatsValueNameDataChannelId, + kStatsValueNameFramesDecoded, + kStatsValueNameFramesEncoded, + kStatsValueNameJitterBufferDelay, + kStatsValueNameMediaType, + kStatsValueNamePacketsLost, + kStatsValueNamePacketsReceived, + kStatsValueNamePacketsSent, + kStatsValueNameProtocol, + kStatsValueNameQpSum, + kStatsValueNameReceiving, + kStatsValueNameSelectedCandidatePairId, + kStatsValueNameSsrc, + kStatsValueNameState, + kStatsValueNameTotalAudioEnergy, + kStatsValueNameTotalSamplesDuration, + kStatsValueNameTotalSamplesReceived, + kStatsValueNameTransportId, + kStatsValueNameSentPingRequestsTotal, + kStatsValueNameSentPingRequestsBeforeFirstResponse, + kStatsValueNameSentPingResponses, + kStatsValueNameRecvPingRequests, + kStatsValueNameRecvPingResponses, + kStatsValueNameSentStunKeepaliveRequests, + kStatsValueNameRecvStunKeepaliveResponses, + kStatsValueNameStunKeepaliveRttTotal, + kStatsValueNameStunKeepaliveRttSquaredTotal, + + // Internal StatsValue names. 
+ kStatsValueNameAccelerateRate, + kStatsValueNameActualEncBitrate, + kStatsValueNameAdaptationChanges, + kStatsValueNameAvailableReceiveBandwidth, + kStatsValueNameAvailableSendBandwidth, + kStatsValueNameAvgEncodeMs, + kStatsValueNameBandwidthLimitedResolution, + kStatsValueNameBucketDelay, + kStatsValueNameCaptureStartNtpTimeMs, + kStatsValueNameCandidateIPAddress, + kStatsValueNameCandidateNetworkType, + kStatsValueNameCandidatePortNumber, + kStatsValueNameCandidatePriority, + kStatsValueNameCandidateTransportType, + kStatsValueNameCandidateType, + kStatsValueNameChannelId, + kStatsValueNameCodecName, + kStatsValueNameComponent, + kStatsValueNameContentName, + kStatsValueNameContentType, + kStatsValueNameCpuLimitedResolution, + kStatsValueNameCurrentDelayMs, + kStatsValueNameDecodeMs, + kStatsValueNameDecodingCNG, + kStatsValueNameDecodingCTN, + kStatsValueNameDecodingCTSG, + kStatsValueNameDecodingMutedOutput, + kStatsValueNameDecodingNormal, + kStatsValueNameDecodingPLC, + kStatsValueNameDecodingPLCCNG, + kStatsValueNameDer, + kStatsValueNameDtlsCipher, + kStatsValueNameEchoDelayMedian, + kStatsValueNameEchoDelayStdDev, + kStatsValueNameEchoReturnLoss, + kStatsValueNameEchoReturnLossEnhancement, + kStatsValueNameEncodeUsagePercent, + kStatsValueNameExpandRate, + kStatsValueNameFingerprint, + kStatsValueNameFingerprintAlgorithm, + kStatsValueNameFirsReceived, + kStatsValueNameFirsSent, + kStatsValueNameFrameHeightInput, + kStatsValueNameFrameHeightReceived, + kStatsValueNameFrameHeightSent, + kStatsValueNameFrameRateDecoded, + kStatsValueNameFrameRateInput, + kStatsValueNameFrameRateOutput, + kStatsValueNameFrameRateReceived, + kStatsValueNameFrameRateSent, + kStatsValueNameFrameWidthInput, + kStatsValueNameFrameWidthReceived, + kStatsValueNameFrameWidthSent, + kStatsValueNameHasEnteredLowResolution, + kStatsValueNameHugeFramesSent, + kStatsValueNameInitiator, + kStatsValueNameInterframeDelayMaxMs, // Max over last 10 seconds. 
+ kStatsValueNameIssuerId, + kStatsValueNameJitterBufferMs, + kStatsValueNameJitterReceived, + kStatsValueNameLabel, + kStatsValueNameLocalAddress, + kStatsValueNameLocalCandidateId, + kStatsValueNameLocalCandidateType, + kStatsValueNameLocalCertificateId, + kStatsValueNameMaxDecodeMs, + kStatsValueNameMinPlayoutDelayMs, + kStatsValueNameNacksReceived, + kStatsValueNameNacksSent, + kStatsValueNamePlisReceived, + kStatsValueNamePlisSent, + kStatsValueNamePreemptiveExpandRate, + kStatsValueNamePreferredJitterBufferMs, + kStatsValueNameRemoteAddress, + kStatsValueNameRemoteCandidateId, + kStatsValueNameRemoteCandidateType, + kStatsValueNameRemoteCertificateId, + kStatsValueNameRenderDelayMs, + kStatsValueNameResidualEchoLikelihood, + kStatsValueNameResidualEchoLikelihoodRecentMax, + kStatsValueNameAnaBitrateActionCounter, + kStatsValueNameAnaChannelActionCounter, + kStatsValueNameAnaDtxActionCounter, + kStatsValueNameAnaFecActionCounter, + kStatsValueNameAnaFrameLengthIncreaseCounter, + kStatsValueNameAnaFrameLengthDecreaseCounter, + kStatsValueNameAnaUplinkPacketLossFraction, + kStatsValueNameRetransmitBitrate, + kStatsValueNameRtt, + kStatsValueNameSecondaryDecodedRate, + kStatsValueNameSecondaryDiscardedRate, + kStatsValueNameSendPacketsDiscarded, + kStatsValueNameSpeechExpandRate, + kStatsValueNameSrtpCipher, + kStatsValueNameTargetDelayMs, + kStatsValueNameTargetEncBitrate, + kStatsValueNameTimingFrameInfo, // Result of |TimingFrameInfo::ToString| + kStatsValueNameTrackId, + kStatsValueNameTransmitBitrate, + kStatsValueNameTransportType, + kStatsValueNameTypingNoiseState, + kStatsValueNameWritable, + }; + + class IdBase : public rtc::RefCountInterface { + public: + ~IdBase() override; + StatsType type() const; + + // Users of IdBase will be using the Id typedef, which is compatible with + // this Equals() function. It simply calls the protected (and overridden) + // Equals() method. 
+ bool Equals(const rtc::scoped_refptr& other) const { + return Equals(*other.get()); + } + + virtual std::string ToString() const = 0; + + protected: + // Protected since users of the IdBase type will be using the Id typedef. + virtual bool Equals(const IdBase& other) const; + + explicit IdBase(StatsType type); // Only meant for derived classes. + const StatsType type_; + + static const char kSeparator = '_'; + }; + + typedef rtc::scoped_refptr Id; + + struct Value { + enum Type { + kInt, // int. + kInt64, // int64_t. + kFloat, // float. + kString, // std::string + kStaticString, // const char*. + kBool, // bool. + kId, // Id. + }; + + Value(StatsValueName name, int64_t value, Type int_type); + Value(StatsValueName name, float f); + Value(StatsValueName name, const std::string& value); + Value(StatsValueName name, const char* value); + Value(StatsValueName name, bool b); + Value(StatsValueName name, const Id& value); + + ~Value(); + + // Support ref counting. Note that for performance reasons, we + // don't use thread safe operations. Therefore, all operations + // affecting the ref count (in practice, creation and copying of + // the Values mapping) must occur on webrtc's signalling thread. + int AddRef() const { + RTC_DCHECK_RUN_ON(&thread_checker_); + return ++ref_count_; + } + int Release() const { + RTC_DCHECK_RUN_ON(&thread_checker_); + int count = --ref_count_; + if (!count) + delete this; + return count; + } + + // TODO(tommi): This compares name as well as value... + // I think we should only need to compare the value part and + // move the name part into a hash map. + bool Equals(const Value& other) const; + + // Comparison operators. Return true iff the current instance is of the + // correct type and holds the same value. No conversion is performed so + // a string value of "123" is not equal to an int value of 123 and an int + // value of 123 is not equal to a float value of 123.0f. 
+ // One exception to this is that types kInt and kInt64 can be compared and + // kString and kStaticString too. + bool operator==(const std::string& value) const; + bool operator==(const char* value) const; + bool operator==(int64_t value) const; + bool operator==(bool value) const; + bool operator==(float value) const; + bool operator==(const Id& value) const; + + // Getters that allow getting the native value directly. + // The caller must know the type beforehand or else hit a check. + int int_val() const; + int64_t int64_val() const; + float float_val() const; + const char* static_string_val() const; + const std::string& string_val() const; + bool bool_val() const; + const Id& id_val() const; + + // Returns the string representation of |name|. + const char* display_name() const; + + // Converts the native value to a string representation of the value. + std::string ToString() const; + + Type type() const { return type_; } + + // TODO(tommi): Move |name| and |display_name| out of the Value struct. + const StatsValueName name; + + private: + rtc::ThreadChecker thread_checker_; + mutable int ref_count_ RTC_GUARDED_BY(thread_checker_) = 0; + + const Type type_; + // TODO(tommi): Use C++ 11 union and make value_ const. + union InternalType { + int int_; + int64_t int64_; + float float_; + bool bool_; + std::string* string_; + const char* static_string_; + Id* id_; + } value_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Value); + }; + + typedef rtc::scoped_refptr ValuePtr; + typedef std::map Values; + + // Ownership of |id| is passed to |this|. + explicit StatsReport(const Id& id); + ~StatsReport(); + + // Factory functions for various types of stats IDs. 
+ static Id NewBandwidthEstimationId(); + static Id NewTypedId(StatsType type, const std::string& id); + static Id NewTypedIntId(StatsType type, int id); + static Id NewIdWithDirection( + StatsType type, const std::string& id, Direction direction); + static Id NewCandidateId(bool local, const std::string& id); + static Id NewComponentId( + const std::string& content_name, int component); + static Id NewCandidatePairId( + const std::string& content_name, int component, int index); + + const Id& id() const { return id_; } + StatsType type() const { return id_->type(); } + double timestamp() const { return timestamp_; } + void set_timestamp(double t) { timestamp_ = t; } + bool empty() const { return values_.empty(); } + const Values& values() const { return values_; } + + const char* TypeToString() const; + + void AddString(StatsValueName name, const std::string& value); + void AddString(StatsValueName name, const char* value); + void AddInt64(StatsValueName name, int64_t value); + void AddInt(StatsValueName name, int value); + void AddFloat(StatsValueName name, float value); + void AddBoolean(StatsValueName name, bool value); + void AddId(StatsValueName name, const Id& value); + + const Value* FindValue(StatsValueName name) const; + + private: + // The unique identifier for this object. + // This is used as a key for this report in ordered containers, + // so it must never be changed. + const Id id_; + double timestamp_; // Time since 1970-01-01T00:00:00Z in milliseconds. + Values values_; + + RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport); +}; + +// Typedef for an array of const StatsReport pointers. +// Ownership of the pointers held by this implementation is assumed to lie +// elsewhere and lifetime guarantees are made by the implementation that uses +// this type. In the StatsCollector, object ownership lies with the +// StatsCollection class. +typedef std::vector StatsReports; + +// A map from the report id to the report. 
+// This class wraps an STL container and provides a limited set of +// functionality in order to keep things simple. +class StatsCollection { + public: + StatsCollection(); + ~StatsCollection(); + + typedef std::list Container; + typedef Container::iterator iterator; + typedef Container::const_iterator const_iterator; + + const_iterator begin() const; + const_iterator end() const; + size_t size() const; + + // Creates a new report object with |id| that does not already + // exist in the list of reports. + StatsReport* InsertNew(const StatsReport::Id& id); + StatsReport* FindOrAddNew(const StatsReport::Id& id); + StatsReport* ReplaceOrAddNew(const StatsReport::Id& id); + + // Looks for a report with the given |id|. If one is not found, null + // will be returned. + StatsReport* Find(const StatsReport::Id& id); + + private: + Container list_; + rtc::ThreadChecker thread_checker_; +}; + +} // namespace webrtc + +#endif // API_STATSTYPES_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/audioproc_float.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/audioproc_float.h new file mode 100644 index 000000000000..25e4dd5b1cd9 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/audioproc_float.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_AUDIOPROC_FLOAT_H_ +#define API_TEST_AUDIOPROC_FLOAT_H_ + +#include + +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { +namespace test { + +// This is an interface for the audio processing simulation utility. This +// utility can be used to simulate the audioprocessing module using a recording +// (either an AEC dump or wav files), and generate the output as a wav file. +// The |ap_builder| object will be used to create the AudioProcessing instance +// that is used during the simulation. The |ap_builder| supports setting of +// injectable components, which will be passed on to the created AudioProcessing +// instance. It is needed to pass the command line flags as |argc| and |argv|, +// so these can be interpreted properly by the utility. +// To get a fully-working audioproc_f utility, all that is needed is to write a +// main function, create an AudioProcessingBuilder, optionally set custom +// processing components on it, and pass the builder together with the command +// line arguments into this function. +// To see a list of all supported command line flags, run the executable with +// the '--help' flag. +int AudioprocFloat(std::unique_ptr ap_builder, + int argc, + char* argv[]); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_AUDIOPROC_FLOAT_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/create_videocodec_test_fixture.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/create_videocodec_test_fixture.h new file mode 100644 index 000000000000..7a44f6b0580d --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/create_videocodec_test_fixture.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ +#define API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ + +#include + +#include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateVideoCodecTestFixture( + const VideoCodecTestFixture::Config& config); + +std::unique_ptr CreateVideoCodecTestFixture( + const VideoCodecTestFixture::Config& config, + std::unique_ptr decoder_factory, + std::unique_ptr encoder_factory); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/fakeconstraints.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/fakeconstraints.h new file mode 100644 index 000000000000..2010400aa4e8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/fakeconstraints.h @@ -0,0 +1,116 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_FAKECONSTRAINTS_H_ +#define API_TEST_FAKECONSTRAINTS_H_ + +#include +#include + +#include "api/mediaconstraintsinterface.h" +#include "rtc_base/stringencode.h" + +namespace webrtc { + +class FakeConstraints : public webrtc::MediaConstraintsInterface { + public: + FakeConstraints() { } + virtual ~FakeConstraints() { } + + virtual const Constraints& GetMandatory() const { + return mandatory_; + } + + virtual const Constraints& GetOptional() const { + return optional_; + } + + template + void AddMandatory(const std::string& key, const T& value) { + mandatory_.push_back(Constraint(key, rtc::ToString(value))); + } + + template + void SetMandatory(const std::string& key, const T& value) { + std::string value_str; + if (mandatory_.FindFirst(key, &value_str)) { + for (Constraints::iterator iter = mandatory_.begin(); + iter != mandatory_.end(); ++iter) { + if (iter->key == key) { + mandatory_.erase(iter); + break; + } + } + } + mandatory_.push_back(Constraint(key, rtc::ToString(value))); + } + + template + void AddOptional(const std::string& key, const T& value) { + optional_.push_back(Constraint(key, rtc::ToString(value))); + } + + void SetMandatoryMinAspectRatio(double ratio) { + SetMandatory(MediaConstraintsInterface::kMinAspectRatio, ratio); + } + + void SetMandatoryMinWidth(int width) { + SetMandatory(MediaConstraintsInterface::kMinWidth, width); + } + + void SetMandatoryMinHeight(int height) { + SetMandatory(MediaConstraintsInterface::kMinHeight, height); + } + + void SetOptionalMaxWidth(int width) { + AddOptional(MediaConstraintsInterface::kMaxWidth, width); + } + + void SetMandatoryMaxFrameRate(int frame_rate) { + SetMandatory(MediaConstraintsInterface::kMaxFrameRate, frame_rate); + } + + void SetMandatoryReceiveAudio(bool enable) { + SetMandatory(MediaConstraintsInterface::kOfferToReceiveAudio, enable); + } + + void SetMandatoryReceiveVideo(bool enable) { + SetMandatory(MediaConstraintsInterface::kOfferToReceiveVideo, enable); + } + + void 
SetMandatoryUseRtpMux(bool enable) { + SetMandatory(MediaConstraintsInterface::kUseRtpMux, enable); + } + + void SetMandatoryIceRestart(bool enable) { + SetMandatory(MediaConstraintsInterface::kIceRestart, enable); + } + + void SetAllowRtpDataChannels() { + SetMandatory(MediaConstraintsInterface::kEnableRtpDataChannels, true); + SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, false); + } + + void SetOptionalVAD(bool enable) { + AddOptional(MediaConstraintsInterface::kVoiceActivityDetection, enable); + } + + void SetAllowDtlsSctpDataChannels() { + SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, true); + } + + private: + Constraints mandatory_; + Constraints optional_; +}; + +} // namespace webrtc + +#endif // API_TEST_FAKECONSTRAINTS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_audio_mixer.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_audio_mixer.h new file mode 100644 index 000000000000..7a6c7420e811 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_audio_mixer.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_AUDIO_MIXER_H_ +#define API_TEST_MOCK_AUDIO_MIXER_H_ + +#include "api/audio/audio_mixer.h" + +#include "test/gmock.h" + +namespace webrtc { +namespace test { + +class MockAudioMixer : public AudioMixer { + public: + MOCK_METHOD1(AddSource, bool(Source* audio_source)); + MOCK_METHOD1(RemoveSource, void(Source* audio_source)); + MOCK_METHOD2(Mix, + void(size_t number_of_channels, + AudioFrame* audio_frame_for_mixing)); +}; +} // namespace test +} // namespace webrtc + +#endif // API_TEST_MOCK_AUDIO_MIXER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpreceiver.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpreceiver.h new file mode 100644 index 000000000000..de69ceaccdb4 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpreceiver.h @@ -0,0 +1,38 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_RTPRECEIVER_H_ +#define API_TEST_MOCK_RTPRECEIVER_H_ + +#include +#include + +#include "api/rtpreceiverinterface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpReceiver : public rtc::RefCountedObject { + public: + MOCK_METHOD1(SetTrack, void(MediaStreamTrackInterface*)); + MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); + MOCK_CONST_METHOD0(streams, + std::vector>()); + MOCK_CONST_METHOD0(media_type, cricket::MediaType()); + MOCK_CONST_METHOD0(id, std::string()); + MOCK_CONST_METHOD0(GetParameters, RtpParameters()); + MOCK_METHOD1(SetParameters, bool(const RtpParameters&)); + MOCK_METHOD1(SetObserver, void(RtpReceiverObserverInterface*)); + MOCK_CONST_METHOD0(GetSources, std::vector()); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTPRECEIVER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpsender.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpsender.h new file mode 100644 index 000000000000..22f391b86c26 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_rtpsender.h @@ -0,0 +1,37 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_RTPSENDER_H_ +#define API_TEST_MOCK_RTPSENDER_H_ + +#include +#include + +#include "api/rtpsenderinterface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpSender : public rtc::RefCountedObject { + public: + MOCK_METHOD1(SetTrack, bool(MediaStreamTrackInterface*)); + MOCK_CONST_METHOD0(track, rtc::scoped_refptr()); + MOCK_CONST_METHOD0(ssrc, uint32_t()); + MOCK_CONST_METHOD0(media_type, cricket::MediaType()); + MOCK_CONST_METHOD0(id, std::string()); + MOCK_CONST_METHOD0(stream_ids, std::vector()); + MOCK_METHOD0(GetParameters, RtpParameters()); + MOCK_METHOD1(SetParameters, RTCError(const RtpParameters&)); + MOCK_CONST_METHOD0(GetDtmfSender, rtc::scoped_refptr()); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTPSENDER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_decoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_decoder_factory.h new file mode 100644 index 000000000000..915e3911f08e --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_decoder_factory.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ +#define API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory { + public: + MOCK_CONST_METHOD0(GetSupportedFormats, + std::vector()); + + // We need to proxy to a return type that is copyable. + std::unique_ptr CreateVideoDecoder( + const webrtc::SdpVideoFormat& format) { + return std::unique_ptr( + CreateVideoDecoderProxy(format)); + } + MOCK_METHOD1(CreateVideoDecoderProxy, + webrtc::VideoDecoder*(const webrtc::SdpVideoFormat&)); + + MOCK_METHOD0(Die, void()); + ~MockVideoDecoderFactory() { Die(); } +}; +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_encoder_factory.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_encoder_factory.h new file mode 100644 index 000000000000..a694b636e025 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/mock_video_encoder_factory.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ +#define API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory { + public: + MOCK_CONST_METHOD0(GetSupportedFormats, + std::vector()); + MOCK_CONST_METHOD1(QueryVideoEncoder, + CodecInfo(const webrtc::SdpVideoFormat&)); + + // We need to proxy to a return type that is copyable. + std::unique_ptr CreateVideoEncoder( + const webrtc::SdpVideoFormat& format) { + return std::unique_ptr( + CreateVideoEncoderProxy(format)); + } + MOCK_METHOD1(CreateVideoEncoderProxy, + webrtc::VideoEncoder*(const webrtc::SdpVideoFormat&)); + + MOCK_METHOD0(Die, void()); + ~MockVideoEncoderFactory() { Die(); } +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_fixture.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_fixture.h new file mode 100644 index 000000000000..68e063750c4d --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_fixture.h @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ +#define API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ + +#include +#include + +#include "api/test/videocodec_test_stats.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/video_coding/include/video_codec_interface.h" + +namespace webrtc { +namespace test { + +// Rates for the encoder and the frame number when to change profile. +struct RateProfile { + size_t target_kbps; + size_t input_fps; + size_t frame_index_rate_update; +}; + +struct RateControlThresholds { + double max_avg_bitrate_mismatch_percent; + double max_time_to_reach_target_bitrate_sec; + // TODO(ssilkin): Use absolute threshold for framerate. + double max_avg_framerate_mismatch_percent; + double max_avg_buffer_level_sec; + double max_max_key_frame_delay_sec; + double max_max_delta_frame_delay_sec; + size_t max_num_spatial_resizes; + size_t max_num_key_frames; +}; + +struct QualityThresholds { + double min_avg_psnr; + double min_min_psnr; + double min_avg_ssim; + double min_min_ssim; +}; + +struct BitstreamThresholds { + size_t max_max_nalu_size_bytes; +}; + +// Should video files be saved persistently to disk for post-run visualization? 
+struct VisualizationParams { + bool save_encoded_ivf; + bool save_decoded_y4m; +}; + +class VideoCodecTestFixture { + public: + class EncodedFrameChecker { + public: + virtual ~EncodedFrameChecker() = default; + virtual void CheckEncodedFrame(webrtc::VideoCodecType codec, + const EncodedImage& encoded_frame) const = 0; + }; + struct Config { + Config(); + void SetCodecSettings(std::string codec_name, + size_t num_simulcast_streams, + size_t num_spatial_layers, + size_t num_temporal_layers, + bool denoising_on, + bool frame_dropper_on, + bool spatial_resize_on, + size_t width, + size_t height); + + size_t NumberOfCores() const; + size_t NumberOfTemporalLayers() const; + size_t NumberOfSpatialLayers() const; + size_t NumberOfSimulcastStreams() const; + + std::string ToString() const; + std::string CodecName() const; + bool IsAsyncCodec() const; + + // Plain name of YUV file to process without file extension. + std::string filename; + + // File to process. This must be a video file in the YUV format. + std::string filepath; + + // Number of frames to process. + size_t num_frames = 0; + + // Bitstream constraints. + size_t max_payload_size_bytes = 1440; + + // Should we decode the encoded frames? + bool decode = true; + + // Force the encoder and decoder to use a single core for processing. + bool use_single_core = false; + + // Should cpu usage be measured? + // If set to true, the encoding will run in real-time. + bool measure_cpu = false; + + // If > 0: forces the encoder to create a keyframe every Nth frame. + size_t keyframe_interval = 0; + + // Codec settings to use. + webrtc::VideoCodec codec_settings; + + // Name of the codec being tested. + std::string codec_name; + + // H.264 specific settings. + struct H264CodecSettings { + H264::Profile profile = H264::kProfileConstrainedBaseline; + H264PacketizationMode packetization_mode = + webrtc::H264PacketizationMode::NonInterleaved; + } h264_codec_settings; + + // Should hardware accelerated codecs be used? 
+ bool hw_encoder = false; + bool hw_decoder = false; + + // Should the encoder be wrapped in a SimulcastEncoderAdapter? + bool simulcast_adapted_encoder = false; + + // Should the hardware codecs be wrapped in software fallbacks? + bool sw_fallback_encoder = false; + bool sw_fallback_decoder = false; + + // Custom checker that will be called for each frame. + const EncodedFrameChecker* encoded_frame_checker = nullptr; + + // Print out frame level stats. + bool print_frame_level_stats = false; + }; + + virtual ~VideoCodecTestFixture() = default; + + virtual void RunTest(const std::vector& rate_profiles, + const std::vector* rc_thresholds, + const std::vector* quality_thresholds, + const BitstreamThresholds* bs_thresholds, + const VisualizationParams* visualization_params) = 0; + virtual VideoCodecTestStats& GetStats() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_stats.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_stats.h new file mode 100644 index 000000000000..de6d3500d43f --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/test/videocodec_test_stats.h @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_VIDEOCODEC_TEST_STATS_H_ +#define API_TEST_VIDEOCODEC_TEST_STATS_H_ + +#include +#include +#include + +#include "common_types.h" // NOLINT(build/include) + +namespace webrtc { +namespace test { + +// Statistics for a sequence of processed frames. This class is not thread safe. +class VideoCodecTestStats { + public: + // Statistics for one processed frame. + struct FrameStatistics { + FrameStatistics(size_t frame_number, size_t rtp_timestamp); + FrameStatistics(const FrameStatistics& rhs); + + std::string ToString() const; + + size_t frame_number = 0; + size_t rtp_timestamp = 0; + + // Encoding. + int64_t encode_start_ns = 0; + int encode_return_code = 0; + bool encoding_successful = false; + size_t encode_time_us = 0; + size_t target_bitrate_kbps = 0; + size_t length_bytes = 0; + webrtc::FrameType frame_type = kVideoFrameDelta; + + // Layering. + size_t spatial_idx = 0; + size_t temporal_idx = 0; + bool inter_layer_predicted = false; + bool non_ref_for_inter_layer_pred = true; + + // H264 specific. + size_t max_nalu_size_bytes = 0; + + // Decoding. + int64_t decode_start_ns = 0; + int decode_return_code = 0; + bool decoding_successful = false; + size_t decode_time_us = 0; + size_t decoded_width = 0; + size_t decoded_height = 0; + + // Quantization. + int qp = -1; + + // Quality. + float psnr_y = 0.0f; + float psnr_u = 0.0f; + float psnr_v = 0.0f; + float psnr = 0.0f; // 10 * log10(255^2 / (mse_y + mse_u + mse_v)). + float ssim = 0.0f; // 0.8 * ssim_y + 0.1 * (ssim_u + ssim_v). 
+ }; + + struct VideoStatistics { + VideoStatistics(); + VideoStatistics(const VideoStatistics&); + + std::string ToString(std::string prefix) const; + + size_t target_bitrate_kbps = 0; + float input_framerate_fps = 0.0f; + + size_t spatial_idx = 0; + size_t temporal_idx = 0; + + size_t width = 0; + size_t height = 0; + + size_t length_bytes = 0; + size_t bitrate_kbps = 0; + float framerate_fps = 0; + + float enc_speed_fps = 0.0f; + float dec_speed_fps = 0.0f; + + float avg_delay_sec = 0.0f; + float max_key_frame_delay_sec = 0.0f; + float max_delta_frame_delay_sec = 0.0f; + float time_to_reach_target_bitrate_sec = 0.0f; + + float avg_key_frame_size_bytes = 0.0f; + float avg_delta_frame_size_bytes = 0.0f; + float avg_qp = 0.0f; + + float avg_psnr_y = 0.0f; + float avg_psnr_u = 0.0f; + float avg_psnr_v = 0.0f; + float avg_psnr = 0.0f; + float min_psnr = 0.0f; + float avg_ssim = 0.0f; + float min_ssim = 0.0f; + + size_t num_input_frames = 0; + size_t num_encoded_frames = 0; + size_t num_decoded_frames = 0; + size_t num_key_frames = 0; + size_t num_spatial_resizes = 0; + size_t max_nalu_size_bytes = 0; + }; + + virtual ~VideoCodecTestStats() = default; + + // Creates a FrameStatistics for the next frame to be processed. + virtual FrameStatistics* AddFrame(size_t timestamp, size_t spatial_idx) = 0; + + // Returns the FrameStatistics corresponding to |frame_number| or |timestamp|. 
+ virtual FrameStatistics* GetFrame(size_t frame_number, + size_t spatial_idx) = 0; + virtual FrameStatistics* GetFrameWithTimestamp(size_t timestamp, + size_t spatial_idx) = 0; + + virtual std::vector SliceAndCalcLayerVideoStatistic( + size_t first_frame_num, + size_t last_frame_num) = 0; + + virtual VideoStatistics SliceAndCalcAggregatedVideoStatistic( + size_t first_frame_num, + size_t last_frame_num) = 0; + + virtual void PrintFrameStatistics() = 0; + + virtual size_t Size(size_t spatial_idx) = 0; + + virtual void Clear() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEOCODEC_TEST_STATS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/bitrate_settings.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/bitrate_settings.h new file mode 100644 index 000000000000..1a24d9057fb2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/bitrate_settings.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_BITRATE_SETTINGS_H_ +#define API_TRANSPORT_BITRATE_SETTINGS_H_ + +#include "api/optional.h" + +namespace webrtc { + +// Configuration of send bitrate. The |start_bitrate_bps| value is +// used for multiple purposes, both as a prior in the bandwidth +// estimator, and for initial configuration of the encoder. We may +// want to create separate apis for those, and use a smaller struct +// with only the min and max constraints. 
+struct BitrateSettings { + BitrateSettings(); + ~BitrateSettings(); + BitrateSettings(const BitrateSettings&); + // 0 <= min <= start <= max should hold for set parameters. + rtc::Optional min_bitrate_bps; + rtc::Optional start_bitrate_bps; + rtc::Optional max_bitrate_bps; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_BITRATE_SETTINGS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_control.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_control.h new file mode 100644 index 000000000000..abd945d89703 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_control.h @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_NETWORK_CONTROL_H_ +#define API_TRANSPORT_NETWORK_CONTROL_H_ +#include +#include + +#include "api/transport/network_types.h" + +namespace webrtc { + +class TargetTransferRateObserver { + public: + virtual ~TargetTransferRateObserver() = default; + // Called to indicate target transfer rate as well as giving information about + // the current estimate of network parameters. + virtual void OnTargetTransferRate(TargetTransferRate) = 0; +}; + +// Configuration sent to factory create function. The parameters here are +// optional to use for a network controller implementation. +struct NetworkControllerConfig { + // The initial constraints to start with, these can be changed at any later + // time by calls to OnTargetRateConstraints. 
+ TargetRateConstraints constraints; + // Initial stream specific configuration, these are changed at any later time + // by calls to OnStreamsConfig. + StreamsConfig stream_based_config; + // The initial bandwidth estimate to base target rate on. This should be used + // as the basis for initial OnTargetTransferRate and OnPacerConfig callbacks. + // Note that starting rate is only provided on construction. + DataRate starting_bandwidth = DataRate::Infinity(); +}; + +// NetworkControllerInterface is implemented by network controllers. A network +// controller is a class that uses information about network state and traffic +// to estimate network parameters such as round trip time and bandwidth. Network +// controllers does not guarantee thread safety, the interface must be used in a +// non-concurrent fashion. +class NetworkControllerInterface { + public: + virtual ~NetworkControllerInterface() = default; + + // Called when network availabilty changes. + virtual NetworkControlUpdate OnNetworkAvailability(NetworkAvailability) = 0; + // Called when the receiving or sending endpoint changes address. + virtual NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange) = 0; + // Called periodically with a periodicy as specified by + // NetworkControllerFactoryInterface::GetProcessInterval. + virtual NetworkControlUpdate OnProcessInterval(ProcessInterval) = 0; + // Called when remotely calculated bitrate is received. + virtual NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport) = 0; + // Called round trip time has been calculated by protocol specific mechanisms. + virtual NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate) = 0; + // Called when a packet is sent on the network. + virtual NetworkControlUpdate OnSentPacket(SentPacket) = 0; + // Called when the stream specific configuration has been updated. + virtual NetworkControlUpdate OnStreamsConfig(StreamsConfig) = 0; + // Called when target transfer rate constraints has been changed. 
+ virtual NetworkControlUpdate OnTargetRateConstraints( + TargetRateConstraints) = 0; + // Called when a protocol specific calculation of packet loss has been made. + virtual NetworkControlUpdate OnTransportLossReport(TransportLossReport) = 0; + // Called with per packet feedback regarding receive time. + virtual NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback) = 0; +}; + +// NetworkControllerFactoryInterface is an interface for creating a network +// controller. +class NetworkControllerFactoryInterface { + public: + virtual ~NetworkControllerFactoryInterface() = default; + + // Used to create a new network controller, requires an observer to be + // provided to handle callbacks. + virtual std::unique_ptr Create( + NetworkControllerConfig config) = 0; + // Returns the interval by which the network controller expects + // OnProcessInterval calls. + virtual TimeDelta GetProcessInterval() const = 0; +}; +} // namespace webrtc + +#endif // API_TRANSPORT_NETWORK_CONTROL_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_types.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_types.h new file mode 100644 index 000000000000..0693f69c7a02 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/network_types.h @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_NETWORK_TYPES_H_ +#define API_TRANSPORT_NETWORK_TYPES_H_ +#include +#include + +#include "api/optional.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { + +// Configuration + +// Use StreamsConfig for information about streams that is required for specific +// adjustments to the algorithms in network controllers. Especially useful +// for experiments. +struct StreamsConfig { + StreamsConfig(); + StreamsConfig(const StreamsConfig&); + ~StreamsConfig(); + Timestamp at_time = Timestamp::Infinity(); + bool requests_alr_probing = false; + rtc::Optional pacing_factor; + rtc::Optional min_pacing_rate; + rtc::Optional max_padding_rate; + rtc::Optional max_total_allocated_bitrate; +}; + +struct TargetRateConstraints { + TargetRateConstraints(); + TargetRateConstraints(const TargetRateConstraints&); + ~TargetRateConstraints(); + Timestamp at_time = Timestamp::Infinity(); + rtc::Optional min_data_rate; + rtc::Optional max_data_rate; +}; + +// Send side information + +struct NetworkAvailability { + Timestamp at_time = Timestamp::Infinity(); + bool network_available = false; +}; + +struct NetworkRouteChange { + NetworkRouteChange(); + NetworkRouteChange(const NetworkRouteChange&); + ~NetworkRouteChange(); + Timestamp at_time = Timestamp::Infinity(); + // The TargetRateConstraints are set here so they can be changed synchronously + // when network route changes. + TargetRateConstraints constraints; + rtc::Optional starting_rate; +}; + +struct PacedPacketInfo { + PacedPacketInfo(); + PacedPacketInfo(int probe_cluster_id, + int probe_cluster_min_probes, + int probe_cluster_min_bytes); + + bool operator==(const PacedPacketInfo& rhs) const; + + // TODO(srte): Move probing info to a separate, optional struct. 
+ static constexpr int kNotAProbe = -1; + int send_bitrate_bps = -1; + int probe_cluster_id = kNotAProbe; + int probe_cluster_min_probes = -1; + int probe_cluster_min_bytes = -1; +}; + +struct SentPacket { + Timestamp send_time = Timestamp::Infinity(); + DataSize size = DataSize::Zero(); + PacedPacketInfo pacing_info; + // Transport independent sequence number, any tracked packet should have a + // sequence number that is unique over the whole call and increasing by 1 for + // each packet. + int64_t sequence_number; +}; + +// Transport level feedback + +struct RemoteBitrateReport { + Timestamp receive_time = Timestamp::Infinity(); + DataRate bandwidth = DataRate::Infinity(); +}; + +struct RoundTripTimeUpdate { + Timestamp receive_time = Timestamp::Infinity(); + TimeDelta round_trip_time = TimeDelta::PlusInfinity(); + bool smoothed = false; +}; + +struct TransportLossReport { + Timestamp receive_time = Timestamp::Infinity(); + Timestamp start_time = Timestamp::Infinity(); + Timestamp end_time = Timestamp::Infinity(); + uint64_t packets_lost_delta = 0; + uint64_t packets_received_delta = 0; +}; + +// Packet level feedback + +struct PacketResult { + PacketResult(); + PacketResult(const PacketResult&); + ~PacketResult(); + + rtc::Optional sent_packet; + Timestamp receive_time = Timestamp::Infinity(); +}; + +struct TransportPacketsFeedback { + TransportPacketsFeedback(); + TransportPacketsFeedback(const TransportPacketsFeedback& other); + ~TransportPacketsFeedback(); + + Timestamp feedback_time = Timestamp::Infinity(); + DataSize data_in_flight = DataSize::Zero(); + DataSize prior_in_flight = DataSize::Zero(); + std::vector packet_feedbacks; + + std::vector ReceivedWithSendInfo() const; + std::vector LostWithSendInfo() const; + std::vector PacketsWithFeedback() const; +}; + +// Network estimation + +struct NetworkEstimate { + Timestamp at_time = Timestamp::Infinity(); + DataRate bandwidth = DataRate::Infinity(); + TimeDelta round_trip_time = TimeDelta::PlusInfinity(); + 
TimeDelta bwe_period = TimeDelta::PlusInfinity(); + + float loss_rate_ratio = 0; +}; + +// Network control + +struct PacerConfig { + Timestamp at_time = Timestamp::Infinity(); + // Pacer should send at most data_window data over time_window duration. + DataSize data_window = DataSize::Infinity(); + TimeDelta time_window = TimeDelta::PlusInfinity(); + // Pacer should send at least pad_window data over time_window duration. + DataSize pad_window = DataSize::Zero(); + DataRate data_rate() const { return data_window / time_window; } + DataRate pad_rate() const { return pad_window / time_window; } +}; + +struct ProbeClusterConfig { + Timestamp at_time = Timestamp::Infinity(); + DataRate target_data_rate = DataRate::Zero(); + TimeDelta target_duration = TimeDelta::Zero(); + int32_t target_probe_count = 0; +}; + +struct TargetTransferRate { + Timestamp at_time = Timestamp::Infinity(); + // The estimate on which the target rate is based on. + NetworkEstimate network_estimate; + DataRate target_rate = DataRate::Zero(); +}; + +// Contains updates of network controller comand state. Using optionals to +// indicate whether a member has been updated. The array of probe clusters +// should be used to send out probes if not empty. 
+struct NetworkControlUpdate { + NetworkControlUpdate(); + NetworkControlUpdate(const NetworkControlUpdate&); + ~NetworkControlUpdate(); + rtc::Optional congestion_window; + rtc::Optional pacer_config; + std::vector probe_cluster_configs; + rtc::Optional target_rate; +}; + +// Process control +struct ProcessInterval { + Timestamp at_time = Timestamp::Infinity(); +}; +} // namespace webrtc + +#endif // API_TRANSPORT_NETWORK_TYPES_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/mock_network_control.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/mock_network_control.h new file mode 100644 index 000000000000..df83791e18c3 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/mock_network_control.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ +#define API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ + +#include "api/transport/include/network_control.h" +#include "test/gmock.h" + +namespace webrtc { +namespace test { +class MockTargetTransferRateObserver : public TargetTransferRateObserver { + public: + MOCK_METHOD1(OnTargetTransferRate, void(TargetTransferRate)); +}; +} // namespace test +} // namespace webrtc + +#endif // API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/network_control_tester.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/network_control_tester.h new file mode 100644 index 000000000000..df52a9bf829a --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/transport/test/network_control_tester.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_TEST_NETWORK_CONTROL_TESTER_H_ +#define API_TRANSPORT_TEST_NETWORK_CONTROL_TESTER_H_ + +#include +#include +#include + +#include "api/optional.h" +#include "api/transport/network_control.h" + +namespace webrtc { +namespace test { + +// Produces one packet per time delta +class SimpleTargetRateProducer { + public: + static SentPacket ProduceNext(const NetworkControlUpdate& state, + Timestamp current_time, + TimeDelta time_delta); +}; + +class NetworkControllerTester { + public: + // A PacketProducer is a function that takes a network control state, a + // timestamp representing the expected send time and a time delta of the send + // times (This allows the PacketProducer to be stateless). It returns a + // SentPacket struct with actual send time and packet size. + using PacketProducer = std::function< + SentPacket(const NetworkControlUpdate&, Timestamp, TimeDelta)>; + NetworkControllerTester(NetworkControllerFactoryInterface* factory, + NetworkControllerConfig initial_config); + ~NetworkControllerTester(); + + // Runs the simulations for the given duration, the PacketProducer will be + // called repeatedly based on the given packet interval and the network will + // be simulated using given bandwidth and propagation delay. The simulation + // will call the controller under test with OnSentPacket and + // OnTransportPacketsFeedback. + + // Note that OnTransportPacketsFeedback will only be called for + // packets with resulting feedback time within the simulated duration. Packets + // with later feedback time are saved and used in the next call to + // RunSimulation where enough simulated time has passed. 
+ void RunSimulation(TimeDelta duration, + TimeDelta packet_interval, + DataRate actual_bandwidth, + TimeDelta propagation_delay, + PacketProducer next_packet); + NetworkControlUpdate GetState() { return state_; } + + private: + PacketResult SimulateSend(SentPacket packet, + TimeDelta time_delta, + TimeDelta propagation_delay, + DataRate actual_bandwidth); + std::unique_ptr controller_; + TimeDelta process_interval_ = TimeDelta::PlusInfinity(); + Timestamp current_time_; + int64_t packet_sequence_number_; + TimeDelta accumulated_delay_; + std::deque outstanding_packets_; + NetworkControlUpdate state_; +}; +} // namespace test +} // namespace webrtc + +#endif // API_TRANSPORT_TEST_NETWORK_CONTROL_TESTER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/turncustomizer.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/turncustomizer.h new file mode 100644 index 000000000000..517abcc8f0d5 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/turncustomizer.h @@ -0,0 +1,46 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TURNCUSTOMIZER_H_ +#define API_TURNCUSTOMIZER_H_ + +#include + +namespace cricket { +class PortInterface; +class StunMessage; +} // namespace cricket + + +namespace webrtc { + +class TurnCustomizer { + public: + // This is called before a TURN message is sent. + // This could be used to add implementation specific attributes to a request. 
+ virtual void MaybeModifyOutgoingStunMessage( + cricket::PortInterface* port, + cricket::StunMessage* message) = 0; + + // TURN can send data using channel data messages or Send indication. + // This method should return false if |data| should be sent using + // a Send indication instead of a ChannelData message, even if a + // channel is bound. + virtual bool AllowChannelData(cricket::PortInterface* port, + const void* data, + size_t size, + bool payload) = 0; + + virtual ~TurnCustomizer() {} +}; + +} // namespace webrtc + +#endif // API_TURNCUSTOMIZER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/umametrics.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/umametrics.h new file mode 100644 index 000000000000..f885416f37ac --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/umametrics.h @@ -0,0 +1,194 @@ +/* + * Copyright 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains enums related to IPv4/IPv6 metrics. + +#ifndef API_UMAMETRICS_H_ +#define API_UMAMETRICS_H_ + +#include "rtc_base/refcount.h" + +namespace webrtc { + +// Used to specify which enum counter type we're incrementing in +// MetricsObserverInterface::IncrementEnumCounter. +enum PeerConnectionEnumCounterType { + kEnumCounterAddressFamily, + // For the next 2 counters, we track them separately based on the "first hop" + // protocol used by the local candidate. "First hop" means the local candidate + // type in the case of non-TURN candidates, and the protocol used to connect + // to the TURN server in the case of TURN candidates. 
+ kEnumCounterIceCandidatePairTypeUdp, + kEnumCounterIceCandidatePairTypeTcp, + + kEnumCounterAudioSrtpCipher, + kEnumCounterAudioSslCipher, + kEnumCounterVideoSrtpCipher, + kEnumCounterVideoSslCipher, + kEnumCounterDataSrtpCipher, + kEnumCounterDataSslCipher, + kEnumCounterDtlsHandshakeError, + kEnumCounterIceRegathering, + kEnumCounterIceRestart, + kEnumCounterKeyProtocol, + kEnumCounterSdpSemanticRequested, + kEnumCounterSdpSemanticNegotiated, + kEnumCounterKeyProtocolMediaType, + kEnumCounterSdpFormatReceived, + // The next 2 counters log the value of srtp_err_status_t defined in libsrtp. + kEnumCounterSrtpUnprotectError, + kEnumCounterSrtcpUnprotectError, + kPeerConnectionEnumCounterMax +}; + +// Currently this contains information related to WebRTC network/transport +// information. + +// The difference between PeerConnectionEnumCounter and +// PeerConnectionMetricsName is that the "EnumCounter" is only counting the +// occurrences of events, while "Name" has a value associated with it which is +// used to form a histogram. + +// This enum is backed by Chromium's histograms.xml, +// chromium/src/tools/metrics/histograms/histograms.xml +// Existing values cannot be re-ordered and new enums must be added +// before kBoundary. +enum PeerConnectionAddressFamilyCounter { + kPeerConnection_IPv4, + kPeerConnection_IPv6, + kBestConnections_IPv4, + kBestConnections_IPv6, + kPeerConnectionAddressFamilyCounter_Max, +}; + +// TODO(guoweis): Keep previous name here until all references are renamed. +#define kBoundary kPeerConnectionAddressFamilyCounter_Max + +// TODO(guoweis): Keep previous name here until all references are renamed. +typedef PeerConnectionAddressFamilyCounter PeerConnectionUMAMetricsCounter; + +// This enum defines types for UMA samples, which will have a range. +enum PeerConnectionMetricsName { + kNetworkInterfaces_IPv4, // Number of IPv4 interfaces. + kNetworkInterfaces_IPv6, // Number of IPv6 interfaces. + kTimeToConnect, // In milliseconds. 
+ kLocalCandidates_IPv4, // Number of IPv4 local candidates. + kLocalCandidates_IPv6, // Number of IPv6 local candidates. + kPeerConnectionMetricsName_Max +}; + +// TODO(guoweis): Keep previous name here until all references are renamed. +typedef PeerConnectionMetricsName PeerConnectionUMAMetricsName; + +// The IceCandidatePairType has the format of +// _. It is recorded based on the +// type of candidate pair used when the PeerConnection first goes to a completed +// state. When BUNDLE is enabled, only the first transport gets recorded. +enum IceCandidatePairType { + // HostHost is deprecated. It was replaced with the set of types at the bottom + // to report private or public host IP address. + kIceCandidatePairHostHost, + kIceCandidatePairHostSrflx, + kIceCandidatePairHostRelay, + kIceCandidatePairHostPrflx, + kIceCandidatePairSrflxHost, + kIceCandidatePairSrflxSrflx, + kIceCandidatePairSrflxRelay, + kIceCandidatePairSrflxPrflx, + kIceCandidatePairRelayHost, + kIceCandidatePairRelaySrflx, + kIceCandidatePairRelayRelay, + kIceCandidatePairRelayPrflx, + kIceCandidatePairPrflxHost, + kIceCandidatePairPrflxSrflx, + kIceCandidatePairPrflxRelay, + + // The following 4 types tell whether local and remote hosts have private or + // public IP addresses. 
+ kIceCandidatePairHostPrivateHostPrivate, + kIceCandidatePairHostPrivateHostPublic, + kIceCandidatePairHostPublicHostPrivate, + kIceCandidatePairHostPublicHostPublic, + kIceCandidatePairMax +}; + +enum KeyExchangeProtocolType { + kEnumCounterKeyProtocolDtls, + kEnumCounterKeyProtocolSdes, + kEnumCounterKeyProtocolMax +}; + +enum KeyExchangeProtocolMedia { + kEnumCounterKeyProtocolMediaTypeDtlsAudio, + kEnumCounterKeyProtocolMediaTypeDtlsVideo, + kEnumCounterKeyProtocolMediaTypeDtlsData, + kEnumCounterKeyProtocolMediaTypeSdesAudio, + kEnumCounterKeyProtocolMediaTypeSdesVideo, + kEnumCounterKeyProtocolMediaTypeSdesData, + kEnumCounterKeyProtocolMediaTypeMax +}; + +enum SdpSemanticRequested { + kSdpSemanticRequestDefault, + kSdpSemanticRequestPlanB, + kSdpSemanticRequestUnifiedPlan, + kSdpSemanticRequestMax +}; + +enum SdpSemanticNegotiated { + kSdpSemanticNegotiatedNone, + kSdpSemanticNegotiatedPlanB, + kSdpSemanticNegotiatedUnifiedPlan, + kSdpSemanticNegotiatedMixed, + kSdpSemanticNegotiatedMax +}; + +// Metric which records the format of the received SDP for tracking how much the +// difference between Plan B and Unified Plan affect users. +enum SdpFormatReceived { + // No audio or video tracks. This is worth special casing since it seems to be + // the most common scenario (data-channel only). + kSdpFormatReceivedNoTracks, + // No more than one audio and one video track. Should be compatible with both + // Plan B and Unified Plan endpoints. + kSdpFormatReceivedSimple, + // More than one audio track or more than one video track in the Plan B format + // (e.g., one audio media section with multiple streams). + kSdpFormatReceivedComplexPlanB, + // More than one audio track or more than one video track in the Unified Plan + // format (e.g., two audio media sections). 
+ kSdpFormatReceivedComplexUnifiedPlan, + kSdpFormatReceivedMax +}; + +class MetricsObserverInterface : public rtc::RefCountInterface { + public: + // |type| is the type of the enum counter to be incremented. |counter| + // is the particular counter in that type. |counter_max| is the next sequence + // number after the highest counter. + virtual void IncrementEnumCounter(PeerConnectionEnumCounterType type, + int counter, + int counter_max) {} + + // This is used to handle sparse counters like SSL cipher suites. + // TODO(guoweis): Remove the implementation once the dependency's interface + // definition is updated. + virtual void IncrementSparseEnumCounter(PeerConnectionEnumCounterType type, + int counter); + + virtual void AddHistogramSample(PeerConnectionMetricsName type, + int value) = 0; +}; + +typedef MetricsObserverInterface UMAObserver; + +} // namespace webrtc + +#endif // API_UMAMETRICS_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_rate.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_rate.h new file mode 100644 index 000000000000..067b20010583 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_rate.h @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_UNITS_DATA_RATE_H_ +#define API_UNITS_DATA_RATE_H_ +#include +#include +#include +#include + +#include "rtc_base/checks.h" + +#include "api/units/data_size.h" +#include "api/units/time_delta.h" + +namespace webrtc { +namespace data_rate_impl { +constexpr int64_t kPlusInfinityVal = std::numeric_limits::max(); + +inline int64_t Microbits(const DataSize& size) { + constexpr int64_t kMaxBeforeConversion = + std::numeric_limits::max() / 8000000; + RTC_DCHECK_LE(size.bytes(), kMaxBeforeConversion) + << "size is too large to be expressed in microbytes"; + return size.bytes() * 8000000; +} +} // namespace data_rate_impl + +// DataRate is a class that represents a given data rate. This can be used to +// represent bandwidth, encoding bitrate, etc. The internal storage is bits per +// second (bps). +class DataRate { + public: + DataRate() = delete; + static DataRate Zero() { return DataRate(0); } + static DataRate Infinity() { + return DataRate(data_rate_impl::kPlusInfinityVal); + } + static DataRate bits_per_second(int64_t bits_per_sec) { + RTC_DCHECK_GE(bits_per_sec, 0); + return DataRate(bits_per_sec); + } + static DataRate bps(int64_t bits_per_sec) { + return DataRate::bits_per_second(bits_per_sec); + } + static DataRate kbps(int64_t kilobits_per_sec) { + return DataRate::bits_per_second(kilobits_per_sec * 1000); + } + int64_t bits_per_second() const { + RTC_DCHECK(IsFinite()); + return bits_per_sec_; + } + int64_t bps() const { return bits_per_second(); } + int64_t kbps() const { return (bps() + 500) / 1000; } + bool IsZero() const { return bits_per_sec_ == 0; } + bool IsInfinite() const { + return bits_per_sec_ == data_rate_impl::kPlusInfinityVal; + } + bool IsFinite() const { return !IsInfinite(); } + + bool operator==(const DataRate& other) const { + return bits_per_sec_ == other.bits_per_sec_; + } + bool operator!=(const DataRate& other) const { + return bits_per_sec_ != other.bits_per_sec_; + } + bool operator<=(const DataRate& other) const { + 
return bits_per_sec_ <= other.bits_per_sec_; + } + bool operator>=(const DataRate& other) const { + return bits_per_sec_ >= other.bits_per_sec_; + } + bool operator>(const DataRate& other) const { + return bits_per_sec_ > other.bits_per_sec_; + } + bool operator<(const DataRate& other) const { + return bits_per_sec_ < other.bits_per_sec_; + } + + private: + // Bits per second used internally to simplify debugging by making the value + // more recognizable. + explicit DataRate(int64_t bits_per_second) : bits_per_sec_(bits_per_second) {} + int64_t bits_per_sec_; +}; + +inline DataRate operator*(const DataRate& rate, const double& scalar) { + return DataRate::bits_per_second(std::round(rate.bits_per_second() * scalar)); +} +inline DataRate operator*(const double& scalar, const DataRate& rate) { + return rate * scalar; +} +inline DataRate operator*(const DataRate& rate, const int64_t& scalar) { + return DataRate::bits_per_second(rate.bits_per_second() * scalar); +} +inline DataRate operator*(const int64_t& scalar, const DataRate& rate) { + return rate * scalar; +} +inline DataRate operator*(const DataRate& rate, const int32_t& scalar) { + return DataRate::bits_per_second(rate.bits_per_second() * scalar); +} +inline DataRate operator*(const int32_t& scalar, const DataRate& rate) { + return rate * scalar; +} + +inline DataRate operator/(const DataSize& size, const TimeDelta& duration) { + return DataRate::bits_per_second(data_rate_impl::Microbits(size) / + duration.us()); +} +inline TimeDelta operator/(const DataSize& size, const DataRate& rate) { + return TimeDelta::us(data_rate_impl::Microbits(size) / + rate.bits_per_second()); +} +inline DataSize operator*(const DataRate& rate, const TimeDelta& duration) { + int64_t microbits = rate.bits_per_second() * duration.us(); + return DataSize::bytes((microbits + 4000000) / 8000000); +} +inline DataSize operator*(const TimeDelta& duration, const DataRate& rate) { + return rate * duration; +} + +std::string ToString(const 
DataRate& value); + +} // namespace webrtc + +#endif // API_UNITS_DATA_RATE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_size.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_size.h new file mode 100644 index 000000000000..74ab19e664e8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/data_size.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_UNITS_DATA_SIZE_H_ +#define API_UNITS_DATA_SIZE_H_ + +#include +#include +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace data_size_impl { +constexpr int64_t kPlusInfinityVal = std::numeric_limits::max(); +} // namespace data_size_impl + +// DataSize is a class represeting a count of bytes. 
+class DataSize { + public: + DataSize() = delete; + static DataSize Zero() { return DataSize(0); } + static DataSize Infinity() { + return DataSize(data_size_impl::kPlusInfinityVal); + } + static DataSize bytes(int64_t bytes) { + RTC_DCHECK_GE(bytes, 0); + return DataSize(bytes); + } + int64_t bytes() const { + RTC_DCHECK(IsFinite()); + return bytes_; + } + int64_t kilobytes() const { return (bytes() + 500) / 1000; } + bool IsZero() const { return bytes_ == 0; } + bool IsInfinite() const { return bytes_ == data_size_impl::kPlusInfinityVal; } + bool IsFinite() const { return !IsInfinite(); } + DataSize operator-(const DataSize& other) const { + return DataSize::bytes(bytes() - other.bytes()); + } + DataSize operator+(const DataSize& other) const { + return DataSize::bytes(bytes() + other.bytes()); + } + DataSize& operator-=(const DataSize& other) { + bytes_ -= other.bytes(); + return *this; + } + DataSize& operator+=(const DataSize& other) { + bytes_ += other.bytes(); + return *this; + } + bool operator==(const DataSize& other) const { + return bytes_ == other.bytes_; + } + bool operator!=(const DataSize& other) const { + return bytes_ != other.bytes_; + } + bool operator<=(const DataSize& other) const { + return bytes_ <= other.bytes_; + } + bool operator>=(const DataSize& other) const { + return bytes_ >= other.bytes_; + } + bool operator>(const DataSize& other) const { return bytes_ > other.bytes_; } + bool operator<(const DataSize& other) const { return bytes_ < other.bytes_; } + + private: + explicit DataSize(int64_t bytes) : bytes_(bytes) {} + int64_t bytes_; +}; +inline DataSize operator*(const DataSize& size, const double& scalar) { + return DataSize::bytes(std::round(size.bytes() * scalar)); +} +inline DataSize operator*(const double& scalar, const DataSize& size) { + return size * scalar; +} +inline DataSize operator*(const DataSize& size, const int64_t& scalar) { + return DataSize::bytes(size.bytes() * scalar); +} +inline DataSize operator*(const 
int64_t& scalar, const DataSize& size) { + return size * scalar; +} +inline DataSize operator*(const DataSize& size, const int32_t& scalar) { + return DataSize::bytes(size.bytes() * scalar); +} +inline DataSize operator*(const int32_t& scalar, const DataSize& size) { + return size * scalar; +} +inline DataSize operator/(const DataSize& size, const int64_t& scalar) { + return DataSize::bytes(size.bytes() / scalar); +} + +std::string ToString(const DataSize& value); + +} // namespace webrtc + +#endif // API_UNITS_DATA_SIZE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/time_delta.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/time_delta.h new file mode 100644 index 000000000000..2491920cdbb2 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/time_delta.h @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_UNITS_TIME_DELTA_H_ +#define API_UNITS_TIME_DELTA_H_ + +#include +#include +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace timedelta_impl { +constexpr int64_t kPlusInfinityVal = std::numeric_limits::max(); +constexpr int64_t kMinusInfinityVal = std::numeric_limits::min(); +} // namespace timedelta_impl + +// TimeDelta represents the difference between two timestamps. Commonly this can +// be a duration. However since two Timestamps are not guaranteed to have the +// same epoch (they might come from different computers, making exact +// synchronisation infeasible), the duration covered by a TimeDelta can be +// undefined. 
To simplify usage, it can be constructed and converted to +// different units, specifically seconds (s), milliseconds (ms) and +// microseconds (us). +class TimeDelta { + public: + TimeDelta() = delete; + static TimeDelta Zero() { return TimeDelta(0); } + static TimeDelta PlusInfinity() { + return TimeDelta(timedelta_impl::kPlusInfinityVal); + } + static TimeDelta MinusInfinity() { + return TimeDelta(timedelta_impl::kMinusInfinityVal); + } + static TimeDelta seconds(int64_t seconds) { + return TimeDelta::us(seconds * 1000000); + } + static TimeDelta ms(int64_t milliseconds) { + return TimeDelta::us(milliseconds * 1000); + } + static TimeDelta us(int64_t microseconds) { + // Infinities only allowed via use of explicit constants. + RTC_DCHECK(microseconds > std::numeric_limits::min()); + RTC_DCHECK(microseconds < std::numeric_limits::max()); + return TimeDelta(microseconds); + } + int64_t seconds() const { + return (us() + (us() >= 0 ? 500000 : -500000)) / 1000000; + } + int64_t ms() const { return (us() + (us() >= 0 ? 
500 : -500)) / 1000; } + int64_t us() const { + RTC_DCHECK(IsFinite()); + return microseconds_; + } + int64_t ns() const { + RTC_DCHECK(us() > std::numeric_limits::min() / 1000); + RTC_DCHECK(us() < std::numeric_limits::max() / 1000); + return us() * 1000; + } + + double SecondsAsDouble() const; + + TimeDelta Abs() const { return TimeDelta::us(std::abs(us())); } + bool IsZero() const { return microseconds_ == 0; } + bool IsFinite() const { return !IsInfinite(); } + bool IsInfinite() const { + return microseconds_ == timedelta_impl::kPlusInfinityVal || + microseconds_ == timedelta_impl::kMinusInfinityVal; + } + bool IsPlusInfinity() const { + return microseconds_ == timedelta_impl::kPlusInfinityVal; + } + bool IsMinusInfinity() const { + return microseconds_ == timedelta_impl::kMinusInfinityVal; + } + TimeDelta operator+(const TimeDelta& other) const { + return TimeDelta::us(us() + other.us()); + } + TimeDelta operator-(const TimeDelta& other) const { + return TimeDelta::us(us() - other.us()); + } + TimeDelta& operator-=(const TimeDelta& other) { + microseconds_ -= other.us(); + return *this; + } + TimeDelta& operator+=(const TimeDelta& other) { + microseconds_ += other.us(); + return *this; + } + + bool operator==(const TimeDelta& other) const { + return microseconds_ == other.microseconds_; + } + bool operator!=(const TimeDelta& other) const { + return microseconds_ != other.microseconds_; + } + bool operator<=(const TimeDelta& other) const { + return microseconds_ <= other.microseconds_; + } + bool operator>=(const TimeDelta& other) const { + return microseconds_ >= other.microseconds_; + } + bool operator>(const TimeDelta& other) const { + return microseconds_ > other.microseconds_; + } + bool operator<(const TimeDelta& other) const { + return microseconds_ < other.microseconds_; + } + + private: + explicit TimeDelta(int64_t us) : microseconds_(us) {} + int64_t microseconds_; +}; + +inline TimeDelta operator*(const TimeDelta& delta, const double& scalar) { + 
return TimeDelta::us(std::round(delta.us() * scalar)); +} +inline TimeDelta operator*(const double& scalar, const TimeDelta& delta) { + return delta * scalar; +} +inline TimeDelta operator*(const TimeDelta& delta, const int64_t& scalar) { + return TimeDelta::us(delta.us() * scalar); +} +inline TimeDelta operator*(const int64_t& scalar, const TimeDelta& delta) { + return delta * scalar; +} +inline TimeDelta operator*(const TimeDelta& delta, const int32_t& scalar) { + return TimeDelta::us(delta.us() * scalar); +} +inline TimeDelta operator*(const int32_t& scalar, const TimeDelta& delta) { + return delta * scalar; +} + +inline TimeDelta operator/(const TimeDelta& delta, const int64_t& scalar) { + return TimeDelta::us(delta.us() / scalar); +} + +std::string ToString(const TimeDelta& value); +} // namespace webrtc + +#endif // API_UNITS_TIME_DELTA_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/timestamp.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/timestamp.h new file mode 100644 index 000000000000..af62b3b4900c --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/units/timestamp.h @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_UNITS_TIMESTAMP_H_ +#define API_UNITS_TIMESTAMP_H_ + +#include +#include +#include + +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace timestamp_impl { +constexpr int64_t kPlusInfinityVal = std::numeric_limits::max(); +constexpr int64_t kMinusInfinityVal = std::numeric_limits::min(); +} // namespace timestamp_impl + +// Timestamp represents the time that has passed since some unspecified epoch. +// The epoch is assumed to be before any represented timestamps, this means that +// negative values are not valid. The most notable feature is that the +// difference of two Timestamps results in a TimeDelta. +class Timestamp { + public: + Timestamp() = delete; + static Timestamp Infinity() { + return Timestamp(timestamp_impl::kPlusInfinityVal); + } + static Timestamp seconds(int64_t seconds) { + return Timestamp::us(seconds * 1000000); + } + static Timestamp ms(int64_t millis) { return Timestamp::us(millis * 1000); } + static Timestamp us(int64_t micros) { + RTC_DCHECK_GE(micros, 0); + return Timestamp(micros); + } + int64_t seconds() const { return (us() + 500000) / 1000000; } + int64_t ms() const { return (us() + 500) / 1000; } + int64_t us() const { + RTC_DCHECK(IsFinite()); + return microseconds_; + } + + double SecondsAsDouble() const; + + bool IsInfinite() const { + return microseconds_ == timestamp_impl::kPlusInfinityVal; + } + bool IsFinite() const { return !IsInfinite(); } + TimeDelta operator-(const Timestamp& other) const { + return TimeDelta::us(us() - other.us()); + } + Timestamp operator-(const TimeDelta& delta) const { + return Timestamp::us(us() - delta.us()); + } + Timestamp operator+(const TimeDelta& delta) const { + return Timestamp::us(us() + delta.us()); + } + Timestamp& operator-=(const TimeDelta& other) { + microseconds_ -= other.us(); + return *this; + } + Timestamp& operator+=(const TimeDelta& other) { + microseconds_ += other.us(); + return *this; + } + bool operator==(const 
Timestamp& other) const { + return microseconds_ == other.microseconds_; + } + bool operator!=(const Timestamp& other) const { + return microseconds_ != other.microseconds_; + } + bool operator<=(const Timestamp& other) const { return us() <= other.us(); } + bool operator>=(const Timestamp& other) const { return us() >= other.us(); } + bool operator>(const Timestamp& other) const { return us() > other.us(); } + bool operator<(const Timestamp& other) const { return us() < other.us(); } + + private: + explicit Timestamp(int64_t us) : microseconds_(us) {} + int64_t microseconds_; +}; + +std::string ToString(const Timestamp& value); + +} // namespace webrtc + +#endif // API_UNITS_TIMESTAMP_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/encoded_frame.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/encoded_frame.h new file mode 100644 index 000000000000..1e919d0077b1 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/encoded_frame.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_ENCODED_FRAME_H_ +#define API_VIDEO_ENCODED_FRAME_H_ + +#include "modules/video_coding/encoded_frame.h" + +namespace webrtc { +namespace video_coding { + +// NOTE: This class is still under development and may change without notice. +struct VideoLayerFrameId { + // TODO(philipel): The default ctor is currently used internaly, but have a + // look if we can remove it. 
+ VideoLayerFrameId() : picture_id(-1), spatial_layer(0) {} + VideoLayerFrameId(int64_t picture_id, uint8_t spatial_layer) + : picture_id(picture_id), spatial_layer(spatial_layer) {} + + bool operator==(const VideoLayerFrameId& rhs) const { + return picture_id == rhs.picture_id && spatial_layer == rhs.spatial_layer; + } + + bool operator!=(const VideoLayerFrameId& rhs) const { + return !(*this == rhs); + } + + bool operator<(const VideoLayerFrameId& rhs) const { + if (picture_id == rhs.picture_id) + return spatial_layer < rhs.spatial_layer; + return picture_id < rhs.picture_id; + } + + bool operator<=(const VideoLayerFrameId& rhs) const { return !(rhs < *this); } + bool operator>(const VideoLayerFrameId& rhs) const { return rhs < *this; } + bool operator>=(const VideoLayerFrameId& rhs) const { return rhs <= *this; } + + int64_t picture_id; + uint8_t spatial_layer; +}; + +// TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance. +// TODO(philipel): Move transport specific info out of EncodedFrame. +// NOTE: This class is still under development and may change without notice. +class EncodedFrame : public webrtc::VCMEncodedFrame { + public: + static const uint8_t kMaxFrameReferences = 5; + + EncodedFrame() = default; + virtual ~EncodedFrame() {} + + virtual bool GetBitstream(uint8_t* destination) const = 0; + + // The capture timestamp of this frame. + virtual uint32_t Timestamp() const = 0; + + // When this frame was received. + virtual int64_t ReceivedTime() const = 0; + + // When this frame should be rendered. + virtual int64_t RenderTime() const = 0; + + // This information is currently needed by the timing calculation class. + // TODO(philipel): Remove this function when a new timing class has + // been implemented. 
+ virtual bool delayed_by_retransmission() const; + + size_t size() const { return _length; } + + bool is_keyframe() const { return num_references == 0; } + + VideoLayerFrameId id; + uint32_t timestamp = 0; + + // TODO(philipel): Add simple modify/access functions to prevent adding too + // many |references|. + size_t num_references = 0; + int64_t references[kMaxFrameReferences]; + bool inter_layer_predicted = false; +}; + +} // namespace video_coding +} // namespace webrtc + +#endif // API_VIDEO_ENCODED_FRAME_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/i420_buffer.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/i420_buffer.h new file mode 100644 index 000000000000..2bd37bd42221 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/i420_buffer.h @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I420_BUFFER_H_ +#define API_VIDEO_I420_BUFFER_H_ + +#include + +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +// Plain I420 buffer in standard memory. +class I420Buffer : public I420BufferInterface { + public: + static rtc::scoped_refptr Create(int width, int height); + static rtc::scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); + + // Create a new buffer and copy the pixel data. + static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + // Deprecated. 
+ static rtc::scoped_refptr Copy(const VideoFrameBuffer& buffer) { + return Copy(*buffer.GetI420()); + } + + static rtc::scoped_refptr Copy( + int width, int height, + const uint8_t* data_y, int stride_y, + const uint8_t* data_u, int stride_u, + const uint8_t* data_v, int stride_v); + + // Returns a rotated copy of |src|. + static rtc::scoped_refptr Rotate(const I420BufferInterface& src, + VideoRotation rotation); + // Deprecated. + static rtc::scoped_refptr Rotate(const VideoFrameBuffer& src, + VideoRotation rotation) { + return Rotate(*src.GetI420(), rotation); + } + + // Sets the buffer to all black. + static void SetBlack(I420Buffer* buffer); + + // Sets all three planes to all zeros. Used to work around for + // quirks in memory checkers + // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and + // ffmpeg (http://crbug.com/390941). + // TODO(nisse): Deprecated. Should be deleted if/when those issues + // are resolved in a better way. Or in the mean time, use SetBlack. + void InitializeData(); + + int width() const override; + int height() const override; + const uint8_t* DataY() const override; + const uint8_t* DataU() const override; + const uint8_t* DataV() const override; + + int StrideY() const override; + int StrideU() const override; + int StrideV() const override; + + uint8_t* MutableDataY(); + uint8_t* MutableDataU(); + uint8_t* MutableDataV(); + + // Scale the cropped area of |src| to the size of |this| buffer, and + // write the result into |this|. + void CropAndScaleFrom(const I420BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + // The common case of a center crop, when needed to adjust the + // aspect ratio without distorting the image. + void CropAndScaleFrom(const I420BufferInterface& src); + + // Scale all of |src| to the size of |this| buffer, with no cropping. 
+ void ScaleFrom(const I420BufferInterface& src); + + protected: + I420Buffer(int width, int height); + I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + + ~I420Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I420_BUFFER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_bitrate_allocation.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_bitrate_allocation.h new file mode 100644 index 000000000000..b748b67fd0da --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_bitrate_allocation.h @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ +#define API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ + +#include +#include +#include + +#include "api/optional.h" +#include "typedefs.h" // NOLINT(build/include) + +namespace webrtc { + +// TODO(sprang): Move back to common_types when include of this is removed. +enum : int { kMaxSimulcastStreams = 4 }; +enum : int { kMaxSpatialLayers = 5 }; +enum : int { kMaxTemporalStreams = 4 }; + +// Class that describes how video bitrate, in bps, is allocated across temporal +// and spatial layers. Not that bitrates are NOT cumulative. Depending on if +// layers are dependent or not, it is up to the user to aggregate. +// For each index, the bitrate can also both set and unset. 
This is used with a +// set bps = 0 to signal an explicit "turn off" signal. +class VideoBitrateAllocation { + public: + static constexpr uint32_t kMaxBitrateBps = + std::numeric_limits::max(); + VideoBitrateAllocation(); + + bool SetBitrate(size_t spatial_index, + size_t temporal_index, + uint32_t bitrate_bps); + + bool HasBitrate(size_t spatial_index, size_t temporal_index) const; + + uint32_t GetBitrate(size_t spatial_index, size_t temporal_index) const; + + // Whether the specific spatial layers has the bitrate set in any of its + // temporal layers. + bool IsSpatialLayerUsed(size_t spatial_index) const; + + // Get the sum of all the temporal layer for a specific spatial layer. + uint32_t GetSpatialLayerSum(size_t spatial_index) const; + + // Sum of bitrates of temporal layers, from layer 0 to |temporal_index| + // inclusive, of specified spatial layer |spatial_index|. Bitrates of lower + // spatial layers are not included. + uint32_t GetTemporalLayerSum(size_t spatial_index, + size_t temporal_index) const; + + // Returns a vector of the temporal layer bitrates for the specific spatial + // layer. Length of the returned vector is cropped to the highest temporal + // layer with a defined bitrate. + std::vector GetTemporalLayerAllocation(size_t spatial_index) const; + + uint32_t get_sum_bps() const { return sum_; } // Sum of all bitrates. + uint32_t get_sum_kbps() const { + // Round down to not exceed the allocated bitrate. 
+ return sum_ / 1000; + } + + bool operator==(const VideoBitrateAllocation& other) const; + inline bool operator!=(const VideoBitrateAllocation& other) const { + return !(*this == other); + } + + std::string ToString() const; + + private: + uint32_t sum_; + rtc::Optional bitrates_[kMaxSpatialLayers][kMaxTemporalStreams]; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_content_type.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_content_type.h new file mode 100644 index 000000000000..8c6460288d82 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_content_type.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_ +#define API_VIDEO_VIDEO_CONTENT_TYPE_H_ + +#include + +#include + +namespace webrtc { + +enum class VideoContentType : uint8_t { + UNSPECIFIED = 0, + SCREENSHARE = 1, +}; + +namespace videocontenttypehelpers { +bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id); +bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id); + +uint8_t GetExperimentId(const VideoContentType& content_type); +uint8_t GetSimulcastId(const VideoContentType& content_type); + +bool IsScreenshare(const VideoContentType& content_type); + +bool IsValidContentType(uint8_t value); + +const char* ToString(const VideoContentType& content_type); +} // namespace videocontenttypehelpers + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_CONTENT_TYPE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame.h new file mode 100644 index 000000000000..a72bef1d32e3 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame.h @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_H_ +#define API_VIDEO_VIDEO_FRAME_H_ + +#include + +#include "api/video/video_rotation.h" +#include "api/video/video_frame_buffer.h" + +namespace webrtc { + +class VideoFrame { + public: + // TODO(nisse): This constructor is consistent with the now deleted + // cricket::WebRtcVideoFrame. 
We should consider whether or not we + // want to stick to this style and deprecate the other constructor. + VideoFrame(const rtc::scoped_refptr& buffer, + webrtc::VideoRotation rotation, + int64_t timestamp_us); + + // Preferred constructor. + VideoFrame(const rtc::scoped_refptr& buffer, + uint32_t timestamp, + int64_t render_time_ms, + VideoRotation rotation); + + ~VideoFrame(); + + // Support move and copy. + VideoFrame(const VideoFrame&); + VideoFrame(VideoFrame&&); + VideoFrame& operator=(const VideoFrame&); + VideoFrame& operator=(VideoFrame&&); + + // Get frame width. + int width() const; + // Get frame height. + int height() const; + // Get frame size in pixels. + uint32_t size() const; + + // System monotonic clock, same timebase as rtc::TimeMicros(). + int64_t timestamp_us() const { return timestamp_us_; } + void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; } + + // TODO(nisse): After the cricket::VideoFrame and webrtc::VideoFrame + // merge, timestamps other than timestamp_us will likely be + // deprecated. + + // Set frame timestamp (90kHz). + void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } + + // Get frame timestamp (90kHz). + uint32_t timestamp() const { return timestamp_rtp_; } + + // For now, transport_frame_id and rtp timestamp are the same. + // TODO(nisse): Must be handled differently for QUIC. + uint32_t transport_frame_id() const { return timestamp(); } + + // Set capture ntp time in milliseconds. + // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). + void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; } + + // Get capture ntp time in milliseconds. + // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). + int64_t ntp_time_ms() const { return ntp_time_ms_; } + + // Naming convention for Coordination of Video Orientation. 
Please see + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf + // + // "pending rotation" or "pending" = a frame that has a VideoRotation > 0. + // + // "not pending" = a frame that has a VideoRotation == 0. + // + // "apply rotation" = modify a frame from being "pending" to being "not + // pending" rotation (a no-op for "unrotated"). + // + VideoRotation rotation() const { return rotation_; } + void set_rotation(VideoRotation rotation) { rotation_ = rotation; } + + // Get render time in milliseconds. + // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). + int64_t render_time_ms() const; + + // Return the underlying buffer. Never nullptr for a properly + // initialized VideoFrame. + rtc::scoped_refptr video_frame_buffer() const; + + // TODO(nisse): Deprecated. + // Return true if the frame is stored in a texture. + bool is_texture() const { + return video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative; + } + + private: + // An opaque reference counted handle that stores the pixel data. + rtc::scoped_refptr video_frame_buffer_; + uint32_t timestamp_rtp_; + int64_t ntp_time_ms_; + int64_t timestamp_us_; + VideoRotation rotation_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame_buffer.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame_buffer.h new file mode 100644 index 000000000000..2be7e0bb9f61 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_frame_buffer.h @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_BUFFER_H_ +#define API_VIDEO_VIDEO_FRAME_BUFFER_H_ + +#include + +#include "rtc_base/refcount.h" +#include "rtc_base/scoped_ref_ptr.h" + +namespace webrtc { + +class I420BufferInterface; +class I420ABufferInterface; +class I444BufferInterface; + +// Base class for frame buffers of different types of pixel format and storage. +// The tag in type() indicates how the data is represented, and each type is +// implemented as a subclass. To access the pixel data, call the appropriate +// GetXXX() function, where XXX represents the type. There is also a function +// ToI420() that returns a frame buffer in I420 format, converting from the +// underlying representation if necessary. I420 is the most widely accepted +// format and serves as a fallback for video sinks that can only handle I420, +// e.g. the internal WebRTC software encoders. A special enum value 'kNative' is +// provided for external clients to implement their own frame buffer +// representations, e.g. as textures. The external client can produce such +// native frame buffers from custom video sources, and then cast it back to the +// correct subclass in custom video sinks. The purpose of this is to improve +// performance by providing an optimized path without intermediate conversions. +// Frame metadata such as rotation and timestamp are stored in +// webrtc::VideoFrame, and not here. +class VideoFrameBuffer : public rtc::RefCountInterface { + public: + // New frame buffer types will be added conservatively when there is an + // opportunity to optimize the path between some pair of video source and + // video sink. + enum class Type { + kNative, + kI420, + kI420A, + kI444, + }; + + // This function specifies in what pixel format the data is stored in. + virtual Type type() const = 0; + + // The resolution of the frame in pixels. 
For formats where some planes are + // subsampled, this is the highest-resolution plane. + virtual int width() const = 0; + virtual int height() const = 0; + + // Returns a memory-backed frame buffer in I420 format. If the pixel data is + // in another format, a conversion will take place. All implementations must + // provide a fallback to I420 for compatibility with e.g. the internal WebRTC + // software encoders. + virtual rtc::scoped_refptr ToI420() = 0; + + // These functions should only be called if type() is of the correct type. + // Calling with a different type will result in a crash. + // TODO(magjed): Return raw pointers for GetI420 once deprecated interface is + // removed. + rtc::scoped_refptr GetI420(); + rtc::scoped_refptr GetI420() const; + I420ABufferInterface* GetI420A(); + const I420ABufferInterface* GetI420A() const; + I444BufferInterface* GetI444(); + const I444BufferInterface* GetI444() const; + + protected: + ~VideoFrameBuffer() override {} +}; + +// This interface represents Type::kI420 and Type::kI444. +class PlanarYuvBuffer : public VideoFrameBuffer { + public: + virtual int ChromaWidth() const = 0; + virtual int ChromaHeight() const = 0; + + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataU() const = 0; + virtual const uint8_t* DataV() const = 0; + + // Returns the number of bytes between successive rows for a given plane. 
+ virtual int StrideY() const = 0; + virtual int StrideU() const = 0; + virtual int StrideV() const = 0; + + protected: + ~PlanarYuvBuffer() override {} +}; + +class I420BufferInterface : public PlanarYuvBuffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + rtc::scoped_refptr ToI420() final; + + protected: + ~I420BufferInterface() override {} +}; + +class I420ABufferInterface : public I420BufferInterface { + public: + Type type() const final; + virtual const uint8_t* DataA() const = 0; + virtual int StrideA() const = 0; + + protected: + ~I420ABufferInterface() override {} +}; + +class I444BufferInterface : public PlanarYuvBuffer { + public: + Type type() const final; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~I444BufferInterface() override {} +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_rotation.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_rotation.h new file mode 100644 index 000000000000..6a29588ee5c8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_rotation.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_ROTATION_H_ +#define API_VIDEO_VIDEO_ROTATION_H_ + +namespace webrtc { + +// enum for clockwise rotation. 
+enum VideoRotation { + kVideoRotation_0 = 0, + kVideoRotation_90 = 90, + kVideoRotation_180 = 180, + kVideoRotation_270 = 270 +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_ROTATION_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_sink_interface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_sink_interface.h new file mode 100644 index 000000000000..aac8b4ac38a3 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_sink_interface.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_SINK_INTERFACE_H_ +#define API_VIDEO_VIDEO_SINK_INTERFACE_H_ + +#include + +namespace rtc { + +template +class VideoSinkInterface { + public: + virtual ~VideoSinkInterface() = default; + + virtual void OnFrame(const VideoFrameT& frame) = 0; + + // Should be called by the source when it discards the frame due to rate + // limiting. + virtual void OnDiscardedFrame() {} +}; + +} // namespace rtc + +#endif // API_VIDEO_VIDEO_SINK_INTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_source_interface.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_source_interface.h new file mode 100644 index 000000000000..d4e2d3ad9ec0 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_source_interface.h @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ +#define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ + +#include + +#include "api/optional.h" +#include "api/video/video_sink_interface.h" + +namespace rtc { + +// VideoSinkWants is used for notifying the source of properties a video frame +// should have when it is delivered to a certain sink. +struct VideoSinkWants { + VideoSinkWants(); + VideoSinkWants(const VideoSinkWants&); + ~VideoSinkWants(); + // Tells the source whether the sink wants frames with rotation applied. + // By default, any rotation must be applied by the sink. + bool rotation_applied = false; + + // Tells the source that the sink only wants black frames. + bool black_frames = false; + + // Tells the source the maximum number of pixels the sink wants. + int max_pixel_count = std::numeric_limits::max(); + // Tells the source the desired number of pixels the sinks wants. This will + // typically be used when stepping the resolution up again when conditions + // have improved after an earlier downgrade. The source should select the + // closest resolution to this pixel count, but if max_pixel_count is set, it + // still sets the absolute upper bound. + rtc::Optional target_pixel_count; + // Tells the source the maximum framerate the sink wants. + int max_framerate_fps = std::numeric_limits::max(); +}; + +template +class VideoSourceInterface { + public: + virtual void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) = 0; + // RemoveSink must guarantee that at the time the method returns, + // there is no current and no future calls to VideoSinkInterface::OnFrame. 
+ virtual void RemoveSink(VideoSinkInterface* sink) = 0; + + protected: + // Non-public, since one shouldn't own sources via this interface. + virtual ~VideoSourceInterface() {} +}; + +} // namespace rtc +#endif // API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder.h new file mode 100644 index 000000000000..1c4c5ff2a2a8 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_STREAM_DECODER_H_ +#define API_VIDEO_VIDEO_STREAM_DECODER_H_ + +#include +#include +#include + +#include "api/video/encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" + +namespace webrtc { +// NOTE: This class is still under development and may change without notice. +class VideoStreamDecoder { + public: + class Callbacks { + public: + virtual ~Callbacks() = default; + + // Called when the VideoStreamDecoder enters a non-decodable state. + virtual void OnNonDecodableState() = 0; + + // Called with the last continuous frame. + virtual void OnContinuousUntil( + const video_coding::VideoLayerFrameId& key) = 0; + + // Called with the decoded frame. 
+ virtual void OnDecodedFrame(VideoFrame decodedImage, + rtc::Optional decode_time_ms, + rtc::Optional qp) = 0; + }; + + virtual ~VideoStreamDecoder() = default; + + virtual void OnFrame(std::unique_ptr frame) = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_STREAM_DECODER_H_ diff --git a/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder_create.h b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder_create.h new file mode 100644 index 000000000000..04682901da14 --- /dev/null +++ b/Engine/Source/ThirdParty/WebRTC/rev.23789/include/Win64/VS2017/api/video/video_stream_decoder_create.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_ +#define API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_ + +#include +#include +#include + +#include "api/video/video_stream_decoder.h" + +namespace webrtc { +// The |decoder_settings| parameter is a map between: +// --> <