// Create MediaBlocks pipeline
_pipeline = new MediaBlocksPipeline();

// Add file source
var fileSourceSettings = await UniversalSourceSettings.CreateAsync("video.mp4");
var videoStreamAvailable = fileSourceSettings.GetInfo().VideoStreams.Count > 0;
var audioStreamAvailable = fileSourceSettings.GetInfo().AudioStreams.Count > 0;
var fileSource = new UniversalSourceBlock(fileSourceSettings);

// Add video renderer
if (videoStreamAvailable)
{
    var videoRenderer = new VideoRendererBlock(_pipeline, VideoView1);
    _pipeline.Connect(fileSource, videoRenderer);
}

// Add audio output
if (audioStreamAvailable)
{
    var audioOutputDevice = (await DeviceEnumerator.Shared.AudioOutputsAsync(AudioOutputDeviceAPI.DirectSound))[0];
    var audioOutput = new AudioRendererBlock(new AudioRendererSettings(audioOutputDevice));
    _pipeline.Connect(fileSource, audioOutput);
}

// Start playback
await _pipeline.StartAsync();
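Once playback is done, the pipeline should be stopped and its resources released. A minimal cleanup sketch, assuming the standard MediaBlocksPipeline lifecycle calls (if your SDK version does not expose DisposeAsync(), Dispose() is the synchronous equivalent):

// Stop playback and free pipeline resources
await _pipeline.StopAsync();
await _pipeline.DisposeAsync();
_pipeline = null;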
// Create MediaBlocks pipeline
_pipeline = new MediaBlocksPipeline();

// Add file source
var fileSourceSettings = await UniversalSourceSettings.CreateAsync(edFilename.Text);
var videoStreamAvailable = fileSourceSettings.GetInfo().VideoStreams.Count > 0;
var audioStreamAvailable = fileSourceSettings.GetInfo().AudioStreams.Count > 0;
var fileSource = new UniversalSourceBlock(fileSourceSettings);

// Add video renderer, text overlay and image overlay
if (videoStreamAvailable)
{
    var videoRenderer = new VideoRendererBlock(_pipeline, VideoView1);
    var textOverlay = new TextOverlayBlock(new TextOverlaySettings("Hello world!"));
    var imageOverlay = new ImageOverlayBlock(new ImageOverlaySettings("logo.png"));
    _pipeline.Connect(fileSource, textOverlay);
    _pipeline.Connect(textOverlay, imageOverlay);
    _pipeline.Connect(imageOverlay, videoRenderer);
}

// Add audio output and equalizer
if (audioStreamAvailable)
{
    var audioOutputDevice = (await DeviceEnumerator.Shared.AudioOutputsAsync(AudioOutputDeviceAPI.DirectSound))[0];
    var audioOutput = new AudioRendererBlock(new AudioRendererSettings(audioOutputDevice));
    var equalizer = new EqualizerParametricBlock();
    // configure the equalizer bands (frequency, gain, bandwidth) here as needed
    _pipeline.Connect(fileSource, equalizer);
    _pipeline.Connect(equalizer, audioOutput);
}

// Start playback
await _pipeline.StartAsync();
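Errors from the extra blocks (a missing logo.png, an unreadable input file, and so on) surface through the pipeline's OnError event, so it is worth subscribing before StartAsync() is called. A short sketch, assuming the event argument exposes a Message string and System.Diagnostics is imported:

// Log pipeline errors (e.g. a missing overlay image) to the debug output
_pipeline.OnError += (sender, args) => Debug.WriteLine(args.Message);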
// Create MediaBlocksPipeline object
_pipeline = new MediaBlocksPipeline();

// Add default video and audio sources
var videoSources = (await DeviceEnumerator.Shared.VideoSourcesAsync()).ToList();
// Prefer a camera whose name contains "920" (e.g. a Logitech C920); fall back to the first device if none matches
var videoDevice = videoSources.Find(x => x.Name.Contains("920")) ?? videoSources[0];
var videoSource = new SystemVideoSourceBlock(new VideoCaptureDeviceSourceSettings(videoDevice));
var audioSources = (await DeviceEnumerator.Shared.AudioSourcesAsync()).ToList();
var audioSource = new SystemAudioSourceBlock(audioSources[0].CreateSourceSettings());

// Add video renderer
var videoRenderer = new VideoRendererBlock(_pipeline, videoView: VideoView1);

// Add audio renderer
var audioRenderers = (await DeviceEnumerator.Shared.AudioOutputsAsync()).ToList();
var audioRenderer = new AudioRendererBlock(new AudioRendererSettings(audioRenderers[0]));

// Connect everything
_pipeline.Connect(videoSource, videoRenderer);
_pipeline.Connect(audioSource, audioRenderer);

// Start preview
await _pipeline.StartAsync();
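Rather than hard-coding a device, the enumerated sources can be offered to the user. A sketch assuming a WinForms ComboBox named cbVideoSource (a hypothetical control, not part of the sample above):

// Fill a device selector with the enumerated capture devices
cbVideoSource.Items.Clear();
foreach (var device in videoSources)
{
    cbVideoSource.Items.Add(device.Name);
}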
// Create Media Blocks pipeline
_pipeline = new MediaBlocksPipeline();

// Create video renderer
var videoRenderer = new VideoRendererBlock(_pipeline, VideoView1);

// Add RTSP camera source
var rtsp = await RTSPSourceSettings.CreateAsync(new Uri(edURL.Text), edLogin.Text, edPassword.Text, audioEnabled: cbAudioStream.Checked);
var rtspSource = new RTSPSourceBlock(rtsp);
_pipeline.Connect(rtspSource, videoRenderer);

// Add audio output (if required)
if (cbAudioStream.Checked && rtsp.IsAudioAvailable())
{
    var audioOutputDevice = (await DeviceEnumerator.Shared.AudioOutputsAsync(AudioOutputDeviceAPI.DirectSound))[0];
    var audioOutput = new AudioRendererBlock(new AudioRendererSettings(audioOutputDevice));
    _pipeline.Connect(rtspSource, audioOutput);
}

// Start IP camera preview
await _pipeline.StartAsync();
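A live preview should be shut down when its window closes. A sketch assuming a WinForms FormClosing handler (MainForm_FormClosing is a hypothetical name); StopAsync() is the same call used to end file playback:

// Stop the IP camera preview together with the window
private async void MainForm_FormClosing(object sender, FormClosingEventArgs e)
{
    if (_pipeline != null)
    {
        await _pipeline.StopAsync();
    }
}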
// Create the pipeline
_pipeline = new MediaBlocksPipeline();

// Add video and audio sources
var videoSources = (await DeviceEnumerator.Shared.VideoSourcesAsync()).ToList();
var videoSource = new SystemVideoSourceBlock(new VideoCaptureDeviceSourceSettings(videoSources[0]));
var audioSources = (await DeviceEnumerator.Shared.AudioSourcesAsync()).ToList();
var audioSource = new SystemAudioSourceBlock(audioSources[0].CreateSourceSettings());

// Add video renderer and specify VideoView control
var videoRenderer = new VideoRendererBlock(_pipeline, videoView: VideoView1);

// Add audio renderer
var audioRenderers = (await DeviceEnumerator.Shared.AudioOutputsAsync()).ToList();
var audioRenderer = new AudioRendererBlock(new AudioRendererSettings(audioRenderers[0]));

// Configure MP4 output
var output = new MP4OutputBlock(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyVideos), "output.mp4"));

// Add video and audio tees; each tee duplicates one input stream into two outputs, one for the on-screen preview and one for the MP4 file
var videoTee = new TeeBlock(2, MediaBlockPadMediaType.Video);
var audioTee = new TeeBlock(2, MediaBlockPadMediaType.Audio);

// Connect everything
_pipeline.Connect(videoSource, videoTee);
_pipeline.Connect(audioSource, audioTee);
_pipeline.Connect(videoTee, videoRenderer);
_pipeline.Connect(audioTee, audioRenderer);
_pipeline.Connect(videoTee, output);
_pipeline.Connect(audioTee, output);

// Start MP4 recording
await _pipeline.StartAsync();
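Recording has to be ended through the pipeline so the MP4 muxer can finalize the file; killing the process instead usually leaves an unplayable output. A minimal sketch using the standard stop call:

// Stop capture; stopping the pipeline finalizes output.mp4
await _pipeline.StopAsync();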
Royalty-free distribution.
The SDK includes GStreamer libraries licensed under LGPL v2.1.