#
Source Blocks - VisioForge Media Blocks SDK .Net
Source blocks provide data to the pipeline and are typically the first blocks in any media processing chain. VisioForge Media Blocks SDK .Net provides a comprehensive collection of source blocks for various inputs including hardware devices, files, networks, and virtual sources.
#
Hardware Source Blocks
#
System Video Source
SystemVideoSourceBlock is used to access webcams and other video capture devices.
#
Block info
Name: SystemVideoSourceBlock.
#
Enumerate available devices
Use the DeviceEnumerator.Shared.VideoSourcesAsync()
method to get a list of available devices and their specifications: available resolutions, frame rates, and video formats.
#
The sample pipeline
graph LR; SystemVideoSourceBlock-->VideoRendererBlock;
#
Sample code
// Build the pipeline that will host all blocks
var pipeline = new MediaBlocksPipeline();

// Enumerate video capture devices and take the first one found
VideoCaptureDeviceSourceSettings videoSourceSettings = null;
var captureDevice = (await DeviceEnumerator.Shared.VideoSourcesAsync())[0];
if (captureDevice != null)
{
    // Use the device's first reported format (a real application should
    // pick the format that best matches its requirements)
    var formatItem = captureDevice.VideoFormats[0];
    if (formatItem != null)
    {
        videoSourceSettings = new VideoCaptureDeviceSourceSettings(captureDevice)
        {
            Format = formatItem.ToFormat()
        };

        // Apply the first frame rate supported by the selected format
        videoSourceSettings.Format.FrameRate = formatItem.FrameRateList[0];
    }
}

// Source block configured with the selected device and format
var videoSource = new SystemVideoSourceBlock(videoSourceSettings);

// Renderer block draws the frames onto VideoView1
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);

// Wire source -> renderer and run
pipeline.Connect(videoSource.Output, videoRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Remarks
You can specify an API to use when enumerating devices. Windows and Linux platforms have multiple APIs, while Android and iOS platforms have only one API.
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
System Audio Source
SystemAudioSourceBlock is used to access microphones and other audio capture devices.
#
Block info
Name: SystemAudioSourceBlock.
#
Enumerate available devices
Use the DeviceEnumerator.Shared.AudioSourcesAsync()
method call to get a list of available devices and their specifications.
During device enumeration, you can get the list of available devices and their specifications. You can select the device and its format to create the source settings.
#
The sample pipeline
graph LR; SystemAudioSourceBlock-->AudioRendererBlock;
#
Sample code
// Build the pipeline that will host all blocks
var pipeline = new MediaBlocksPipeline();

// Enumerate audio capture devices and take the first one found
IAudioCaptureDeviceSourceSettings audioSourceSettings = null;
var captureDevice = (await DeviceEnumerator.Shared.AudioSourcesAsync())[0];
if (captureDevice != null)
{
    // Use the device's first reported format (a real application should
    // pick the format that best matches its requirements)
    var deviceFormat = captureDevice.Formats[0];
    if (deviceFormat != null)
    {
        audioSourceSettings = captureDevice.CreateSourceSettings(deviceFormat.ToFormat());
    }
}

// Source block configured with the selected device and format
var audioSource = new SystemAudioSourceBlock(audioSourceSettings);

// Renderer block plays the captured audio
var audioRenderer = new AudioRendererBlock();

// Wire source -> renderer and run
pipeline.Connect(audioSource.Output, audioRenderer.Input);
await pipeline.StartAsync();
#
Capture audio from speakers (loopback)
Currently, loopback audio capture is supported only on Windows. Use the LoopbackAudioCaptureDeviceSourceSettings
class to create the source settings for loopback audio capture.
WASAPI2 is used as the default API for loopback audio capture. You can specify the API to use during device enumeration.
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Enumerate audio OUTPUT devices (speakers) via WASAPI2 and take the first one
var outputDevice = (await DeviceEnumerator.Shared.AudioOutputsAsync(AudioOutputDeviceAPI.WASAPI2))[0];
if (outputDevice == null)
{
    return;
}

// Loopback settings capture whatever the selected output device is playing
var loopbackSettings = new LoopbackAudioCaptureDeviceSourceSettings(outputDevice);
var audioSource = new SystemAudioSourceBlock(loopbackSettings);

// Renderer block plays the captured audio
var audioRenderer = new AudioRendererBlock();

// Wire source -> renderer and run
pipeline.Connect(audioSource.Output, audioRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Remarks
You can specify an API to use during the device enumeration. Android and iOS platforms have only one API, while Windows and Linux have multiple APIs.
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
Decklink
For information about Decklink sources, see Decklink.
#
File Source Blocks
#
Universal Source Block
A universal source that decodes video and audio files/network streams and provides uncompressed data to the connected blocks.
Block supports MP4, WebM, AVI, TS, MKV, MP3, AAC, M4A, and many other formats. If FFMPEG redist is available, all decoders available in FFMPEG will also be supported.
#
Block info
Name: UniversalSourceBlock.
#
The sample pipeline
graph LR; UniversalSourceBlock-->VideoRendererBlock; UniversalSourceBlock-->AudioRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Universal source decodes the file and exposes separate video/audio outputs
var fileSource = new UniversalSourceBlock();
fileSource.Filename = "test.mp4";

// Video branch: decoded frames -> on-screen renderer
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(fileSource.VideoOutput, videoRenderer.Input);

// Audio branch: decoded samples -> speakers
var audioRenderer = new AudioRendererBlock();
pipeline.Connect(fileSource.AudioOutput, audioRenderer.Input);

// Run
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
Network Source Blocks
#
RTSP Source Block
The RTSP source supports connection to IP cameras and other devices supporting the RTSP protocol.
Supported video codecs: H264, HEVC, MJPEG. Supported audio codecs: AAC, MP3, PCM, G726, G711, and some others if FFMPEG redist is installed.
#
Block info
Name: RTSPSourceBlock.
#
The sample pipeline
RTSPSourceBlock:VideoOutput
→ VideoRendererBlock
RTSPSourceBlock:AudioOutput
→ AudioRendererBlock
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Configure the RTSP connection settings for the camera stream
var rtspSettings = new RTSPSourceSettings(new Uri("rtsp://login:pwd@192.168.1.64:554/Streaming/Channels/101?transportmode=unicast&profile=Profile_1"), true)
{
    Login = "login",
    Password = "pwd"
};

var rtspSource = new RTSPSourceBlock(rtspSettings);

// Video branch: camera video -> on-screen renderer
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(rtspSource.VideoOutput, videoRenderer.Input);

// Audio branch: camera audio -> speakers
var audioRenderer = new AudioRendererBlock();
pipeline.Connect(rtspSource.AudioOutput, audioRenderer.Input);

// Run
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
HTTP Source Block
The HTTP source block allows data to be retrieved using HTTP/HTTPS protocols. It can be used to read data from MJPEG IP cameras, MP4 network files, or other sources.
#
Block info
Name: HTTPSourceBlock.
#
The sample pipeline
The sample pipeline reads data from an MJPEG camera and displays it using VideoView.
graph LR; HTTPSourceBlock-->JPEGDecoderBlock; JPEGDecoderBlock-->VideoRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// HTTP source settings for an MJPEG camera, with basic credentials
var settings = new HTTPSourceSettings(new Uri("http://mjpegcamera:8080"))
{
    UserID = "username",
    UserPassword = "password"
};

var source = new HTTPSourceBlock(settings);

// The HTTP stream carries JPEG frames, so decode them before rendering
var jpegDecoder = new JPEGDecoderBlock();
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);

// Wire source -> JPEG decoder -> renderer and run
pipeline.Connect(source.Output, jpegDecoder.Input);
pipeline.Connect(jpegDecoder.Output, videoRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, macOS, Linux.
#
NDI Source Block
The NDI source block supports connection to NDI software sources and devices supporting the NDI protocol.
#
Block info
Name: NDISourceBlock.
#
The sample pipeline
graph LR; NDISourceBlock-->VideoRendererBlock; NDISourceBlock-->AudioRendererBlock;
#
Sample code
// create pipeline
var pipeline = new MediaBlocksPipeline();

// get NDI source info by enumerating available NDI sources
var ndiSources = await DeviceEnumerator.Shared.NDISourcesAsync();
var ndiSourceInfo = ndiSources[0];

// create NDI source settings.
// Fix: CreateAsync is asynchronous and must be awaited — without `await`
// a Task (not the settings object) would be passed to NDISourceBlock.
var ndiSettings = await NDISourceSettings.CreateAsync(ndiSourceInfo);
var ndiSource = new NDISourceBlock(ndiSettings);

// video branch: NDI video -> on-screen renderer
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(ndiSource.VideoOutput, videoRenderer.Input);

// audio branch: NDI audio -> speakers
var audioRenderer = new AudioRendererBlock();
pipeline.Connect(ndiSource.AudioOutput, audioRenderer.Input);

// run
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, macOS, Linux.
#
Basler Source Block
The Basler source block supports Basler USB3 Vision and GigE cameras. The Pylon SDK or Runtime should be installed to use the camera source.
#
Block info
Name: BaslerSourceBlock.
#
The sample pipeline
graph LR; BaslerSourceBlock-->VideoRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Enumerate connected Basler cameras and take the first one found
var baslerCameras = await DeviceEnumerator.Shared.BaslerSourcesAsync();
var cameraInfo = baslerCameras[0];

// Source block configured for the selected camera
var source = new BaslerSourceBlock(new BaslerSourceSettings(cameraInfo));

// Renderer block draws the frames onto VideoView1
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);

// Wire source -> renderer and run
pipeline.Connect(source.Output, videoRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, Linux.
#
GenICam Source Block
The GenICam source supports connection to GigE and USB3 Vision cameras that implement the GenICam protocol.
#
Block info
Name: GenICamSourceBlock.
#
The sample pipeline
graph LR; GenICamSourceBlock-->VideoRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Camera name comes from the UI combo box; capture a 512x512 region
// at 15 fps in 8-bit monochrome
var sourceSettings = new GenICamSourceSettings(cbCamera.Text, new VisioForge.Core.Types.Rect(0, 0, 512, 512), 15, GenICamPixelFormat.Mono8);
var source = new GenICamSourceBlock(sourceSettings);

// Wire source -> renderer and run
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(source.Output, videoRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Prerequisites
#
macOS
Install the Aravis
package using Homebrew:
brew install aravis
#
Linux
Install the Aravis
package using the package manager:
sudo apt-get install libaravis-0.8-dev
#
Windows
Install the VisioForge.CrossPlatform.GenICam.Windows.x64
package to your project using NuGet.
#
Platforms
Windows, macOS, Linux
#
Spinnaker/FLIR Source Block
The Spinnaker/FLIR source supports connection to FLIR cameras using Spinnaker SDK.
#
Block info
Name: SpinnakerSourceBlock.
#
The sample pipeline
SpinnakerSourceBlock:Output
→ VideoRendererBlock
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Enumerate connected FLIR cameras and use the first one; capture a
// 1280x720 region at 10 fps
var flirCameras = await DeviceEnumerator.Shared.SpinnakerSourcesAsync();
var sourceSettings = new SpinnakerSourceSettings(flirCameras[0].Name, new VisioForge.Core.Types.Rect(0, 0, 1280, 720), new VideoFrameRate(10));
var source = new SpinnakerSourceBlock(sourceSettings);

// Wire source -> renderer and run
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(source.Output, videoRenderer.Input);
await pipeline.StartAsync();
#
Requirements
- Spinnaker SDK installed.
#
Platforms
Windows
#
SRT Source Block (with decoding)
The Secure Reliable Transport (SRT)
is an open-source video streaming protocol designed for secure and low-latency delivery over unpredictable networks, like the public internet. Developed by Haivision, SRT optimizes streaming performance by dynamically adapting to varying bandwidths and minimizing the effects of packet loss. It incorporates AES encryption for secure content transmission. Primarily used in broadcasting and online streaming, SRT is crucial for delivering high-quality video feeds in real-time applications, enhancing viewer experiences even in challenging network conditions. It supports point-to-point and multicast streaming, making it versatile for diverse setups.
The SRT source block provides decoded video and audio streams from an SRT source.
#
Block info
Name: SRTSourceBlock.
#
The sample pipeline
graph LR; SRTSourceBlock-->VideoRendererBlock; SRTSourceBlock-->AudioRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// SRT source decodes the stream; the URL is taken from the UI text box
var source = new SRTSourceBlock(new SRTSourceSettings() { Uri = edURL.Text });

// Renderers for the decoded video and audio streams
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
var audioRenderer = new AudioRendererBlock();

// Wire both branches and run
pipeline.Connect(source.VideoOutput, videoRenderer.Input);
pipeline.Connect(source.AudioOutput, audioRenderer.Input);
await pipeline.StartAsync();
#
Sample applications
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
SRT RAW Source Block
The Secure Reliable Transport (SRT)
is a streaming protocol that optimizes video data delivery over unpredictable networks, like the Internet. It is open-source and designed to handle high-performance video and audio streaming. SRT provides security through end-to-end encryption, reliability by recovering lost packets, and low latency, which is suitable for live broadcasts. It adapts to varying network conditions by dynamically managing bandwidth, ensuring high-quality streams even under suboptimal conditions. Widely used in broadcasting and streaming applications, SRT supports interoperability and is ideal for remote production and content distribution.
The SRT source supports connection to SRT sources and provides a data stream. You can connect this block to DecodeBinBlock
to decode the stream.
#
Block info
Name: SRTRAWSourceBlock.
#
The sample pipeline
graph LR; SRTRAWSourceBlock-->DecodeBinBlock; DecodeBinBlock-->VideoRendererBlock; DecodeBinBlock-->AudioRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// RAW SRT source emits the undecoded stream; DecodeBin turns it into
// uncompressed video and audio
var source = new SRTRAWSourceBlock(new SRTSourceSettings() { Uri = edURL.Text });
var decodeBin = new DecodeBinBlock();

// Renderers for the decoded streams
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
var audioRenderer = new AudioRendererBlock();

// Wire source -> decoder -> renderers and run
pipeline.Connect(source.Output, decodeBin.Input);
pipeline.Connect(decodeBin.VideoOutput, videoRenderer.Input);
pipeline.Connect(decodeBin.AudioOutput, audioRenderer.Input);
await pipeline.StartAsync();
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
Other Source Blocks
#
Screen Source Block
Screen source supports recording video from the screen. You can select the display (if more than one), the part of the screen to be recorded, and optional mouse cursor recording.
#
Settings
#
Windows
ScreenCaptureDX9SourceSettings
- Use DirectX 9 for screen recording.
ScreenCaptureD3D11SourceSettings
- Use Direct3D 11 for screen recording.
ScreenCaptureGDISourceSettings
- Use GDI for screen recording.
#
macOS
ScreenCaptureMacOSSourceSettings
- Use AVFoundation
for screen recording.
#
Linux
ScreenCaptureXDisplaySourceSettings
- Use X11
for screen recording.
#
iOS
IOSScreenSourceSettings
- Use AVFoundation
for current window recording.
#
Block info
Name: ScreenSourceBlock.
#
The sample pipeline
graph LR; ScreenSourceBlock-->H264EncoderBlock; H264EncoderBlock-->MP4SinkBlock;
#
Sample code
// create pipeline
var pipeline = new MediaBlocksPipeline();

// create source settings (DirectX 9 screen capture at 15 fps).
// Fix: the original snippet was missing the terminating semicolon here
// and would not compile.
var screenSourceSettings = new ScreenCaptureDX9SourceSettings() { FrameRate = 15 };

// create source block
var screenSourceBlock = new ScreenSourceBlock(screenSourceSettings);

// create H.264 video encoder block and connect it to the source block
var h264EncoderBlock = new H264EncoderBlock(new OpenH264EncoderSettings());
pipeline.Connect(screenSourceBlock.Output, h264EncoderBlock.Input);

// create MP4 sink block and connect it to the encoder block
var mp4SinkBlock = new MP4SinkBlock(new MP4SinkSettings(@"output.mp4"));
pipeline.Connect(h264EncoderBlock.Output, mp4SinkBlock.CreateNewInput(MediaBlockPadMediaType.Video));

// run pipeline
await pipeline.StartAsync();
#
[Windows] Window capture
You can capture a specific window by using the ScreenCaptureD3D11SourceSettings
class.
// create Direct3D11 source settings (required for per-window capture)
var source = new ScreenCaptureD3D11SourceSettings();

// set frame rate
source.FrameRate = new VideoFrameRate(30);

// get the native handle of the window to capture (WPF example)
var wih = new System.Windows.Interop.WindowInteropHelper(this);
source.WindowHandle = wih.Handle;

// create source block using the configured D3D11 settings.
// Fix: the original snippet passed a brand-new ScreenCaptureDX9SourceSettings
// object here, silently discarding the window handle configured above.
var screenSourceBlock = new ScreenSourceBlock(source);
// other code is the same as above
#
Sample applications
#
Platforms
Windows, macOS, Linux, iOS.
#
Virtual Video Source Block
VirtualVideoSourceBlock is used to produce test video data in a wide variety of video formats. The type of test data is controlled by the settings.
#
Block info
Name: VirtualVideoSourceBlock.
#
The sample pipeline
graph LR; VirtualVideoSourceBlock-->VideoRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Test-signal generators for audio and video (pattern is controlled
// by the settings objects)
var audioSourceBlock = new VirtualAudioSourceBlock(new VirtualAudioSourceSettings());
var videoSourceBlock = new VirtualVideoSourceBlock(new VirtualVideoSourceSettings());

// Video branch: test pattern -> on-screen renderer
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(videoSourceBlock.Output, videoRenderer.Input);

// Audio branch: test tone -> speakers
var audioRenderer = new AudioRendererBlock();
pipeline.Connect(audioSourceBlock.Output, audioRenderer.Input);

// Run
await pipeline.StartAsync();
#
Platforms
Windows, macOS, Linux, iOS, Android.
#
Virtual Audio Source Block
VirtualAudioSourceBlock is used to produce test audio data in a wide variety of audio formats. The type of test data is controlled by the settings.
#
Block info
Name: VirtualAudioSourceBlock.
#
The sample pipeline
graph LR; VirtualAudioSourceBlock-->AudioRendererBlock;
#
Sample code
// Build the pipeline
var pipeline = new MediaBlocksPipeline();

// Test-signal generators for audio and video (pattern is controlled
// by the settings objects)
var audioSourceBlock = new VirtualAudioSourceBlock(new VirtualAudioSourceSettings());
var videoSourceBlock = new VirtualVideoSourceBlock(new VirtualVideoSourceSettings());

// Video branch: test pattern -> on-screen renderer
var videoRenderer = new VideoRendererBlock(pipeline, VideoView1);
pipeline.Connect(videoSourceBlock.Output, videoRenderer.Input);

// Audio branch: test tone -> speakers
var audioRenderer = new AudioRendererBlock();
pipeline.Connect(audioSourceBlock.Output, audioRenderer.Input);

// Run
await pipeline.StartAsync();
#
Platforms
Windows, macOS, Linux, iOS, Android.