diff --git a/src/MMALSharp.Common/ImageContext.cs b/src/MMALSharp.Common/ImageContext.cs
index f101672e..5e717ef9 100644
--- a/src/MMALSharp.Common/ImageContext.cs
+++ b/src/MMALSharp.Common/ImageContext.cs
@@ -13,54 +13,56 @@ namespace MMALSharp.Common
///
public class ImageContext
{
+ // Fields are used rather than properties for hot-path performance reasons.
+
///
/// The working data.
///
- public byte[] Data { get; set; }
-
+ public byte[] Data;
+
///
/// Flag to indicate whether image frame is raw.
///
- public bool Raw { get; set; }
+ public bool Raw;
///
/// The resolution of the frame we're processing.
///
- public Resolution Resolution { get; set; }
+ public Resolution Resolution;
///
/// The encoding format of the frame we're processing.
///
- public MMALEncoding Encoding { get; set; }
+ public MMALEncoding Encoding;
///
/// The pixel format of the frame we're processing.
///
- public MMALEncoding PixelFormat { get; set; }
-
+ public MMALEncoding PixelFormat;
+
///
/// The image format to store the processed data in.
///
- public ImageFormat StoreFormat { get; set; }
+ public ImageFormat StoreFormat;
///
/// Indicates if this frame represents the end of the stream.
///
- public bool Eos { get; set; }
+ public bool Eos;
///
/// Indicates if this frame contains IFrame data.
///
- public bool IFrame { get; set; }
+ public bool IFrame;
///
/// The timestamp value.
///
- public long? Pts { get; set; }
-
+ public long? Pts;
+
///
/// The pixel format stride.
///
- public int Stride { get; set; }
+ public int Stride;
}
}
\ No newline at end of file
diff --git a/src/MMALSharp.Common/MMALSharp.Common.csproj b/src/MMALSharp.Common/MMALSharp.Common.csproj
index 945c8a6f..eed9c11d 100644
--- a/src/MMALSharp.Common/MMALSharp.Common.csproj
+++ b/src/MMALSharp.Common/MMALSharp.Common.csproj
@@ -17,7 +17,8 @@
0.7.0
..\..\StyleCop.Analyzers.ruleset
bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml
- {65a1440e-72e1-4943-b469-5cfba8cb5633}
+ {65a1440e-72e1-4943-b469-5cfba8cb5633}
+
True
diff --git a/src/MMALSharp.FFmpeg/Handlers/FFmpegCaptureHandler.cs b/src/MMALSharp.FFmpeg/Handlers/FFmpegCaptureHandler.cs
index d4643542..4e4b2592 100644
--- a/src/MMALSharp.FFmpeg/Handlers/FFmpegCaptureHandler.cs
+++ b/src/MMALSharp.FFmpeg/Handlers/FFmpegCaptureHandler.cs
@@ -74,7 +74,7 @@ public static ExternalProcessCaptureHandler RawVideoToMP4(string directory, stri
{
Filename = "ffmpeg",
Arguments = $"-framerate {fps} -i - -b:v {bitrate}k -c copy -movflags +frag_keyframe+separate_moof+omit_tfhd_offset+empty_moov {directory.TrimEnd()}/{filename}.mp4",
- EchoOutput = true,
+ EchoOutput = echoOutput,
DrainOutputDelayMs = 500, // default
TerminationSignals = ExternalProcessCaptureHandlerOptions.SignalsFFmpeg
};
diff --git a/src/MMALSharp.FFmpeg/Handlers/VLCCaptureHandler.cs b/src/MMALSharp.FFmpeg/Handlers/VLCCaptureHandler.cs
index 7e9cd1cc..a5dcd5b1 100644
--- a/src/MMALSharp.FFmpeg/Handlers/VLCCaptureHandler.cs
+++ b/src/MMALSharp.FFmpeg/Handlers/VLCCaptureHandler.cs
@@ -15,19 +15,19 @@ public static class VLCCaptureHandler
private static readonly string _VLCInternalMimeBoundaryName = "7b3cc56e5f51db803f790dad720ed50a";
///
- /// Listens for a request on the given port and begins streaming MJPEG images when a client connects.
+ /// Listens for a request on the given port and begins streaming MJPEG images when a client connects. Requires h.264 encoded I420 (YUV420p) as input.
///
/// The port to listen on. Defaults to 8554.
/// Whether to echo stdout and stderr to the console or suppress it. Defaults to true.
/// Maximum output bitrate. If source data is available at a higher bitrate, VLC caps to this. Defaults to 2500 (2.5 Mbps).
/// Maximum output framerate. If source data is available at a higher framerate, VLC caps to this. Defaults to 20.
/// An initialized instance of ExternalProcessCaptureHandler.
- public static ExternalProcessCaptureHandler StreamMJPEG(int listenPort = 8554, bool echoOutput = true, int maxBitrate = 2500, int maxFps = 20)
+ public static ExternalProcessCaptureHandler StreamH264asMJPEG(int listenPort = 8554, bool echoOutput = true, int maxBitrate = 2500, int maxFps = 20)
{
var opts = new ExternalProcessCaptureHandlerOptions
{
Filename = "cvlc",
- Arguments = $"stream:///dev/stdin --sout \"#transcode{{vcodec=mjpg,vb={maxBitrate},fps={maxFps},acodec=none}}:standard{{access=http{{mime=multipart/x-mixed-replace;boundary=--{_VLCInternalMimeBoundaryName}}},mux=mpjpeg,dst=:{listenPort}/}}\" :demux=h264",
+ Arguments = $"stream:///dev/stdin --sout \"#transcode{{vcodec=mjpg,vb={maxBitrate},fps={maxFps},acodec=none}}:standard{{access=http{{mime=multipart/x-mixed-replace;boundary={_VLCInternalMimeBoundaryName}}},mux=mpjpeg,dst=:{listenPort}/}}\" :demux=h264",
EchoOutput = echoOutput,
DrainOutputDelayMs = 500, // default
TerminationSignals = ExternalProcessCaptureHandlerOptions.SignalsVLC
@@ -35,5 +35,30 @@ public static ExternalProcessCaptureHandler StreamMJPEG(int listenPort = 8554, b
return new ExternalProcessCaptureHandler(opts);
}
+
+ ///
+ /// Listens for a request on the given port and begins streaming MJPEG images when a client connects. Requires raw RGB24 frames as input.
+ ///
+ /// The width of the raw frames. Defaults to 640.
+ /// The height of the raw frames. Defaults to 480.
+ /// Expected FPS of the raw frames. Defaults to 24.
+ /// The port to listen on. Defaults to 8554.
+ /// Whether to echo stdout and stderr to the console or suppress it. Defaults to true.
+ /// Maximum output bitrate. If source data is available at a higher bitrate, VLC caps to this. Defaults to 2500 (2.5 Mbps).
+ /// Maximum output framerate. If source data is available at a higher framerate, VLC caps to this. Defaults to 20.
+ /// An initialized instance of ExternalProcessCaptureHandler.
+ public static ExternalProcessCaptureHandler StreamRawRGB24asMJPEG(int width = 640, int height = 480, int fps = 24, int listenPort = 8554, bool echoOutput = true, int maxBitrate = 2500, int maxFps = 20)
+ {
+ var opts = new ExternalProcessCaptureHandlerOptions
+ {
+ Filename = "/bin/bash",
+ EchoOutput = echoOutput,
+ Arguments = $"-c \"ffmpeg -hide_banner -f rawvideo -c:v rawvideo -pix_fmt rgb24 -s:v {width}x{height} -r {fps} -i - -f h264 -c:v libx264 -preset ultrafast -tune zerolatency -vf format=yuv420p - | cvlc stream:///dev/stdin --sout '#transcode{{vcodec=mjpg,vb={maxBitrate},fps={maxFps},acodec=none}}:standard{{access=http{{mime=multipart/x-mixed-replace;boundary={_VLCInternalMimeBoundaryName}}},mux=mpjpeg,dst=:{listenPort}/}}' :demux=h264\"",
+ DrainOutputDelayMs = 500, // default = 500
+ TerminationSignals = ExternalProcessCaptureHandlerOptions.SignalsFFmpeg
+ };
+
+ return new ExternalProcessCaptureHandler(opts);
+ }
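+
+ // Illustrative usage sketch, not part of this change: the variable name below is hypothetical.
+ // It only exercises the factory method defined above; the caller remains responsible for feeding
+ // raw RGB24 frames of the stated width, height and framerate into the handler.
+ //
+ // var mjpegHandler = VLCCaptureHandler.StreamRawRGB24asMJPEG(width: 640, height: 480, fps: 24, listenPort: 8554);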
}
}
diff --git a/src/MMALSharp.FFmpeg/MMALSharp.FFmpeg.csproj b/src/MMALSharp.FFmpeg/MMALSharp.FFmpeg.csproj
index 34a12ffe..52462955 100644
--- a/src/MMALSharp.FFmpeg/MMALSharp.FFmpeg.csproj
+++ b/src/MMALSharp.FFmpeg/MMALSharp.FFmpeg.csproj
@@ -17,7 +17,8 @@
0.7.0
..\..\StyleCop.Analyzers.ruleset
bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml
- {0600c674-e587-4267-89f3-b52ae9591f80}
+ {0600c674-e587-4267-89f3-b52ae9591f80}
+
True
diff --git a/src/MMALSharp.Processing/Handlers/ExternalProcessCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/ExternalProcessCaptureHandler.cs
index 875df7de..f20a87dd 100644
--- a/src/MMALSharp.Processing/Handlers/ExternalProcessCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/ExternalProcessCaptureHandler.cs
@@ -112,15 +112,6 @@ public void PostProcess()
public string GetDirectory()
=> throw new NotImplementedException();
- ///
- /// Not used.
- ///
- /// N/A.
- /// A NotImplementedException.
- ///
- public ProcessResult Process(uint allocSize)
- => throw new NotImplementedException();
-
///
/// Writes frame data to the StandardInput stream for processing.
///
diff --git a/src/MMALSharp.Processing/Handlers/FrameBufferCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/FrameBufferCaptureHandler.cs
index 7a3ab7a5..8a49aca7 100644
--- a/src/MMALSharp.Processing/Handlers/FrameBufferCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/FrameBufferCaptureHandler.cs
@@ -16,9 +16,8 @@ namespace MMALSharp.Handlers
///
public class FrameBufferCaptureHandler : MemoryStreamCaptureHandler, IMotionCaptureHandler, IVideoCaptureHandler
{
- private MotionConfig _motionConfig;
private bool _detectingMotion;
- private FrameDiffAnalyser _motionAnalyser;
+ private FrameDiffDriver _driver;
private bool _waitForFullFrame = true;
private bool _writeFrameRequested = false;
@@ -45,6 +44,20 @@ public FrameBufferCaptureHandler()
: base()
{ }
+ ///
+ /// Creates a new FrameBufferCaptureHandler configured for motion detection analysis (either using a recorded
+ /// raw video stream where MMALStandalone.Instance is used, or when the camera is used but triggering motion detection events
+ /// is unnecessary). If motion detection events are desired, use the camera's WithMotionDetection method.
+ ///
+ /// The motion configuration.
+ /// A callback for when motion is detected.
+ public FrameBufferCaptureHandler(MotionConfig motionConfig, Action onDetect)
+ : base()
+ {
+ _driver = new FrameDiffDriver(motionConfig, onDetect);
+ _detectingMotion = true;
+ }
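+
+ // Illustrative usage sketch, not part of this change: assumes a MotionConfig instance has been
+ // prepared elsewhere (its constructor arguments are not shown here). The handler then analyses
+ // frames passed to Process without the camera's WithMotionDetection wiring.
+ //
+ // var handler = new FrameBufferCaptureHandler(motionConfig, () => Console.WriteLine("Motion detected"));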
+
///
/// Target directory when is invoked without a directory argument.
///
@@ -99,7 +112,7 @@ public override void Process(ImageContext context)
if (_detectingMotion)
{
- _motionAnalyser.Apply(context);
+ _driver.Apply(context);
}
// accumulate frame data in the underlying memory stream
@@ -122,22 +135,35 @@ public override void Process(ImageContext context)
///
public void ConfigureMotionDetection(MotionConfig config, Action onDetect)
{
- _motionConfig = config;
- _motionAnalyser = new FrameDiffAnalyser(config, onDetect);
+ _driver = new FrameDiffDriver(config, onDetect);
this.EnableMotionDetection();
}
///
public void EnableMotionDetection()
{
- _detectingMotion = true;
- _motionAnalyser?.ResetAnalyser();
+ if (_driver.OnDetectEnabled)
+ {
+ _detectingMotion = true;
+ _driver?.ResetAnalyser();
+ }
+ else
+ {
+ _driver.OnDetectEnabled = true;
+ }
}
///
- public void DisableMotionDetection()
+ public void DisableMotionDetection(bool disableCallbackOnly = false)
{
- _detectingMotion = false;
+ if (disableCallbackOnly)
+ {
+ _driver.OnDetectEnabled = false;
+ }
+ else
+ {
+ _detectingMotion = false;
+ }
}
///
@@ -158,5 +184,14 @@ private void WriteStreamToFile()
this.MostRecentFilename = filename;
this.MostRecentPathname = pathname;
}
+
+ // This is used for temporary local-development performance testing. See the
+ // commented lines before and inside FrameDiffDriver around the Apply method.
+ // public override void Dispose()
+ // {
+ // long perf = (long)((float)_driver.totalElapsed / _driver.frameCounter);
+ // Console.WriteLine($"{perf} ms/frame, total {_driver.frameCounter} frames");
+ // base.Dispose();
+ // }
}
}
diff --git a/src/MMALSharp.Processing/Handlers/IMotionCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/IMotionCaptureHandler.cs
index 6ca80705..de97b882 100644
--- a/src/MMALSharp.Processing/Handlers/IMotionCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/IMotionCaptureHandler.cs
@@ -33,6 +33,8 @@ public interface IMotionCaptureHandler
///
/// Disables motion detection. When configured, this will instruct the capture handler not to detect motion.
///
- void DisableMotionDetection();
+ /// When true, motion detection will continue but the OnDetect callback
+ /// will not be invoked. Call EnableMotionDetection to re-enable the callback.
+ void DisableMotionDetection(bool disableCallbackOnly = false);
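+
+ // Illustrative call patterns, not part of this change; `handler` is a hypothetical implementation:
+ // handler.DisableMotionDetection(); // pause motion analysis entirely
+ // handler.DisableMotionDetection(disableCallbackOnly: true); // keep analysing, suppress the callback
+ // handler.EnableMotionDetection(); // resumes analysis or re-enables the callback, depending on which disable was used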
}
}
diff --git a/src/MMALSharp.Processing/Handlers/InMemoryCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/InMemoryCaptureHandler.cs
index 1743a969..246b9be7 100644
--- a/src/MMALSharp.Processing/Handlers/InMemoryCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/InMemoryCaptureHandler.cs
@@ -22,7 +22,7 @@ public class InMemoryCaptureHandler : OutputCaptureHandler
/// The working data store.
///
public List<byte> WorkingData { get; set; }
-
+
///
/// Creates a new instance of InMemoryCaptureHandler.
///
@@ -30,13 +30,13 @@ public InMemoryCaptureHandler()
{
this.WorkingData = new List<byte>();
}
-
+
///
public override void Dispose()
{
MMALLog.Logger.LogInformation($"Successfully processed {Helpers.ConvertBytesToMegabytes(_totalProcessed)}.");
}
-
+
///
public override void Process(ImageContext context)
{
diff --git a/src/MMALSharp.Processing/Handlers/OutputCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/OutputCaptureHandler.cs
index 01d0b232..9f37218b 100644
--- a/src/MMALSharp.Processing/Handlers/OutputCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/OutputCaptureHandler.cs
@@ -64,10 +64,10 @@ public virtual void PostProcess()
}
///
- /// Allows manipulating of the image frame.
+ /// Allows manipulation of the image frame.
///
/// A delegate to the manipulation you wish to carry out.
- /// The image format to save manipulated files in..
+ /// The image format to save manipulated files in, or null to return raw data.
public void Manipulate(Action context, ImageFormat storeFormat)
{
this.OnManipulate = context;
diff --git a/src/MMALSharp.Processing/Handlers/StreamCaptureHandler.cs b/src/MMALSharp.Processing/Handlers/StreamCaptureHandler.cs
index e10d5bcb..986adbe8 100644
--- a/src/MMALSharp.Processing/Handlers/StreamCaptureHandler.cs
+++ b/src/MMALSharp.Processing/Handlers/StreamCaptureHandler.cs
@@ -23,7 +23,7 @@ public abstract class StreamCaptureHandler : OutputCaptureHandler
/// A Stream instance that we can process image data to.
///
public T CurrentStream { get; protected set; }
-
+
///
public override void Process(ImageContext context)
{
@@ -64,11 +64,11 @@ public override void PostProcess()
}
using (var ms = new MemoryStream(this.ImageContext.Data))
- {
+ {
this.CurrentStream.SetLength(0);
this.CurrentStream.Position = 0;
ms.CopyTo(this.CurrentStream);
- }
+ }
}
}
}
@@ -77,7 +77,7 @@ public override void PostProcess()
MMALLog.Logger.LogWarning($"Something went wrong while processing stream: {e.Message}. {e.InnerException?.Message}. {e.StackTrace}");
}
}
-
+
///
public override string TotalProcessed()
{
diff --git a/src/MMALSharp.Processing/MMALSharp.Processing.csproj b/src/MMALSharp.Processing/MMALSharp.Processing.csproj
index ced92179..bc955e67 100644
--- a/src/MMALSharp.Processing/MMALSharp.Processing.csproj
+++ b/src/MMALSharp.Processing/MMALSharp.Processing.csproj
@@ -18,7 +18,8 @@
0.7.0
..\..\StyleCop.Analyzers.ruleset
bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml
- {dabc9991-56ad-4235-ba86-63def12c261a}
+ {dabc9991-56ad-4235-ba86-63def12c261a}
+
diff --git a/src/MMALSharp.Processing/Processors/Effects/BoxBlur.cs b/src/MMALSharp.Processing/Processors/Effects/BoxBlur.cs
new file mode 100644
index 00000000..cdd37806
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Effects/BoxBlur.cs
@@ -0,0 +1,42 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+using MMALSharp.Common;
+using System;
+using System.Diagnostics;
+
+namespace MMALSharp.Processors.Effects
+{
+ ///
+ /// An image processor used to apply a box-blur effect.
+ ///
+ public class BoxBlur : ConvolutionBase, IFrameProcessor
+ {
+ private const int _kernelWidth = 3;
+ private const int _kernelHeight = 3;
+ private double[,] _kernel = new double[3, 3]
+ {
+ {0.11111111, 0.11111111, 0.11111111 },
+ {0.11111111, 0.11111111, 0.11111111 },
+ {0.11111111, 0.11111111, 0.11111111 },
+ };
+
+ ///
+ public BoxBlur()
+ : base()
+ { }
+
+ ///
+ public BoxBlur(int horizontalCellCount, int verticalCellCount)
+ : base(horizontalCellCount, verticalCellCount)
+ { }
+
+ ///
+ public void Apply(ImageContext context)
+ {
+ this.ApplyConvolution(_kernel, _kernelWidth, _kernelHeight, context);
+ }
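+
+ // Illustrative usage sketch, not part of this change; `context` stands for an ImageContext
+ // produced elsewhere in the pipeline (raw or decodable image data):
+ //
+ // var blur = new BoxBlur();
+ // blur.Apply(context); // context.Data now holds the blurred frame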
+ }
+}
diff --git a/src/MMALSharp.Processing/Processors/Effects/ConvolutionBase.cs b/src/MMALSharp.Processing/Processors/Effects/ConvolutionBase.cs
index ae28e933..e38c03cf 100644
--- a/src/MMALSharp.Processing/Processors/Effects/ConvolutionBase.cs
+++ b/src/MMALSharp.Processing/Processors/Effects/ConvolutionBase.cs
@@ -4,12 +4,16 @@
//
using System;
+using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
+using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
using MMALSharp.Common;
+using MMALSharp.Common.Utility;
namespace MMALSharp.Processors.Effects
{
@@ -18,6 +22,32 @@ namespace MMALSharp.Processors.Effects
///
public abstract class ConvolutionBase
{
+ private readonly int _horizontalCellCount;
+ private readonly int _verticalCellCount;
+
+ ///
+ /// Creates a ConvolutionBase object. This uses the default parallel processing
+ /// cell count based on the image resolution and the recommended values defined by
+ /// FrameAnalyser.RecommendedCellCounts. Requires use of one of the standard camera image resolutions.
+ ///
+ public ConvolutionBase()
+ {
+ _horizontalCellCount = 0;
+ _verticalCellCount = 0;
+ }
+
+ ///
+ /// Creates a ConvolutionBase object with custom parallel processing cell counts.
+ /// You must use this constructor if you are processing non-standard image resolutions.
+ ///
+ /// The number of columns to divide the image into.
+ /// The number of rows to divide the image into.
+ public ConvolutionBase(int horizontalCellCount, int verticalCellCount)
+ {
+ _horizontalCellCount = horizontalCellCount;
+ _verticalCellCount = verticalCellCount;
+ }
+
///
/// Apply a convolution based on the kernel passed in.
///
@@ -27,177 +57,231 @@ public abstract class ConvolutionBase
/// An image context providing additional metadata on the data passed in.
public void ApplyConvolution(double[,] kernel, int kernelWidth, int kernelHeight, ImageContext context)
{
- BitmapData bmpData = null;
- IntPtr pNative = IntPtr.Zero;
- int bytes;
- byte[] store = null;
+ var localContext = context.Raw ? context : CloneToRawBitmap(context);
- using (var ms = new MemoryStream(context.Data))
- using (var bmp = this.LoadBitmap(context, ms))
- {
- bmpData = bmp.LockBits(new Rectangle(0, 0,
- bmp.Width,
- bmp.Height),
- ImageLockMode.ReadWrite,
- bmp.PixelFormat);
+ bool storeFromRaw = context.Raw && context.StoreFormat != null;
- if (context.Raw)
- {
- this.InitBitmapData(context, bmpData);
- }
+ var analyser = new FrameAnalyser
+ {
+ HorizonalCellCount = _horizontalCellCount,
+ VerticalCellCount = _verticalCellCount,
+ };
+ analyser.Apply(localContext);
- pNative = bmpData.Scan0;
+ Parallel.ForEach(analyser.CellRect, (cell)
+ => ProcessCell(cell, localContext.Data, kernel, kernelWidth, kernelHeight, analyser.Metadata, storeFromRaw));
- // Split image into 4 quadrants and process individually.
- var quadA = new Rectangle(0, 0, bmpData.Width / 2, bmpData.Height / 2);
- var quadB = new Rectangle(bmpData.Width / 2, 0, bmpData.Width / 2, bmpData.Height / 2);
- var quadC = new Rectangle(0, bmpData.Height / 2, bmpData.Width / 2, bmpData.Height / 2);
- var quadD = new Rectangle(bmpData.Width / 2, bmpData.Height / 2, bmpData.Width / 2, bmpData.Height / 2);
+ if (context.StoreFormat != null)
+ {
+ FormatRawBitmap(localContext, context);
+ context.Raw = false; // context is never raw after formatting
+ }
+ else
+ {
+ if (!context.Raw)
+ {
+ // TakePicture doesn't set the Resolution; copy it from the cloned context, which read it from the Bitmap.
+ context.Resolution = new Resolution(localContext.Resolution.Width, localContext.Resolution.Height);
- bytes = bmpData.Stride * bmp.Height;
+ context.Data = new byte[localContext.Data.Length];
+ Array.Copy(localContext.Data, context.Data, context.Data.Length);
+ context.Raw = true; // we just copied raw data to the source context
+ }
+ }
+ }
- var rgbValues = new byte[bytes];
+ private void ProcessCell(Rectangle rect, byte[] image, double[,] kernel, int kernelWidth, int kernelHeight, FrameAnalysisMetadata metadata, bool storeFromRaw)
+ {
+ // Rectangle and FrameAnalysisMetadata are structs passed by value, and all of their fields are value types, which makes these copies thread-safe.
- // Copy the RGB values into the array.
- Marshal.Copy(pNative, rgbValues, 0, bytes);
+ int x2 = rect.X + rect.Width;
+ int y2 = rect.Y + rect.Height;
- var bpp = Image.GetPixelFormatSize(bmp.PixelFormat) / 8;
+ int index;
- var t1 = Task.Run(() =>
- {
- this.ProcessQuadrant(quadA, bmp, bmpData, rgbValues, kernel, kernelWidth, kernelHeight, bpp);
- });
- var t2 = Task.Run(() =>
- {
- this.ProcessQuadrant(quadB, bmp, bmpData, rgbValues, kernel, kernelWidth, kernelHeight, bpp);
- });
- var t3 = Task.Run(() =>
+ // When storing from raw data, swap RGB to BGR so that Bitmap.Save writes the correct colours.
+ if (storeFromRaw)
+ {
+ for (var x = rect.X; x < x2; x++)
{
- this.ProcessQuadrant(quadC, bmp, bmpData, rgbValues, kernel, kernelWidth, kernelHeight, bpp);
- });
- var t4 = Task.Run(() =>
+ for (var y = rect.Y; y < y2; y++)
+ {
+ index = (x * metadata.Bpp) + (y * metadata.Stride);
+ byte swap = image[index];
+ image[index] = image[index + 2];
+ image[index + 2] = swap;
+ }
+ }
+ }
+
+ for (var x = rect.X; x < x2; x++)
+ {
+ for (var y = rect.Y; y < y2; y++)
{
- this.ProcessQuadrant(quadD, bmp, bmpData, rgbValues, kernel, kernelWidth, kernelHeight, bpp);
- });
+ double r = 0;
+ double g = 0;
+ double b = 0;
- Task.WaitAll(t1, t2, t3, t4);
+ if (x > kernelWidth && y > kernelHeight)
+ {
+ for (var t = 0; t < kernelWidth; t++)
+ {
+ for (var u = 0; u < kernelHeight; u++)
+ {
+ double k = kernel[t, u];
- if (context.Raw && context.StoreFormat == null)
- {
- store = new byte[bytes];
- Marshal.Copy(pNative, store, 0, bytes);
- }
+ index = (Clamp(y + u, y2) * metadata.Stride) + (Clamp(x + t, x2) * metadata.Bpp);
- bmp.UnlockBits(bmpData);
+ r += image[index] * k;
+ g += image[index + 1] * k;
+ b += image[index + 2] * k;
+ }
+ }
- if (!context.Raw || context.StoreFormat != null)
- {
- using (var ms2 = new MemoryStream())
- {
- bmp.Save(ms2, context.StoreFormat);
- store = new byte[ms2.Length];
- Array.Copy(ms2.ToArray(), 0, store, 0, ms2.Length);
+ r = (r < 0) ? 0 : r;
+ g = (g < 0) ? 0 : g;
+ b = (b < 0) ? 0 : b;
}
+
+ index = (x * metadata.Bpp) + (y * metadata.Stride);
+
+ image[index] = (byte)r;
+ image[index + 1] = (byte)g;
+ image[index + 2] = (byte)b;
}
}
-
- context.Data = store;
}
- private Bitmap LoadBitmap(ImageContext imageContext, MemoryStream stream)
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private int Clamp(int value, int maxIndex)
{
- if (imageContext.Raw)
+ if (value < 0)
+ {
+ return 0;
+ }
+
+ if (value < maxIndex)
{
- PixelFormat format = default;
+ return value;
+ }
- // RGB16 doesn't appear to be supported by GDI?
- if (imageContext.PixelFormat == MMALEncoding.RGB24)
+ return maxIndex - 1;
+ }
+
+ private ImageContext CloneToRawBitmap(ImageContext sourceContext)
+ {
+ var newContext = new ImageContext
+ {
+ Raw = true,
+ Eos = sourceContext.Eos,
+ IFrame = sourceContext.IFrame,
+ Encoding = sourceContext.Encoding,
+ Pts = sourceContext.Pts,
+ StoreFormat = sourceContext.StoreFormat
+ };
+
+ using (var ms = new MemoryStream(sourceContext.Data))
+ {
+ using (var sourceBmp = new Bitmap(ms))
{
- format = PixelFormat.Format24bppRgb;
+ // sourceContext.Resolution isn't set by TakePicture (width,height is 0,0)
+ newContext.Resolution = new Resolution(sourceBmp.Width, sourceBmp.Height);
+
+ // If the source bitmap has a raw-compatible format, use it, otherwise default to RGBA
+ newContext.PixelFormat = PixelFormatToMMALEncoding(sourceBmp.PixelFormat, MMALEncoding.RGBA);
+ var bmpTargetFormat = MMALEncodingToPixelFormat(newContext.PixelFormat);
+ var rect = new Rectangle(0, 0, sourceBmp.Width, sourceBmp.Height);
+
+ using (var newBmp = sourceBmp.Clone(rect, bmpTargetFormat))
+ {
+ BitmapData bmpData = null;
+ try
+ {
+ bmpData = newBmp.LockBits(rect, ImageLockMode.ReadOnly, bmpTargetFormat);
+ var ptr = bmpData.Scan0;
+ int size = bmpData.Stride * newBmp.Height;
+ newContext.Data = new byte[size];
+ newContext.Stride = bmpData.Stride;
+ Marshal.Copy(ptr, newContext.Data, 0, size);
+ }
+ finally
+ {
+ newBmp.UnlockBits(bmpData);
+ }
+ }
}
+ }
+
+ return newContext;
+ }
- if (imageContext.PixelFormat == MMALEncoding.RGB32)
+ private void FormatRawBitmap(ImageContext sourceContext, ImageContext targetContext)
+ {
+ var pixfmt = MMALEncodingToPixelFormat(sourceContext.PixelFormat);
+
+ using (var bitmap = new Bitmap(sourceContext.Resolution.Width, sourceContext.Resolution.Height, pixfmt))
+ {
+ BitmapData bmpData = null;
+ try
{
- format = PixelFormat.Format32bppRgb;
+ bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.WriteOnly, bitmap.PixelFormat);
+ var ptr = bmpData.Scan0;
+ int size = bmpData.Stride * bitmap.Height;
+ var data = sourceContext.Data;
+ Marshal.Copy(data, 0, ptr, size);
}
-
- if (imageContext.PixelFormat == MMALEncoding.RGBA)
+ finally
{
- format = PixelFormat.Format32bppArgb;
+ bitmap.UnlockBits(bmpData);
}
- if (format == default)
+ using (var ms = new MemoryStream())
{
- throw new Exception($"Unsupported pixel format for Bitmap: {imageContext.PixelFormat}.");
+ bitmap.Save(ms, targetContext.StoreFormat);
+ targetContext.Data = new byte[ms.Length];
+ Array.Copy(ms.ToArray(), 0, targetContext.Data, 0, ms.Length);
}
-
- return new Bitmap(imageContext.Resolution.Width, imageContext.Resolution.Height, format);
}
-
- return new Bitmap(stream);
- }
-
- private void InitBitmapData(ImageContext imageContext, BitmapData bmpData)
- {
- var pNative = bmpData.Scan0;
- Marshal.Copy(imageContext.Data, 0, pNative, imageContext.Data.Length);
}
- private void ProcessQuadrant(Rectangle quad, Bitmap bmp, BitmapData bmpData, byte[] rgbValues, double[,] kernel, int kernelWidth, int kernelHeight, int pixelDepth)
+ private PixelFormat MMALEncodingToPixelFormat(MMALEncoding encoding)
{
- unsafe
+ if (encoding == MMALEncoding.RGB24)
{
- // Declare an array to hold the bytes of the bitmap.
- var stride = bmpData.Stride;
-
- byte* ptr1 = (byte*)bmpData.Scan0;
-
- for (int column = quad.X; column < quad.X + quad.Width; column++)
- {
- for (int row = quad.Y; row < quad.Y + quad.Height; row++)
- {
- if (column > kernelWidth && row > kernelHeight)
- {
- int r1 = 0, g1 = 0, b1 = 0;
+ return PixelFormat.Format24bppRgb;
+ }
- for (var l = 0; l < kernelWidth; l++)
- {
- for (var m = 0; m < kernelHeight; m++)
- {
- r1 += (int)(rgbValues[(this.Bound(row + m, quad.Y + quad.Height) * stride) + (this.Bound(column + l, quad.X + quad.Width) * pixelDepth)] * kernel[l, m]);
- g1 += (int)(rgbValues[(this.Bound(row + m, quad.Y + quad.Height) * stride) + (this.Bound(column + l, quad.X + quad.Width) * pixelDepth) + 1] * kernel[l, m]);
- b1 += (int)(rgbValues[(this.Bound(row + m, quad.Y + quad.Height) * stride) + (this.Bound(column + l, quad.X + quad.Width) * pixelDepth) + 2] * kernel[l, m]);
- }
- }
+ if (encoding == MMALEncoding.RGB32)
+ {
+ return PixelFormat.Format32bppRgb;
+ }
- ptr1[(column * pixelDepth) + (row * stride)] = (byte)Math.Max(0, r1);
- ptr1[(column * pixelDepth) + (row * stride) + 1] = (byte)Math.Max(0, g1);
- ptr1[(column * pixelDepth) + (row * stride) + 2] = (byte)Math.Max(0, b1);
- }
- else
- {
- ptr1[(column * pixelDepth) + (row * stride)] = 0;
- ptr1[(column * pixelDepth) + (row * stride) + 1] = 0;
- ptr1[(column * pixelDepth) + (row * stride) + 2] = 0;
- }
- }
- }
+ if (encoding == MMALEncoding.RGBA)
+ {
+ return PixelFormat.Format32bppArgb;
}
+
+ throw new Exception($"Unsupported pixel format: {encoding}");
}
-
- private int Bound(int value, int endIndex)
+
+ private MMALEncoding PixelFormatToMMALEncoding(PixelFormat format, MMALEncoding defaultEncoding)
{
- if (value < 0)
+ if (format == PixelFormat.Format24bppRgb)
{
- return 0;
+ return MMALEncoding.RGB24;
}
- if (value < endIndex)
+ if (format == PixelFormat.Format32bppRgb)
{
- return value;
+ return MMALEncoding.RGB32;
+ }
+
+ if (format == PixelFormat.Format32bppArgb)
+ {
+ return MMALEncoding.RGBA;
}
-
- return endIndex - 1;
+
+ return defaultEncoding;
}
}
}
\ No newline at end of file
diff --git a/src/MMALSharp.Processing/Processors/Effects/EdgeDetection.cs b/src/MMALSharp.Processing/Processors/Effects/EdgeDetection.cs
index 60f98c43..c91d0320 100644
--- a/src/MMALSharp.Processing/Processors/Effects/EdgeDetection.cs
+++ b/src/MMALSharp.Processing/Processors/Effects/EdgeDetection.cs
@@ -4,6 +4,8 @@
//
using MMALSharp.Common;
+using System;
+using System.Diagnostics;
namespace MMALSharp.Processors.Effects
{
@@ -15,7 +17,7 @@ public enum EDStrength
///
/// Low strength.
///
- Low,
+ Low = 0,
///
/// Medium strength.
@@ -27,81 +29,65 @@ public enum EDStrength
///
High
}
-
+
///
/// A kernel based image processor used to apply Edge detection convolution.
///
public class EdgeDetection : ConvolutionBase, IFrameProcessor
{
- ///
- /// The kernel's width.
- ///
- public const int KernelWidth = 3;
-
- ///
- /// The kernel's height.
- ///
- public const int KernelHeight = 3;
+ private const int _kernelWidth = 3;
+ private const int _kernelHeight = 3;
- ///
- /// A kernel used to apply a low strength edge detection convolution to an image.
- ///
- public static double[,] LowStrengthKernel = new double[KernelWidth, KernelHeight]
+ private readonly double[][,] _kernels =
{
- { -1, 0, 1 },
- { 0, 0, 0 },
- { 1, 0, -1 }
+ new double[,] // 0 - Low
+ {
+ { -1, 0, 1 },
+ { 0, 0, 0 },
+ { 1, 0, -1 }
+ },
+ new double[,] // 1 - Medium
+ {
+ { 0, 1, 0 },
+ { 1, -4, 1 },
+ { 0, 1, 0 }
+ },
+ new double[,] // 2 - High
+ {
+ { -1, -1, -1 },
+ { -1, 8, -1 },
+ { -1, -1, -1 }
+ },
};
- ///
- /// A kernel used to apply a medium strength edge detection convolution to an image.
- ///
- public static double[,] MediumStrengthKernel = new double[KernelWidth, KernelHeight]
- {
- { 0, 1, 0 },
- { 1, -4, 1 },
- { 0, 1, 0 }
- };
+ private readonly int _kernelType;
///
- /// A kernel used to apply a high strength edge detection convolution to an image.
+ /// Creates a new instance of the EdgeDetection processor used to apply edge detection convolution.
///
- public static double[,] HighStrengthKernel = new double[KernelWidth, KernelHeight]
+ /// The Edge detection strength.
+ public EdgeDetection(EDStrength strength)
+ : base()
{
- { -1, -1, -1 },
- { -1, 8, -1 },
- { -1, -1, -1 }
- };
+ _kernelType = (int)strength;
+ }
- ///
- /// The working kernel.
- ///
- public double[,] Kernel { get; }
-
///
/// Creates a new instance of the EdgeDetection processor used to apply edge detection convolution.
///
/// The Edge detection strength.
- public EdgeDetection(EDStrength strength)
+ /// The number of columns to divide the image into.
+ /// The number of rows to divide the image into.
+ public EdgeDetection(EDStrength strength, int horizontalCellCount, int verticalCellCount)
+ : base(horizontalCellCount, verticalCellCount)
{
- switch (strength)
- {
- case EDStrength.Low:
- Kernel = LowStrengthKernel;
- break;
- case EDStrength.Medium:
- Kernel = MediumStrengthKernel;
- break;
- case EDStrength.High:
- Kernel = HighStrengthKernel;
- break;
- }
+ _kernelType = (int)strength;
}
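+
+ // Illustrative usage sketch, not part of this change: custom cell counts are only needed for
+ // non-standard resolutions; standard camera resolutions can rely on the first constructor.
+ //
+ // var edges = new EdgeDetection(EDStrength.High); // recommended cell counts
+ // var edgesCustom = new EdgeDetection(EDStrength.High, 32, 30); // explicit 32 x 30 grid
+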
///
public void Apply(ImageContext context)
{
- this.ApplyConvolution(this.Kernel, KernelWidth, KernelHeight, context);
+ this.ApplyConvolution(_kernels[_kernelType], _kernelWidth, _kernelHeight, context);
}
}
-}
\ No newline at end of file
+}
diff --git a/src/MMALSharp.Processing/Processors/Effects/GaussianProcessor.cs b/src/MMALSharp.Processing/Processors/Effects/GaussianProcessor.cs
index 8643cbbb..b06ff383 100644
--- a/src/MMALSharp.Processing/Processors/Effects/GaussianProcessor.cs
+++ b/src/MMALSharp.Processing/Processors/Effects/GaussianProcessor.cs
@@ -4,6 +4,8 @@
//
using MMALSharp.Common;
+using System;
+using System.Diagnostics;
namespace MMALSharp.Processors.Effects
{
@@ -15,7 +17,7 @@ public enum GaussianMatrix
///
/// Use a 3x3 matrix.
///
- Matrix3x3,
+ Matrix3x3 = 0,
///
/// Use a 5x5 matrix.
@@ -28,48 +30,58 @@ public enum GaussianMatrix
///
public class GaussianProcessor : ConvolutionBase, IFrameProcessor
{
- private readonly int _kernelWidth = 3;
- private readonly int _kernelHeight = 3;
+ private readonly int _kernelType;
- private double[,] Kernel { get; }
+ private readonly double[][,] _kernels =
+ {
+ new double[3, 3] // 0 - Matrix3x3
+ {
+ { 0.0625, 0.125, 0.0625 },
+ { 0.125, 0.25, 0.125 },
+ { 0.0625, 0.125, 0.0625 },
+ },
+ new double[5, 5] // 1 - Matrix5x5
+ {
+ { 0.00390625, 0.015625, 0.0234375, 0.015625, 0.00390625 },
+ { 0.015625, 0.0625, 0.09375, 0.0625, 0.015625 },
+ { 0.0234375, 0.09375, 0.140625, 0.09375, 0.0234375 },
+ { 0.015625, 0.0625, 0.09375, 0.0625, 0.015625 },
+ { 0.00390625, 0.015625, 0.0234375, 0.015625, 0.00390625 },
+ },
+ };
+
+ private readonly (int width, int height)[] _sizes =
+ {
+ (3, 3), // 0 - Matrix3x3
+ (5, 5), // 1 - Matrix5x5
+ };
///
/// Creates a new instance of GaussianProcessor.
///
/// The Gaussian matrix to apply.
public GaussianProcessor(GaussianMatrix matrix)
+ : base()
{
- switch (matrix)
- {
- case GaussianMatrix.Matrix3x3:
- _kernelWidth = 3;
- _kernelHeight = 3;
- Kernel = new double[3, 3]
- {
- { 0.0625, 0.125, 0.0625 },
- { 0.125, 0.25, 0.125 },
- { 0.0625, 0.125, 0.0625 }
- };
- break;
- case GaussianMatrix.Matrix5x5:
- _kernelWidth = 5;
- _kernelHeight = 5;
- Kernel = new double[5, 5]
- {
- { 0.00390625, 0.015625, 0.0234375, 0.015625, 0.00390625 },
- { 0.015625, 0.0625, 0.09375, 0.0625, 0.015625 },
- { 0.0234375, 0.09375, 0.140625, 0.09375, 0.0234375 },
- { 0.015625, 0.0625, 0.09375, 0.0625, 0.015625 },
- { 0.00390625, 0.015625, 0.0234375, 0.015625, 0.00390625 },
- };
- break;
- }
+ _kernelType = (int)matrix;
+ }
+
+ ///
+ /// Creates a new instance of GaussianProcessor.
+ ///
+ /// The Gaussian matrix to apply.
+ /// The number of columns to divide the image into.
+ /// The number of rows to divide the image into.
+ public GaussianProcessor(GaussianMatrix matrix, int horizontalCellCount, int verticalCellCount)
+ : base(horizontalCellCount, verticalCellCount)
+ {
+ _kernelType = (int)matrix;
}
///
public void Apply(ImageContext context)
{
- this.ApplyConvolution(this.Kernel, _kernelWidth, _kernelHeight, context);
+ this.ApplyConvolution(_kernels[_kernelType], _sizes[_kernelType].width, _sizes[_kernelType].height, context);
}
}
}
\ No newline at end of file
diff --git a/src/MMALSharp.Processing/Processors/Effects/LineDetection.cs b/src/MMALSharp.Processing/Processors/Effects/LineDetection.cs
new file mode 100644
index 00000000..e3c4598b
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Effects/LineDetection.cs
@@ -0,0 +1,118 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+using MMALSharp.Common;
+using System;
+using System.Diagnostics;
+
+namespace MMALSharp.Processors.Effects
+{
+ ///
+ /// The type of line highlighted by the processor.
+ ///
+ public enum LineDetectionType
+ {
+ ///
+ /// Highlights horizontal lines.
+ ///
+ Horizontal = 0,
+
+ ///
+ /// Highlights vertical lines.
+ ///
+ Vertical,
+
+ ///
+ /// Highlights horizontal lines with a slight smoothing effect (less sensitive to noise).
+ ///
+ SobelHorizontal,
+
+ ///
+ /// Highlights vertical lines with a slight smoothing effect (less sensitive to noise).
+ ///
+ SobelVertical,
+
+ ///
+ /// Highlights diagonal lines sloping down left-to-right (135 degrees).
+ ///
+ DiagonalDown,
+
+ ///
+ /// Highlights diagonal lines sloping up left-to-right (45 degrees).
+ ///
+ DiagonalUp,
+ }
+
+ ///
+ /// An image processor used to highlight straight lines.
+ ///
+ public class LineDetection : ConvolutionBase, IFrameProcessor
+ {
+ private const int _kernelWidth = 3;
+ private const int _kernelHeight = 3;
+
+ private readonly double[][,] _kernels =
+ {
+ new double[3, 3] // 0 - Horizontal
+ {
+ { -1, 2, -1 },
+ { -1, 2, -1 },
+ { -1, 2, -1 }
+ },
+ new double[3, 3] // 1 - Vertical
+ {
+ { -1, -1, -1 },
+ { 2, 2, 2 },
+ { -1, -1, -1 }
+ },
+ new double[3, 3] // 2 - SobelHorizontal
+ {
+ { -1, 0, 1 },
+ { -2, 0, 2 },
+ { -1, 0, 1 }
+ },
+ new double[3, 3] // 3 - SobelVertical
+ {
+ { -1, -2, -1 },
+ { 0, 0, 0 },
+ { 1, 2, 1 }
+ },
+ new double[3, 3] // 4 - DiagonalDown
+ {
+ { -1, -1, 2 },
+ { -1, 2, -1 },
+ { 2, -1, -1 }
+ },
+ new double[3, 3] // 5 - DiagonalUp
+ {
+ { 2, -1, -1 },
+ { -1, 2, -1 },
+ { -1, -1, 2 }
+ }
+ };
+
+ private readonly int _kernelType;
+
+ ///
+ public LineDetection(LineDetectionType lineType)
+ : base()
+ {
+ _kernelType = (int)lineType;
+ }
+
+ ///
+ public LineDetection(LineDetectionType lineType, int horizontalCellCount, int verticalCellCount)
+ : base(horizontalCellCount, verticalCellCount)
+ {
+ _kernelType = (int)lineType;
+ }
+
+ ///
+ public void Apply(ImageContext context)
+ {
+ this.ApplyConvolution(_kernels[_kernelType], _kernelWidth, _kernelHeight, context);
+ }
+ }
+}
diff --git a/src/MMALSharp.Processing/Processors/Effects/SharpenProcessor.cs b/src/MMALSharp.Processing/Processors/Effects/SharpenProcessor.cs
index 1b9fdf8f..4326f727 100644
--- a/src/MMALSharp.Processing/Processors/Effects/SharpenProcessor.cs
+++ b/src/MMALSharp.Processing/Processors/Effects/SharpenProcessor.cs
@@ -4,6 +4,8 @@
//
using MMALSharp.Common;
+using System;
+using System.Diagnostics;
namespace MMALSharp.Processors.Effects
{
@@ -17,11 +19,21 @@ public class SharpenProcessor : ConvolutionBase, IFrameProcessor
private double[,] _kernel = new double[KernelWidth, KernelHeight]
{
- { 0, -1, 0 },
+ { 0, -1, 0 },
{ -1, 5, -1 },
- { 0, -1, 0 }
+ { 0, -1, 0 },
};
+ ///
+ public SharpenProcessor()
+ : base()
+ { }
+
+ ///
+ public SharpenProcessor(int horizontalCellCount, int verticalCellCount)
+ : base(horizontalCellCount, verticalCellCount)
+ { }
+
///
public void Apply(ImageContext context)
{
diff --git a/src/MMALSharp.Processing/Processors/FrameAnalyser.cs b/src/MMALSharp.Processing/Processors/FrameAnalyser.cs
index c7dbc2e8..fc1f8cd9 100644
--- a/src/MMALSharp.Processing/Processors/FrameAnalyser.cs
+++ b/src/MMALSharp.Processing/Processors/FrameAnalyser.cs
@@ -3,7 +3,10 @@
// Licensed under the MIT License. Please see LICENSE.txt for License info.
//
+using System;
using System.Collections.Generic;
+using System.Drawing;
+using System.Drawing.Imaging;
using Microsoft.Extensions.Logging;
using MMALSharp.Common;
using MMALSharp.Common.Utility;
@@ -11,10 +14,105 @@
namespace MMALSharp.Processors
{
///
- /// The FrameAnalyser class is used with the Image Analysis API.
+ /// The FrameAnalyser class is used with the Image Analysis API and
+ /// is the base class for frame-differencing motion detection.
///
- public abstract class FrameAnalyser : IFrameAnalyser
+ public class FrameAnalyser : IFrameAnalyser
{
+ // Some members are fields rather than properties for parallel processing performance reasons.
+ // Array-based fields are threadsafe as long as multiple threads access unique array indices.
+
+ ///
+ /// Cells are subsections of an image frame which are processed in parallel. This dictionary contains a list
+ /// of recommended cell count values based on image resolution. The dictionary key is a (width, height) tuple
+ /// and the value is a (horizontal, vertical) tuple. Multiply the horizontal and vertical values for total cell
+ /// count. Approximately 800 to 1000 cells seems ideal for a Raspberry Pi 4B. Note that the buffer (width, height)
+ /// can differ from the camera resolution (the hardware requires a horizontal 32-byte boundary and vertical 16-byte
+ /// boundary). The padded buffer "resolutions" are also provided by this dictionary.
+ ///
+ public static readonly IReadOnlyDictionary<(int width, int height), (int horizontal, int vertical)> RecommendedCellCounts
+ = new Dictionary<(int width, int height), (int horizontal, int vertical)>(13)
+ {
+ // For 1640 x 922, there is no useful divisor for the 922 pixel Y resolution. Dividing by 461 would
+ // yield an integer cell height of 2, but convolution requires at least 3 pixels. Instead we use 23
+ // which yields a cell height of just over 40, meaning the last row of pixels in each cell are not
+ // processed (the indexers are integers). However, the padded-buffer version (second list) has a
+ // vertical height of 928 which is divisible by 16.
+
+ // pixels per cell
+ { (1920, 1080), (30, 30) }, // 900 cells 64 x 36 Y padded buffer (see list below)
+ { (2592, 1944), (36, 36) }, // 1296 cells 72 x 54 Y padded
+ { (1296, 972), (27, 27) }, // 729 cells 48 x 36 XY padded
+ { (1296, 730), (72, 10) }, // 720 cells 18 x 73 XY padded
+ { (640, 480), (32, 32) }, // 1024 cells 20 x 15 not padded
+ { (3280, 2464), (40, 22) }, // 880 cells 82 x 112 X padded
+ { (1640, 1232), (40, 22) }, // 880 cells 41 x 56 X padded
+ { (1640, 922), (40, 23) }, // 920 cells 41 x 40.09 XY padded
+ { (1280, 720), (20, 36) }, // 720 cells 64 x 20 not padded
+ { (2028, 1080), (26, 36) }, // 936 cells 78 x 30 XY padded
+ { (2028, 1520), (26, 38) }, // 988 cells 78 x 40 X padded
+ { (4056, 3040), (26, 32) }, // 832 cells 156 x 95 X padded
+ { (1012, 760), (44, 19) }, // 836 cells 23 x 40 XY padded
+
+ // The raw image hardware buffer is padded to align to a 32-byte width and 16-byte height. This
+ // padded buffer size is what is stored into ImageContext.Resolution, not the requested camera
+ // pixel resolution. The following list represents the padded buffer sizes. The data in the buffer
+ // matches the camera resolution, the pixels added for padding are always empty (zero).
+
+ { (1920, 1088), (30, 32) }, // 960 cells 64 x 34 res 1920 x 1080
+ { (2592, 1952), (36, 32) }, // 1152 cells 72 x 61 res 2592 x 1944
+ { (1312, 976), (32, 16) }, // 512 cells 41 x 61 res 1296 x 972
+ { (1312, 736), (32, 23) }, // 736 cells 41 x 32 res 1296 x 730
+ { (3296, 2464), (32, 22) }, // 704 cells 103 x 112 res 3280 x 2464
+ { (1664, 1232), (52, 22) }, // 1144 cells 32 x 56 res 1640 x 1232
+ { (1664, 928), (52, 16) }, // 832 cells 32 x 58 res 1640 x 922
+ { (2048, 1088), (32, 32) }, // 1024 cells 64 x 34 res 2028 x 1080
+ { (2048, 1520), (32, 38) }, // 1216 cells 64 x 40 res 2028 x 1520
+ { (4064, 3040), (32, 32) }, // 1024 cells 127 x 95 res 4056 x 3040
+ { (1024, 768), (32, 24) }, // 768 cells 32 x 32 res 1012 x 760
+ };
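+
+ // Illustrative lookup sketch, not part of this change: retrieving the recommended cell counts
+ // for a 640 x 480 buffer; multiplying the two values gives the total number of parallel cells.
+ //
+ // if (FrameAnalyser.RecommendedCellCounts.TryGetValue((640, 480), out var cells))
+ // {
+ //     int totalCells = cells.horizontal * cells.vertical; // 32 * 32 = 1024
+ // }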
+
+ ///
+ /// Tracks whether a full frame has been received and processed yet. Fields and properties
+ /// like and are not valid until this is true.
+ ///
+ internal bool CompletedFirstFrame = false;
+
+ ///
+ /// Represents the coordinates of each test cell for parallel processing. This is
+ /// threadsafe if threads do not access overlapping array indices.
+ ///
+ internal Rectangle[] CellRect;
+
+ ///
+ /// A byte array representation of the FrameAnalyser's own WorkingData object. Required
+ /// to provide fast thread-safe access for parallel analysis.
+ ///
+ internal byte[] CurrentFrame;
+
+ ///
+ /// Frame details collected when the first full frame is available. This is a struct and is
+ /// a threadsafe copy when passed by value as a method argument. Multiple threads must never
+ /// access this instance directly.
+ ///
+ internal FrameAnalysisMetadata Metadata;
+
+ ///
+ /// The number of cells an image frame is divided into horizontally for parallel processing. This should
+ /// be a value that divides evenly into the X resolution of the image frame. A list of recommended
+ /// values is provided in RecommendedCellCounts. If this value is not set when the first
+ /// full frame is received, the recommended values for the image resolution are used.
+ ///
+ public int HorizonalCellCount { get; set; }
+
+ ///
+ /// The number of cells an image frame is divided into vertically for parallel processing. This should
+ /// be a value that divides evenly into the Y resolution of the image frame. A list of recommended
+ /// values is provided in RecommendedCellCounts. If this value is not set when the first
+ /// full frame is received, the recommended values for the image resolution are used.
+ ///
+ public int VerticalCellCount { get; set; }
+
///
/// The frame we are working with.
///
@@ -24,11 +122,35 @@ public abstract class FrameAnalyser : IFrameAnalyser
/// True if the working data store contains a full frame.
///
protected bool FullFrame { get; set; }
-
+
+ ///
+ /// The number of bytes defining a pixel, based on the ImageContext.PixelFormat.
+ ///
+ /// Contains the data and metadata for an image frame.
+ /// The number of bytes per pixel.
+ public int GetBytesPerPixel(ImageContext context)
+ {
+ if (context.PixelFormat == null)
+ {
+ throw new Exception("Pixel format is null");
+ }
+
+ // RGB16 doesn't appear to be supported by GDI?
+ if (context.PixelFormat == MMALEncoding.RGB24)
+ {
+ return 24 / 8;
+ }
+
+ if (context.PixelFormat == MMALEncoding.RGB32 || context.PixelFormat == MMALEncoding.RGBA)
+ {
+ return 32 / 8;
+ }
+
+ throw new Exception($"Unsupported pixel format: {context.PixelFormat}");
+ }
+
///
/// Creates a new instance of FrameAnalyser.
///
- protected FrameAnalyser()
+ public FrameAnalyser()
{
this.WorkingData = new List<byte>();
}
@@ -51,6 +173,59 @@ public virtual void Apply(ImageContext context)
if (context.Eos)
{
this.FullFrame = true;
+
+ this.CurrentFrame = this.WorkingData.ToArray();
+
+ if (!CompletedFirstFrame)
+ {
+ ProcessFirstFrame(context);
+ CompletedFirstFrame = true;
+ }
+ }
+ }
+
+ ///
+ /// Executed the first time Apply receives an ImageContext with EOS set to true.
+ ///
+ /// Contains the data and metadata for an image frame.
+ protected virtual void ProcessFirstFrame(ImageContext context)
+ {
+ // Collect basic frame dimensions
+ Metadata.Width = context.Resolution.Width;
+ Metadata.Height = context.Resolution.Height;
+ Metadata.Bpp = this.GetBytesPerPixel(context);
+ Metadata.Stride = context.Stride;
+
+ if (HorizonalCellCount == 0 || VerticalCellCount == 0)
+ {
+ (int h, int v) counts;
+
+ if (!RecommendedCellCounts.TryGetValue((Metadata.Width, Metadata.Height), out counts))
+ {
+ throw new Exception($"Resolution {Metadata.Width}x{Metadata.Height} has no recommended cell counts");
+ }
+
+ HorizonalCellCount = counts.h;
+ VerticalCellCount = counts.v;
+ }
+
+ // Prepare the parallel processing cells
+ int indices = HorizonalCellCount * VerticalCellCount;
+ Metadata.CellWidth = Metadata.Width / HorizonalCellCount;
+ Metadata.CellHeight = Metadata.Height / VerticalCellCount;
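+
+ // Worked example, for illustration: a 640 x 480 frame with the recommended 32 x 32 cell counts
+ // yields CellWidth = 20 and CellHeight = 15, i.e. 1024 rectangles of 20 x 15 pixels each.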
+ int i = 0;
+
+ CellRect = new Rectangle[indices];
+
+ for (int row = 0; row < VerticalCellCount; row++)
+ {
+ int y = row * Metadata.CellHeight;
+ for (int col = 0; col < HorizonalCellCount; col++)
+ {
+ int x = col * Metadata.CellWidth;
+ CellRect[i] = new Rectangle(x, y, Metadata.CellWidth, Metadata.CellHeight);
+ i++;
+ }
}
}
}
diff --git a/src/MMALSharp.Processing/Processors/FrameAnalysisMetadata.cs b/src/MMALSharp.Processing/Processors/FrameAnalysisMetadata.cs
new file mode 100644
index 00000000..9d8a2bbb
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/FrameAnalysisMetadata.cs
@@ -0,0 +1,49 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+namespace MMALSharp.Processors
+{
+ ///
+ /// A structure for storing frame metadata used for parallel processing by image analysis
+ /// and effects APIs. A struct is passed by-value which makes it a threadsafe local copy.
+ /// Pass this structure to parallel processing algorithms as a method argument to prevent
+ /// multiple threads from accessing the same copy.
+ ///
+ public struct FrameAnalysisMetadata
+ {
+ // Members are fields rather than properties for parallel processing performance reasons.
+ // These must be value-type fields for thread safety. Object references would not be thread safe.
+
+ ///
+ /// Frame width in pixels.
+ ///
+ internal int Width;
+
+ ///
+ /// Frame height in pixels.
+ ///
+ internal int Height;
+
+ ///
+ /// Frame stride (bytes per row).
+ ///
+ internal int Stride;
+
+ ///
+ /// Frame bytes per pixel.
+ ///
+ internal int Bpp;
+
+ ///
+ /// Width of a parallel processing cell in pixels.
+ ///
+ internal int CellWidth;
+
+ ///
+ /// Height of a parallel processing cell in pixels.
+ ///
+ internal int CellHeight;
+ }
+}
diff --git a/src/MMALSharp.Processing/Processors/Motion/FrameDiffAnalyser.cs b/src/MMALSharp.Processing/Processors/Motion/FrameDiffAnalyser.cs
deleted file mode 100644
index 1c4b20db..00000000
--- a/src/MMALSharp.Processing/Processors/Motion/FrameDiffAnalyser.cs
+++ /dev/null
@@ -1,324 +0,0 @@
-//
-// Copyright (c) Ian Auty and contributors. All rights reserved.
-// Licensed under the MIT License. Please see LICENSE.txt for License info.
-//
-
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Drawing;
-using System.Drawing.Imaging;
-using System.IO;
-using System.Runtime.InteropServices;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Logging;
-using MMALSharp.Common;
-using MMALSharp.Common.Utility;
-
-namespace MMALSharp.Processors.Motion
-{
- ///
- /// The is used to detect changes between two image frames.
- ///
- public class FrameDiffAnalyser : FrameAnalyser
- {
- // When true, PrepareTestFrame does additional start-up processing
- private bool _firstFrame = true;
-
- // Frame dimensions collected when the first full frame is complete
- private int _frameWidth;
- private int _frameHeight;
- private int _frameStride;
- private int _frameBpp;
-
- private byte[] _mask;
- private Stopwatch _testFrameAge;
-
- private int[] _cellDiff;
- private Rectangle[] _cellRect;
- private byte[] _workingData;
-
- ///
- /// Controls how many cells the frames are divided into. The result is a power of two of this
- /// value (so the default of 32 yields 1024 cells). These cells are processed in parallel. This
- /// should be a value that divides evenly into the X and Y resolutions of the motion stream.
- ///
- public int CellDivisor { get; set; } = 32;
-
- internal Action OnDetect { get; set; }
-
- ///
- /// This is the image we are comparing against new incoming frames.
- ///
- protected byte[] TestFrame { get; set; }
-
- ///
- /// Indicates whether we have a full test frame.
- ///
- protected bool FullTestFrame { get; set; }
-
- ///
- /// The motion configuration object.
- ///
- protected MotionConfig MotionConfig { get; set; }
-
- ///
- /// The image metadata.
- ///
- protected ImageContext ImageContext { get; set; }
-
- ///
- /// Creates a new instance of .
- ///
- /// The motion configuration object.
- /// A callback when changes are detected.
- public FrameDiffAnalyser(MotionConfig config, Action onDetect)
- {
- this.MotionConfig = config;
- this.OnDetect = onDetect;
-
- _testFrameAge = new Stopwatch();
- }
-
- ///
- public override void Apply(ImageContext context)
- {
- this.ImageContext = context;
-
- base.Apply(context);
-
- if (!this.FullTestFrame)
- {
- if (context.Eos)
- {
- this.FullTestFrame = true;
- this.PrepareTestFrame();
- MMALLog.Logger.LogDebug("EOS reached for test frame.");
- }
- }
- else
- {
- MMALLog.Logger.LogDebug("Have full test frame.");
-
- if (this.FullFrame && !this.TestFrameExpired())
- {
- MMALLog.Logger.LogDebug("Have full frame, checking for changes.");
-
- this.CheckForChanges(this.OnDetect);
- }
- }
- }
-
- ///
- /// Resets the test and working frames this analyser is using.
- ///
- public void ResetAnalyser()
- {
- this.TestFrame = null;
- this.WorkingData = new List();
- this.FullFrame = false;
- this.FullTestFrame = false;
-
- _testFrameAge.Reset();
- }
-
- private void PrepareTestFrame()
- {
- if (_firstFrame)
- {
- // one-time collection of basic frame dimensions
- _frameWidth = this.ImageContext.Resolution.Width;
- _frameHeight = this.ImageContext.Resolution.Height;
- _frameBpp = this.GetBpp() / 8;
- _frameStride = this.ImageContext.Stride;
-
- // one-time setup of the diff cell parameters and arrays
- int indices = (int)Math.Pow(CellDivisor, 2);
- int cellWidth = _frameWidth / CellDivisor;
- int cellHeight = _frameHeight / CellDivisor;
- int i = 0;
-
- _cellRect = new Rectangle[indices];
- _cellDiff = new int[indices];
-
- for (int row = 0; row < CellDivisor; row++)
- {
- int y = row * cellHeight;
-
- for (int col = 0; col < CellDivisor; col++)
- {
- int x = col * cellWidth;
- _cellRect[i] = new Rectangle(x, y, cellWidth, cellHeight);
- i++;
- }
- }
-
- this.TestFrame = this.WorkingData.ToArray();
-
- if (!string.IsNullOrWhiteSpace(this.MotionConfig.MotionMaskPathname))
- {
- this.PrepareMask();
- }
-
- _firstFrame = false;
- }
- else
- {
- this.TestFrame = this.WorkingData.ToArray();
- }
-
- if (this.MotionConfig.TestFrameInterval != TimeSpan.Zero)
- {
- _testFrameAge.Restart();
- }
- }
-
- private int GetBpp()
- {
- PixelFormat format = default;
-
- // RGB16 doesn't appear to be supported by GDI?
- if (this.ImageContext.PixelFormat == MMALEncoding.RGB24)
- {
- return 24;
- }
-
- if (this.ImageContext.PixelFormat == MMALEncoding.RGB32 || this.ImageContext.PixelFormat == MMALEncoding.RGBA)
- {
- return 32;
- }
-
- if (format == default)
- {
- throw new Exception("Unsupported pixel format.");
- }
-
- return 0;
- }
-
- private void PrepareMask()
- {
- using (var fs = new FileStream(this.MotionConfig.MotionMaskPathname, FileMode.Open, FileAccess.Read))
- using (var mask = new Bitmap(fs))
- {
- // Verify it matches our frame dimensions
- var maskBpp = Image.GetPixelFormatSize(mask.PixelFormat) / 8;
- if (mask.Width != _frameWidth || mask.Height != _frameHeight || maskBpp != _frameBpp)
- {
- throw new Exception("Motion-detection mask must match raw stream width, height, and format (bits per pixel)");
- }
-
- // Store the byte array
- BitmapData bmpData = null;
- try
- {
- bmpData = mask.LockBits(new Rectangle(0, 0, mask.Width, mask.Height), ImageLockMode.ReadOnly, mask.PixelFormat);
- var pNative = bmpData.Scan0;
- int size = bmpData.Stride * mask.Height;
- _mask = new byte[size];
- Marshal.Copy(pNative, _mask, 0, size);
- }
- finally
- {
- mask.UnlockBits(bmpData);
- }
- }
- }
-
- private bool TestFrameExpired()
- {
- if (this.MotionConfig.TestFrameInterval == TimeSpan.Zero || _testFrameAge.Elapsed < this.MotionConfig.TestFrameInterval)
- {
- return false;
- }
-
- MMALLog.Logger.LogDebug("Have full frame, updating test frame.");
- this.PrepareTestFrame();
- return true;
- }
-
- private void CheckForChanges(Action onDetect)
- {
- var diff = this.Analyse();
-
- if (diff >= this.MotionConfig.Threshold)
- {
- MMALLog.Logger.LogInformation($"Motion detected! Frame difference {diff}.");
- onDetect();
- }
- }
-
- private int Analyse()
- {
- _workingData = this.WorkingData.ToArray();
-
- var result = Parallel.ForEach(_cellDiff, (cell, loopState, loopIndex) => CheckDiff(loopIndex, loopState));
-
- // How Parallel Stop works: https://docs.microsoft.com/en-us/previous-versions/msp-n-p/ff963552(v=pandp.10)#parallel-stop
- if (!result.IsCompleted && !result.LowestBreakIteration.HasValue)
- {
- return int.MaxValue; // loop was stopped, so return a large diff
- }
- else
- {
- int diff = 0;
-
- foreach (var cellDiff in _cellDiff)
- {
- diff += cellDiff;
- }
-
- return diff;
- }
- }
-
- private void CheckDiff(long cellIndex, ParallelLoopState loopState)
- {
- int diff = 0;
- var rect = _cellRect[cellIndex];
-
- for (int col = rect.X; col < rect.X + rect.Width; col++)
- {
- for (int row = rect.Y; row < rect.Y + rect.Height; row++)
- {
- var index = (col * _frameBpp) + (row * _frameStride);
-
- if (_mask != null)
- {
- var rgbMask = _mask[index] + _mask[index + 1] + _mask[index + 2];
-
- if (rgbMask == 0)
- {
- continue;
- }
- }
-
- var rgb1 = TestFrame[index] + TestFrame[index + 1] + TestFrame[index + 2];
- var rgb2 = _workingData[index] + _workingData[index + 1] + _workingData[index + 2];
-
- if (rgb2 - rgb1 > MotionConfig.Threshold)
- {
- diff++;
- }
-
- // If the threshold has been exceeded, exit immediately and preempt any CheckDiff calls not yet started.
- if (diff > MotionConfig.Threshold)
- {
- _cellDiff[cellIndex] = diff;
- loopState.Stop();
- return;
- }
- }
-
- if (diff > MotionConfig.Threshold)
- {
- _cellDiff[cellIndex] = diff;
- loopState.Stop();
- return;
- }
- }
-
- _cellDiff[cellIndex] = diff;
- }
- }
-}
diff --git a/src/MMALSharp.Processing/Processors/Motion/FrameDiffDriver.cs b/src/MMALSharp.Processing/Processors/Motion/FrameDiffDriver.cs
new file mode 100644
index 00000000..5ebe5486
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Motion/FrameDiffDriver.cs
@@ -0,0 +1,236 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.IO;
+using System.Runtime.InteropServices;
+using Microsoft.Extensions.Logging;
+using MMALSharp.Common;
+using MMALSharp.Common.Utility;
+
+namespace MMALSharp.Processors.Motion
+{
+ ///
+ /// A frame difference motion detection class which buffers a test frame and a current frame,
+ /// and invokes a motion detection algorithm to detect motion.
+ ///
+ public class FrameDiffDriver : FrameAnalyser
+ {
+ // Some members are fields rather than properties for parallel processing performance reasons.
+ // Array-based fields are threadsafe as long as multiple threads access unique array indices.
+
+ ///
+ /// Fully black pixels are skipped when comparing the test frame to the current frame.
+ ///
+ internal byte[] FrameMask;
+
+ ///
+ /// This is the image we are comparing against new incoming frames.
+ ///
+ internal byte[] TestFrame;
+
+ ///
+ /// The number of pixels that differ in each cell between the test frame and current frame.
+ ///
+ internal int[] CellDiff;
+
+ private Action _onDetect;
+ private MotionConfig _motionConfig;
+ private bool _fullTestFrame;
+ private Stopwatch _testFrameAge;
+
+ ///
+ /// When true, the OnDetect Action will be invoked when motion is detected. Using this instead
+ /// of the capture handler's Enable/DisableMotionDetection allows ongoing motion detection.
+ ///
+ public bool OnDetectEnabled { get; set; }
+
+ ///
+ /// Tracks the elapsed time since motion was last detected.
+ ///
+ public Stopwatch LastDetectionEvent { get; private set; }
+
+ ///
+ /// Creates a new instance of FrameDiffDriver.
+ ///
+ /// The motion configuration object.
+ /// A callback when changes are detected.
+ public FrameDiffDriver(MotionConfig config, Action onDetect)
+ {
+ _motionConfig = config;
+
+ _onDetect = onDetect;
+ this.OnDetectEnabled = _onDetect != null;
+
+ _testFrameAge = new Stopwatch();
+ this.LastDetectionEvent = new Stopwatch();
+ }
+
+ // These are used for temporary local performance-testing; uncomment these and four lines in
+ // the Apply method below, and see the Dispose method at the end of FrameBufferCaptureHandler.
+ // private Stopwatch frameTimer = new Stopwatch();
+ // internal long frameCounter;
+ // internal long totalElapsed;
+
+ ///
+ public override void Apply(ImageContext context)
+ {
+ base.Apply(context);
+
+ if (context.Eos)
+ {
+ // if zero bytes buffered, EOS is the end of a physical input video filestream
+ if (this.WorkingData.Count > 0)
+ {
+ if (!_fullTestFrame)
+ {
+ MMALLog.Logger.LogDebug("EOS reached for test frame.");
+
+ this.PrepareTestFrame();
+ _fullTestFrame = true;
+ }
+ else
+ {
+ MMALLog.Logger.LogDebug("Have full frame, invoking motion algorithm.");
+
+ // frameCounter++;
+ // frameTimer.Restart();
+
+ var detected = _motionConfig.MotionAlgorithm.DetectMotion(this, Metadata);
+
+ // frameTimer.Stop();
+ // totalElapsed += frameTimer.ElapsedMilliseconds;
+
+ if (detected)
+ {
+ this.LastDetectionEvent.Restart();
+
+ if (this.OnDetectEnabled)
+ {
+ _onDetect?.Invoke();
+ }
+ }
+
+ this.TryUpdateTestFrame();
+ }
+ }
+ else
+ {
+ MMALLog.Logger.LogDebug("EOS reached, no working data buffered");
+ }
+ }
+ }
+
+ ///
+ /// Resets the state of the buffers so that a new test frame is
+ /// stored. Also resets any state in the motion detection algorithm.
+ ///
+ public void ResetAnalyser()
+ {
+ this.FullFrame = false;
+ _fullTestFrame = false;
+ this.WorkingData = new List<byte>();
+ this.TestFrame = null;
+ this.CurrentFrame = null;
+
+ _testFrameAge.Reset();
+
+ _motionConfig.MotionAlgorithm.ResetAnalyser(this, this.Metadata);
+ }
+
+ ///
+ protected override void ProcessFirstFrame(ImageContext context)
+ {
+ base.ProcessFirstFrame(context);
+
+ this.PrepareTestFrame();
+
+ CellDiff = new int[CellRect.Length];
+
+ this.PrepareMask();
+
+ // Provide a copy with raw full-frame defaults that the algorithm can safely store and reuse
+ // if the algorithm is configured to output analysis frames to a capture handler's Apply method.
+ var fullFrameContextTemplate = new ImageContext
+ {
+ Eos = true,
+ IFrame = true,
+ Resolution = new Resolution(Metadata.Width, Metadata.Height),
+ Encoding = context.Encoding,
+ PixelFormat = context.PixelFormat,
+ Raw = context.Raw,
+ Pts = null,
+ Stride = Metadata.Stride
+ };
+
+ _motionConfig.MotionAlgorithm.FirstFrameCompleted(this, this.Metadata, fullFrameContextTemplate);
+ }
+
+ private void PrepareTestFrame()
+ {
+ this.TestFrame = this.WorkingData.ToArray();
+
+ if (_motionConfig.TestFrameInterval != TimeSpan.Zero)
+ {
+ _testFrameAge.Restart();
+ }
+ }
+
+ // Periodically replaces the test frame with the current frame, which helps when a scene
+ // changes over time (such as changing shadows throughout the day).
+ private void TryUpdateTestFrame()
+ {
+ // Exit if the update interval has not elapsed, or if there was recent motion
+ if (_motionConfig.TestFrameInterval == TimeSpan.Zero
+ || _testFrameAge.Elapsed < _motionConfig.TestFrameInterval
+ || (_motionConfig.TestFrameRefreshCooldown != TimeSpan.Zero
+ && this.LastDetectionEvent.Elapsed < _motionConfig.TestFrameRefreshCooldown))
+ {
+ return;
+ }
+
+ MMALLog.Logger.LogDebug($"Updating test frame after {_testFrameAge.ElapsedMilliseconds} ms");
+ this.PrepareTestFrame();
+ }
+
+ private void PrepareMask()
+ {
+ if (string.IsNullOrWhiteSpace(_motionConfig.MotionMaskPathname))
+ {
+ return;
+ }
+
+ using (var fs = new FileStream(_motionConfig.MotionMaskPathname, FileMode.Open, FileAccess.Read))
+ using (var mask = new Bitmap(fs))
+ {
+ // Verify it matches our frame dimensions
+ var maskBpp = Image.GetPixelFormatSize(mask.PixelFormat) / 8;
+ if (mask.Width != Metadata.Width || mask.Height != Metadata.Height || maskBpp != Metadata.Bpp)
+ {
+ throw new Exception("Motion-detection mask must match raw stream width, height, and format (bits per pixel)");
+ }
+
+ // Store the byte array
+ BitmapData bmpData = null;
+ try
+ {
+ bmpData = mask.LockBits(new Rectangle(0, 0, mask.Width, mask.Height), ImageLockMode.ReadOnly, mask.PixelFormat);
+ var pNative = bmpData.Scan0;
+ int size = bmpData.Stride * mask.Height;
+ FrameMask = new byte[size];
+ Marshal.Copy(pNative, FrameMask, 0, size);
+ }
+ finally
+ {
+ mask.UnlockBits(bmpData);
+ }
+ }
+ }
+ }
+}
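
A minimal usage sketch (not part of the patch) of how the new driver is expected to be driven; imageContext and the console callback are placeholders, and the capture-handler wiring lives elsewhere in this PR:

// usings assumed: System, MMALSharp.Processors.Motion
var driver = new FrameDiffDriver(
    new MotionConfig(new MotionAlgorithmRGBDiff()),
    onDetect: () => Console.WriteLine("Motion detected"));

// A capture handler forwards each raw frame to the driver:
// driver.Apply(imageContext);        // imageContext.Eos == true marks a complete frame
// driver.OnDetectEnabled = false;    // temporarily mute the callback without losing buffered state
// driver.ResetAnalyser();            // discard buffers and force a fresh test frame
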
diff --git a/src/MMALSharp.Processing/Processors/Motion/IMotionAlgorithm.cs b/src/MMALSharp.Processing/Processors/Motion/IMotionAlgorithm.cs
new file mode 100644
index 00000000..3244a2e6
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Motion/IMotionAlgorithm.cs
@@ -0,0 +1,55 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+using MMALSharp.Common;
+using MMALSharp.Handlers;
+
+namespace MMALSharp.Processors.Motion
+{
+ ///
+ /// Represents a frame-difference-based motion detection algorithm.
+ ///
+ public interface IMotionAlgorithm
+ {
+ ///
+ /// Activates analysis mode.
+ ///
+ /// Optional. If specified, the algorithm analyses each frame, marking cell corners,
+ /// outlining cells with motion, and altering the output to grayscale highlights of the calculated diff per pixel.
+ void EnableAnalysis(IOutputCaptureHandler handler);
+
+ ///
+ /// Deactivates analysis mode.
+ ///
+ void DisableAnalysis();
+
+ ///
+ /// Invoked after the driver's TestFrame buffer is available
+ /// for the first time and frame metadata has been collected. Allows the algorithm
+ /// to modify the test frame, prepare matching local buffers, etc.
+ ///
+ /// The FrameDiffDriver invoking this method.
+ /// Properties of the frame.
+ /// A sample context object which the algorithm should store and feed to a capture handler if analysis is enabled.
+ void FirstFrameCompleted(FrameDiffDriver driver, FrameAnalysisMetadata metadata, ImageContext contextTemplate);
+
+ ///
+ /// Invoked when the FrameDiffDriver has a full test frame and a
+ /// new full comparison frame available.
+ ///
+ /// The FrameDiffDriver invoking this method.
+ /// Properties of the frame.
+ /// Indicates whether motion was detected.
+ bool DetectMotion(FrameDiffDriver driver, FrameAnalysisMetadata metadata);
+
+ ///
+ /// Invoked when the FrameDiffDriver has been reset. The algorithm should also
+ /// reset stateful data, if any.
+ ///
+ /// The FrameDiffDriver invoking this method.
+ /// Properties of the frame.
+ void ResetAnalyser(FrameDiffDriver driver, FrameAnalysisMetadata metadata);
+ }
+}
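
A custom algorithm only has to implement these five members. A hypothetical no-op sketch is shown below, assuming the same namespaces as the new files in this PR; a real implementation would compare driver.TestFrame against driver.CurrentFrame inside DetectMotion:

using MMALSharp.Common;
using MMALSharp.Handlers;

namespace MMALSharp.Processors.Motion
{
    // Never reports motion; illustrates the shape of the contract only.
    public class NullMotionAlgorithm : IMotionAlgorithm
    {
        public void EnableAnalysis(IOutputCaptureHandler handler) { }

        public void DisableAnalysis() { }

        public void FirstFrameCompleted(FrameDiffDriver driver, FrameAnalysisMetadata metadata, ImageContext contextTemplate) { }

        public bool DetectMotion(FrameDiffDriver driver, FrameAnalysisMetadata metadata) => false;

        public void ResetAnalyser(FrameDiffDriver driver, FrameAnalysisMetadata metadata) { }
    }
}
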
diff --git a/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmBase.cs b/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmBase.cs
new file mode 100644
index 00000000..ddc16701
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmBase.cs
@@ -0,0 +1,80 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+namespace MMALSharp.Processors.Motion
+{
+ ///
+ /// Utilities for derived motion algorithm classes.
+ ///
+ public abstract class MotionAlgorithmBase
+ {
+ ///
+ /// Highlights a motion detection cell, typically to indicate a threshold was tripped.
+ ///
+ /// Red channel of the highlight RGB color
+ /// Green channel of the highlight RGB color
+ /// Blue channel of the highlight RGB color
+ /// The FrameDiffDriver containing the buffer
+ /// The FrameAnalysisMetadata structure with frame properties
+ /// The array index of the cell to highlight
+ /// The frame buffer to draw into
+ protected void HighlightCell(byte r, byte g, byte b, FrameDiffDriver driver, FrameAnalysisMetadata metadata, int index, byte[] buffer)
+ {
+ for (int x = driver.CellRect[index].X; x < driver.CellRect[index].X + driver.CellRect[index].Width; x++)
+ {
+ var y = driver.CellRect[index].Y;
+ var i = (x * metadata.Bpp) + (y * metadata.Stride);
+ buffer[i] = r;
+ buffer[i + 1] = g;
+ buffer[i + 2] = b;
+ y += driver.CellRect[index].Height - 1;
+ i = (x * metadata.Bpp) + (y * metadata.Stride);
+ buffer[i] = r;
+ buffer[i + 1] = g;
+ buffer[i + 2] = b;
+ }
+
+ for (int y = driver.CellRect[index].Y; y < driver.CellRect[index].Y + driver.CellRect[index].Height; y++)
+ {
+ var x = driver.CellRect[index].X;
+ var i = (x * metadata.Bpp) + (y * metadata.Stride);
+ buffer[i] = r;
+ buffer[i + 1] = g;
+ buffer[i + 2] = b;
+ x += driver.CellRect[index].Width - 1;
+ i = (x * metadata.Bpp) + (y * metadata.Stride);
+ buffer[i] = r;
+ buffer[i + 1] = g;
+ buffer[i + 2] = b;
+ }
+ }
+
+ ///
+ /// Draws a filled block into the frame buffer. Can be used as a visual indicator of internal app state.
+ ///
+ /// Red channel of the highlight RGB color
+ /// Green channel of the highlight RGB color
+ /// Blue channel of the highlight RGB color
+ /// Left column of the block
+ /// Right column of the block
+ /// Top row of the block
+ /// Bottom row of the block
+ /// The frame buffer to draw into
+ /// The FrameAnalysisMetadata structure with frame properties
+ protected void DrawIndicatorBlock(byte r, byte g, byte b, int x1, int x2, int y1, int y2, byte[] buffer, FrameAnalysisMetadata metrics)
+ {
+ for (int x = x1; x <= x2; x++)
+ {
+ for (int y = y1; y <= y2; y++)
+ {
+ var i = (x * metrics.Bpp) + (y * metrics.Stride);
+ buffer[i] = r;
+ buffer[i + 1] = g;
+ buffer[i + 2] = b;
+ }
+ }
+ }
+ }
+}
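
Both helpers use the same byte-offset arithmetic as the detection loops: for a raw frame, pixel (x, y) begins at (x * Bpp) + (y * Stride). A quick worked example with illustrative values only (RGB24, no row padding assumed):

int bpp = 3;                 // bytes per pixel for RGB24
int stride = 640 * bpp;      // bytes per row; real streams may pad this
int x = 10, y = 4;
int index = (x * bpp) + (y * stride);   // 30 + 7680 = 7710
// buffer[index], buffer[index + 1], buffer[index + 2] hold that pixel's R, G and B bytes
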
diff --git a/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmRGBDiff.cs b/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmRGBDiff.cs
new file mode 100644
index 00000000..804b6f52
--- /dev/null
+++ b/src/MMALSharp.Processing/Processors/Motion/MotionAlgorithmRGBDiff.cs
@@ -0,0 +1,209 @@
+//
+// Copyright (c) Ian Auty and contributors. All rights reserved.
+// Licensed under the MIT License. Please see LICENSE.txt for License info.
+//
+
+using MMALSharp.Common;
+using MMALSharp.Handlers;
+using System;
+using System.Threading.Tasks;
+
+namespace MMALSharp.Processors.Motion
+{
+ ///
+ /// A motion detection algorithm based on per-pixel RGB differencing.
+ ///
+ public class MotionAlgorithmRGBDiff : MotionAlgorithmBase, IMotionAlgorithm
+ {
+ private ThreadSafeParameters _parameters = default;
+
+ private int _cellPixelPercentage;
+ private int _cellCountThreshold;
+
+ private ImageContext _fullRawFrameImageContext;
+ private IOutputCaptureHandler _outputHandler;
+ private byte[] _analysisBuffer;
+
+ ///
+ /// Constructor.
+ ///
+ /// The minimum RGB difference to indicate the pixel has changed. Maximum value is 765 (full white to full black).
+ /// Percentage of pixels in each cell to mark the cell as changed.
+ /// Minimum number of cells changed to trigger motion detection.
+ public MotionAlgorithmRGBDiff(int rgbThreshold = 200, int cellPixelPercentage = 50, int cellCountThreshold = 20)
+ {
+ // RGB 255 x 3 = 765
+ if (rgbThreshold > 765)
+ {
+ throw new ArgumentException("Maximum RGB Threshold value is 765");
+ }
+
+ // Store this in the thread-safe struct passed to CheckDiff
+ _parameters.RGBThreshold = rgbThreshold;
+
+ // Can't calculate actual pixels until we get metadata in FirstFrameCompleted
+ _cellPixelPercentage = cellPixelPercentage;
+
+ // Not used in the parallel processing stage, store locally
+ _cellCountThreshold = cellCountThreshold;
+
+ _parameters.AnalysisMode = false;
+ }
+
+ ///
+ public void EnableAnalysis(IOutputCaptureHandler handler = null)
+ {
+ _parameters.AnalysisMode = true;
+ _outputHandler = handler;
+ }
+
+ ///
+ public void DisableAnalysis()
+ {
+ _parameters.AnalysisMode = false;
+ }
+
+ ///
+ public void FirstFrameCompleted(FrameDiffDriver driver, FrameAnalysisMetadata metadata, ImageContext contextTemplate)
+ {
+ _fullRawFrameImageContext = contextTemplate;
+
+ _parameters.CellPixelThreshold = (int)(metadata.CellWidth * metadata.CellHeight * (_cellPixelPercentage / 100f));
+
+ _analysisBuffer = new byte[driver.TestFrame.Length];
+ // Not necessary for this analysis, CheckDiff overwrites the buffer completely
+ // Array.Copy(driver.TestFrame, _analysisBuffer, _analysisBuffer.Length);
+
+ _fullRawFrameImageContext.Data = driver.TestFrame;
+ _outputHandler?.Process(_fullRawFrameImageContext);
+
+ _fullRawFrameImageContext.Data = _analysisBuffer;
+ }
+
+ ///
+ public void ResetAnalyser(FrameDiffDriver driver, FrameAnalysisMetadata metadata)
+ { } // not necessary for this algorithm
+
+ ///
+ public bool DetectMotion(FrameDiffDriver driver, FrameAnalysisMetadata metadata)
+ {
+ Parallel.ForEach(driver.CellDiff, (cell, loopState, loopIndex)
+ => CheckDiff(loopIndex, driver, metadata, _parameters));
+
+ int diff = 0;
+
+ for (int i = 0; i < driver.CellDiff.Length; i++)
+ {
+ diff += driver.CellDiff[i];
+
+ if (_parameters.AnalysisMode && driver.CellDiff[i] == 1)
+ {
+ HighlightCell(255, 0, 255, driver, metadata, i, _analysisBuffer);
+ }
+ }
+
+ var detected = diff >= _cellCountThreshold;
+
+ // Draw a bar across the frame; red indicates motion, green indicates no motion
+ if (_parameters.AnalysisMode && diff > 0)
+ {
+ int x2 = (int)(((diff * 2f) / (driver.CellDiff.Length / 2f)) * (metadata.Width / 2f));
+ (byte r, byte g) = detected ? ((byte)255, (byte)0) : ((byte)0, (byte)255);
+ DrawIndicatorBlock(r, g, 0, 0, x2, 0, 10, _analysisBuffer, metadata);
+ }
+
+ if (_parameters.AnalysisMode)
+ {
+ _outputHandler?.Process(_fullRawFrameImageContext);
+ }
+
+ return detected;
+ }
+
+ private void CheckDiff(long cellIndex, FrameDiffDriver driver, FrameAnalysisMetadata metadata, ThreadSafeParameters parameters)
+ {
+ // FrameAnalysisMetadata and ThreadSafeParameters are structs passed by value, and all of their fields are value types, so each parallel iteration works on thread-safe copies
+
+ int diff = 0;
+ var rect = driver.CellRect[cellIndex];
+
+ int x2 = rect.X + rect.Width;
+ int y2 = rect.Y + rect.Height;
+
+ for (var col = rect.X; col < x2; col++)
+ {
+ for (var row = rect.Y; row < y2; row++)
+ {
+ var index = (col * metadata.Bpp) + (row * metadata.Stride);
+
+ // Disregard pixels that are fully black (RGB 0,0,0) in the mask bitmap
+ if (driver.FrameMask != null)
+ {
+ var rgbMask = driver.FrameMask[index] + driver.FrameMask[index + 1] + driver.FrameMask[index + 2];
+
+ if (rgbMask == 0)
+ {
+ continue;
+ }
+ }
+
+ byte r = driver.TestFrame[index];
+ byte g = driver.TestFrame[index + 1];
+ byte b = driver.TestFrame[index + 2];
+ int rgb1 = r + g + b;
+
+ r = driver.CurrentFrame[index];
+ g = driver.CurrentFrame[index + 1];
+ b = driver.CurrentFrame[index + 2];
+ int rgb2 = r + g + b;
+
+ int rgbDiff = Math.Abs(rgb2 - rgb1);
+ if (rgbDiff > parameters.RGBThreshold)
+ {
+ diff++;
+ }
+
+ if (!parameters.AnalysisMode)
+ {
+ // Early exit: the cell already exceeds its pixel threshold, record it and stop scanning
+ if (diff >= parameters.CellPixelThreshold)
+ {
+ driver.CellDiff[cellIndex] = 1;
+ return;
+ }
+ }
+ else
+ {
+ // No early exit for analysis purposes
+
+ // Output in grayscale based on strength of the diff (765 = 255 x 3)
+ r = Math.Min((byte)255, (byte)((rgbDiff / 765f) * 255.999f));
+ g = r;
+ b = r;
+
+ // Highlight cell corners
+ if ((col == rect.X || col == x2 - 1) && (row == rect.Y || row == y2 - 1))
+ {
+ r = 128;
+ g = 0;
+ b = 128;
+ }
+
+ _analysisBuffer[index] = r;
+ _analysisBuffer[index + 1] = g;
+ _analysisBuffer[index + 2] = b;
+ }
+ }
+ }
+
+ driver.CellDiff[cellIndex] = (diff >= parameters.CellPixelThreshold) ? 1 : 0;
+ }
+
+ private struct ThreadSafeParameters
+ {
+ // Only use value-type fields (no properties, no reference types)
+ public int RGBThreshold;
+ public int CellPixelThreshold;
+ public bool AnalysisMode;
+ }
+ }
+}
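
The three constructor arguments form a pipeline of thresholds: a pixel counts as changed when its summed RGB difference exceeds rgbThreshold, a cell is flagged when the percentage of changed pixels reaches cellPixelPercentage, and the frame reports motion when at least cellCountThreshold cells are flagged. A tuning sketch using the documented defaults; analysisHandler is a placeholder for whatever IOutputCaptureHandler the caller wants the analysis frames written to:

// usings assumed: MMALSharp.Processors.Motion
var algorithm = new MotionAlgorithmRGBDiff(
    rgbThreshold: 200,          // per pixel: summed R+G+B difference (maximum 765)
    cellPixelPercentage: 50,    // per cell: percentage of changed pixels needed to flag the cell
    cellCountThreshold: 20);    // per frame: flagged cells needed to report motion

// Optional: emit analysis frames (grayscale diff, cell corners, flagged-cell outlines)
// algorithm.EnableAnalysis(analysisHandler);
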
diff --git a/src/MMALSharp.Processing/Processors/Motion/MotionConfig.cs b/src/MMALSharp.Processing/Processors/Motion/MotionConfig.cs
index a0eb4de5..1b900744 100644
--- a/src/MMALSharp.Processing/Processors/Motion/MotionConfig.cs
+++ b/src/MMALSharp.Processing/Processors/Motion/MotionConfig.cs
@@ -13,9 +13,9 @@ namespace MMALSharp.Processors.Motion
public class MotionConfig
{
///
- /// The amount of change which will trigger a motion event.
+ /// Implements the logic used to detect motion.
///
- public int Threshold { get; set; }
+ public IMotionAlgorithm MotionAlgorithm { get; set; }
///
/// The frequency at which the test frame is updated. The test frame is the baseline against
@@ -23,6 +23,11 @@ public class MotionConfig
///
public TimeSpan TestFrameInterval { get; set; }
+ ///
+ /// The minimum duration with no motion detection events before the test frame will update.
+ ///
+ public TimeSpan TestFrameRefreshCooldown { get; set; }
+
///
/// The name of a BMP file to apply as a motion-detection mask. The file must match the raw stream's
/// width, height, and color depth. Black pixels (RGB 0,0,0) are not tested for motion.
@@ -32,13 +37,19 @@ public class MotionConfig
///
/// Creates a new instance of MotionConfig.
///
- /// Motion sensitivity threshold. The default is 130 (suitable for many indoor scenes).
+ /// An instance of the motion detection algorithm implementation.
/// Frequency at which the test frame is updated. The default is 10 seconds.
+ /// The minimum duration with no motion detection before the test frame updates. The default is 3 seconds.
/// Pathname to an optional motion-detection mask bitmap.
- public MotionConfig(int threshold = 130, TimeSpan testFrameInterval = default, string motionMaskPathname = null)
+ public MotionConfig(
+ IMotionAlgorithm algorithm,
+ TimeSpan testFrameInterval = default,
+ TimeSpan testFrameCooldown = default,
+ string motionMaskPathname = null)
{
- this.Threshold = threshold;
+ this.MotionAlgorithm = algorithm;
this.TestFrameInterval = testFrameInterval.Equals(TimeSpan.Zero) ? TimeSpan.FromSeconds(10) : testFrameInterval;
+ this.TestFrameRefreshCooldown = testFrameCooldown.Equals(TimeSpan.Zero) ? TimeSpan.FromSeconds(3) : testFrameCooldown;
this.MotionMaskPathname = motionMaskPathname;
}
}
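
For existing callers, the constructor change amounts to swapping the old frame-level threshold for an algorithm instance plus, optionally, the new cooldown. A rough before/after sketch; mask.bmp is a placeholder pathname and usings (System, MMALSharp.Processors.Motion) are assumed:

// Before: new MotionConfig(threshold: 130, TimeSpan.FromSeconds(10), "mask.bmp");

// After:
var config = new MotionConfig(
    new MotionAlgorithmRGBDiff(),
    testFrameInterval: TimeSpan.FromSeconds(10),
    testFrameCooldown: TimeSpan.FromSeconds(3),
    motionMaskPathname: "mask.bmp");   // optional; must match the raw stream's width, height and bits per pixel
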
diff --git a/src/MMALSharp/MMALSharp.csproj b/src/MMALSharp/MMALSharp.csproj
index 5a3bac77..1c96e65a 100644
--- a/src/MMALSharp/MMALSharp.csproj
+++ b/src/MMALSharp/MMALSharp.csproj
@@ -17,7 +17,8 @@
0.7.0
..\..\StyleCop.Analyzers.ruleset
bin\$(Configuration)\$(TargetFramework)\$(AssemblyName).xml
- {47af9428-8246-4267-bc76-5fc648848e40}
+ {47af9428-8246-4267-bc76-5fc648848e40}
+
True
diff --git a/src/MMALSharp/Ports/Outputs/OutputPort.cs b/src/MMALSharp/Ports/Outputs/OutputPort.cs
index 8fc01b7b..3f16b6f2 100644
--- a/src/MMALSharp/Ports/Outputs/OutputPort.cs
+++ b/src/MMALSharp/Ports/Outputs/OutputPort.cs
@@ -33,7 +33,7 @@ internal set
this.Height = value.Pad().Height;
}
}
-
+
///
/// Creates a new instance of .
///
@@ -278,7 +278,7 @@ public void Start()
this.Trigger = new TaskCompletionSource<bool>();
this.Enable();
}
-
+
///
/// The native callback MMAL passes buffer headers to.
///
@@ -290,11 +290,11 @@ internal virtual void NativeOutputPortCallback(MMAL_PORT_T* port, MMAL_BUFFER_HE
{
MMALLog.Logger.LogDebug($"{this.Name}: In native output callback");
}
-
+
var bufferImpl = new MMALBufferImpl(buffer);
bufferImpl.PrintProperties();
-
+
var failed = bufferImpl.AssertProperty(MMALBufferProperties.MMAL_BUFFER_HEADER_FLAG_TRANSMISSION_FAILED);
var eos = bufferImpl.AssertProperty(MMALBufferProperties.MMAL_BUFFER_HEADER_FLAG_FRAME_END) ||
@@ -302,11 +302,11 @@ internal virtual void NativeOutputPortCallback(MMAL_PORT_T* port, MMAL_BUFFER_HE
this.ComponentReference.ForceStopProcessing ||
bufferImpl.Length == 0;
- if ((bufferImpl.CheckState() && bufferImpl.Length > 0 && !eos && !failed && !this.Trigger.Task.IsCompleted) || (eos && !this.Trigger.Task.IsCompleted))
+ if ((bufferImpl.CheckState() && bufferImpl.Length > 0 && !eos && !failed && !this.Trigger.Task.IsCompleted) || (eos && !this.Trigger.Task.IsCompleted && bufferImpl.Length > 0))
{
this.CallbackHandler.Callback(bufferImpl);
}
-
+
// Ensure we release the buffer before any signalling or we will cause a memory leak due to there still being a reference count on the buffer.
this.ReleaseBuffer(bufferImpl, eos);
@@ -314,7 +314,7 @@ internal virtual void NativeOutputPortCallback(MMAL_PORT_T* port, MMAL_BUFFER_HE
if (eos || failed)
{
MMALLog.Logger.LogDebug($"{this.Name}: End of stream. Signaling completion...");
-
+
Task.Run(() => { this.Trigger.SetResult(true); });
}
}