// javaCamViewer project — javaCamDetector.cs
// All source code is GNU General Public License (GPL)
namespace javaCamViewer
{
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Reflection;
using System.Threading;
using Accord.Vision.Detection;
using Accord.Vision.Detection.Cascades;
using AForge.Imaging;
using AForge.Imaging.Filters;
/// <summary>
/// javaCamDetector
/// </summary>
public class javaCamDetector : IMotionDetector
{
    // Delay (ms) before motion highlighting (re)arms after a reset.
    private const int startMotionDelay = 10000; // 10 seconds
    // Remaining arm-delay countdown: -1 = not started, 0 = armed, >0 = counting down.
    private int currentDelay = -1;
    // Timestamp of the previous processed frame; MinValue until the first frame.
    private DateTime lastTime = DateTime.MinValue;
    // Per-pixel difference threshold used by the binary threshold filter.
    private const int pixelChangeThreshold = 33;
    // Blob size limits (pixels) for what counts as a motion object.
    private const int minMotionObjectWidth = 65;
    private const int minMotionObjectHeight = 55;
    private const int maxMotionObjectWidth = 305;
    private const int maxMotionObjectHeight = 285;
    // At most this many blobs are drawn/logged per frame.
    private const int maxBlobObjectsToHighlight = 4;

    // Image-processing pipeline: grayscale + pixellate, then difference vs. background,
    // threshold, and blob extraction.
    private IFilter grayscaleFilter = Grayscale.CommonAlgorithms.BT709;
    private IFilter pixellateFilter = new Pixellate( );
    private Difference differenceFilter = new Difference( );
    private Threshold thresholdFilter = new Threshold( pixelChangeThreshold );
    private MoveTowards moveTowardsFilter = new MoveTowards( );
    private FiltersSequence processingFilter1 = new FiltersSequence( );
    private BlobCounter blobCounter = new BlobCounter( );

    // Haar-cascade face detector. NOTE(review): shared by up to MAX_THREADCOUNT worker
    // threads; Accord's HaarObjectDetector is not documented as thread-safe — confirm.
    private HaarObjectDetector _haarDetector;
    private readonly HaarCascade _haaRclassifier = new FaceHaarCascade();

    private Bitmap backgroundFrame;         // slowly-updated reference frame
    private BitmapData bitmapData;
    private int counter = 0;                // frame counter for background update cadence
    private bool calculateMotionLevel = false;
    private int width;                      // image width
    private int height;                     // image height
    private int pixelsChanged;              // approximate count of changed pixels
    private StreamWriter debugWriter;
    private bool faceDetectionEnable = false;
    private EventHandler alertFaceDetectedFunc;
    private List<Thread> _threadDetectFaces = new List<Thread>();
    private const int MAX_THREADCOUNT = 6;  // cap on concurrent face-detection threads
    private const int face_MinSize = 50;    // minimum face size (pixels) for the cascade

    /// <summary>Debug stream writer; set to null automatically if a write fails.</summary>
    public StreamWriter DebugWriter
    {
        get { return debugWriter; }
        set { debugWriter = value; }
    }

    /// <summary>Whether to calculate the motion level for each frame.</summary>
    public bool MotionLevelCalculation
    {
        get { return calculateMotionLevel; }
        set { calculateMotionLevel = value; }
    }

    /// <summary>
    /// Motion level — fraction of the frame covered by highlighted motion blobs.
    /// Only meaningful after the first frame has set width/height.
    /// </summary>
    public double MotionLevel
    {
        get { return (double) pixelsChanged / ( width * height ); }
    }

    /// <summary>Whether to run the additional face-detection pass on motion frames.</summary>
    public bool EnableFaceDetection
    {
        get { return faceDetectionEnable; }
        set { faceDetectionEnable = value; }
    }

    /// <summary>
    /// Handler invoked when faces are found. The boxed face count is passed as the
    /// "sender" argument (preserved quirk of the original design).
    /// </summary>
    public EventHandler AlertFaceDetected
    {
        get { return alertFaceDetectedFunc; }
        set { alertFaceDetectedFunc = value; }
    }

    /// <summary>Initializes the filter pipeline, blob counter and Haar detector.</summary>
    public javaCamDetector()
    {
        processingFilter1.Add( grayscaleFilter );
        processingFilter1.Add( pixellateFilter );

        blobCounter.MinWidth = minMotionObjectWidth;
        blobCounter.MinHeight = minMotionObjectHeight;
        blobCounter.MaxWidth = maxMotionObjectWidth;
        blobCounter.MaxHeight = maxMotionObjectHeight;
        blobCounter.FilterBlobs = true;
        blobCounter.ObjectsOrder = ObjectsOrder.Size;

        _haarDetector = CreateHaarDetector();
    }

    // Builds a fresh Haar face detector with the class's fixed settings.
    // Shared by the constructor and Reset() so the two stay consistent.
    private HaarObjectDetector CreateHaarDetector()
    {
        HaarObjectDetector detector = new HaarObjectDetector(
            _haaRclassifier, face_MinSize, ObjectDetectorSearchMode.NoOverlap,
            1.2f, ObjectDetectorScalingMode.SmallerToGreater );
        detector.Channel = 0;
        return detector;
    }

    /// <summary>Resets the detector to its initial state.</summary>
    public void Reset( )
    {
        if ( backgroundFrame != null )
        {
            backgroundFrame.Dispose( );
            backgroundFrame = null;
        }
        counter = 0;
        currentDelay = -1;
        lastTime = DateTime.MinValue;
        ResetThreads();
        _haarDetector = CreateHaarDetector();
    }

    /// <summary>
    /// Processes a new video frame: updates the background model, finds motion blobs,
    /// draws highlight rectangles onto <paramref name="image"/>, optionally queues
    /// face detection, and maintains the start-up arm delay.
    /// </summary>
    /// <param name="image">Current frame; motion rectangles are drawn onto it in place.</param>
    public void ProcessFrame( ref Bitmap image )
    {
        if ( backgroundFrame == null )
        {
            // First frame: capture it as the initial background and record dimensions.
            backgroundFrame = processingFilter1.Apply( image );
            width = image.Width;
            height = image.Height;
            return;
        }

        // FIX: default to an empty array instead of null so a filter failure
        // no longer causes a NullReferenceException further down.
        Rectangle[] rects = new Rectangle[0];
        Bitmap tmpImage = null;
        try
        {
            // Grayscale + pixellate the current frame.
            tmpImage = processingFilter1.Apply( image );

            // Every second frame, move the background slightly towards the
            // current frame so gradual scene changes are absorbed.
            if ( ++counter == 2 )
            {
                counter = 0;
                moveTowardsFilter.OverlayImage = tmpImage;
                moveTowardsFilter.ApplyInPlace( backgroundFrame );
            }

            differenceFilter.OverlayImage = backgroundFrame;
            bitmapData = tmpImage.LockBits( new Rectangle( 0, 0, width, height ),
                ImageLockMode.ReadWrite, PixelFormat.Format8bppIndexed );
            try
            {
                // |frame - background| -> binary threshold -> blob extraction.
                differenceFilter.ApplyInPlace( bitmapData );
                thresholdFilter.ApplyInPlace( bitmapData );
                blobCounter.ProcessImage( bitmapData );
                rects = blobCounter.GetObjectsRectangles();
            }
            finally
            {
                // FIX: always unlock, even when a filter throws.
                tmpImage.UnlockBits( bitmapData );
            }
        }
        catch ( Exception ex )
        {
            System.Diagnostics.Debug.WriteLine( "=============: " + ex.Message );
        }
        finally
        {
            // FIX: tmpImage previously leaked when an exception occurred.
            if ( tmpImage != null ) tmpImage.Dispose();
        }

        // NOTE(review): when motion-level calculation is disabled (the default) this
        // early return also skips highlighting, face detection and the arm-delay
        // bookkeeping below. Preserved as-is, but the redundant inner check that the
        // original carried suggests this return may be unintentional — confirm.
        if ( !calculateMotionLevel ) return;

        pixelsChanged = 0;
        if ( rects.Length > 0 && currentDelay == 0 )
        {
            // Clone BEFORE drawing so the face detector sees the un-annotated frame.
            // (Replaces the original's clone-of-a-clone with a single clone.)
            Bitmap faceFrame = faceDetectionEnable ? (Bitmap) image.Clone() : null;

            using ( Graphics g = Graphics.FromImage( image ) )
            using ( Pen pen = new Pen( Color.Red, 1 ) )
            {
                int n = 0;
                foreach ( Rectangle rc in rects )
                {
                    g.DrawRectangle( pen, rc );
                    // A little inaccurate (rectangle area, not changed pixels), but fast.
                    pixelsChanged += rc.Width * rc.Height;
                    if ( ++n == maxBlobObjectsToHighlight ) break;
                }
            }

            if ( faceFrame != null )
            {
                QueueFaceDetection( faceFrame );
            }

            WriteDebugLine( rects );
        }
        else if ( currentDelay > 0 )
        {
            // FIX: use TotalMilliseconds — the original used TimeSpan.Milliseconds,
            // which is only the 0-999 ms component, so gaps over one second were
            // almost entirely ignored and the 10 s countdown ran far too long.
            currentDelay -= (int) DateTime.Now.Subtract( lastTime ).TotalMilliseconds;
            if ( currentDelay <= 0 )
            {
                // Countdown finished: drop the background so it is re-captured,
                // and mark the detector armed.
                if ( backgroundFrame != null )
                {
                    backgroundFrame.Dispose();
                    backgroundFrame = null;
                }
                counter = 0;
                currentDelay = 0;
            }
        }
        else if ( lastTime == DateTime.MinValue )
        {
            // Very first processed frame: start the arm delay.
            currentDelay = startMotionDelay;
        }
        lastTime = DateTime.Now;
    }

    // Starts a background face-detection thread for the given frame if a worker
    // slot is free; otherwise disposes the frame (it is simply dropped).
    // Ownership of 'frame' transfers to the worker thread on success.
    private void QueueFaceDetection( Bitmap frame )
    {
        lock ( _threadDetectFaces )
        {
            if ( _threadDetectFaces.Count < MAX_THREADCOUNT )
            {
                Thread processor = new Thread( new ParameterizedThreadStart( DetectFaces ) );
                processor.IsBackground = true;
                processor.Start( frame );
                _threadDetectFaces.Add( processor );
                return;
            }
        }
        // Thread pool full — drop this frame.
        frame.Dispose();
    }

    // Writes one "count:{[x,y],[w,h]},..." line describing the highlighted blobs.
    // On any write failure the debug writer is disabled (original behavior).
    private void WriteDebugLine( Rectangle[] rects )
    {
        if ( debugWriter == null ) return;
        try
        {
            String line = ( ( rects.Length > maxBlobObjectsToHighlight ) ? maxBlobObjectsToHighlight : rects.Length ).ToString() + ":";
            int n = 0;
            foreach ( Rectangle rc in rects )
            {
                line += "{[" + rc.X.ToString() + "," + rc.Y.ToString() + "],[" + rc.Width.ToString() + "," + rc.Height.ToString() + "]},";
                if ( ++n == maxBlobObjectsToHighlight ) break;
            }
            if ( line.EndsWith( "," ) ) line = line.Substring( 0, line.Length - 1 );
            debugWriter.WriteLine( line );
        }
        catch
        {
            debugWriter = null;
        }
    }

    // Worker-thread entry point: runs the Haar cascade on the supplied frame and
    // fires AlertFaceDetected when at least one face is found. Best-effort: all
    // detection failures are swallowed.
    private void DetectFaces( object frame )
    {
        try
        {
            Rectangle[] overlays = null;
            Bitmap bmpFace = (Bitmap) frame;
            try
            {
                // FIX: the original discarded the result of Apply(), so the detector
                // was fed the color frame; actually use the grayscale conversion.
                Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply( bmpFace );
                try
                {
                    overlays = _haarDetector.ProcessFrame( gray );
                }
                finally
                {
                    gray.Dispose();
                }
            }
            catch
            {
                // best effort — ignore detection failures
            }
            finally
            {
                bmpFace.Dispose();
            }

            if ( overlays != null && overlays.Length > 0 )
            {
                EventHandler handler = alertFaceDetectedFunc;
                // Boxed face count passed as "sender" (preserved original contract).
                if ( handler != null ) handler.Invoke( overlays.Length, null );
            }
        }
        finally
        {
            // FIX: remove ourselves even if something above threw — otherwise the
            // dead thread would occupy a worker slot until the next Reset().
            lock ( _threadDetectFaces )
            {
                _threadDetectFaces.Remove( Thread.CurrentThread );
            }
        }
    }

    // Aborts and clears any outstanding face-detection threads.
    // NOTE(review): Thread.Abort is unreliable and obsolete on modern .NET;
    // a cooperative cancellation flag would be safer — kept for compatibility.
    private void ResetThreads()
    {
        lock ( _threadDetectFaces )
        {
            for ( int i = _threadDetectFaces.Count - 1; i >= 0; i-- )
            {
                try
                {
                    Thread processor = _threadDetectFaces[i];
                    try
                    {
                        if ( processor != null && processor.IsAlive )
                        {
                            processor.Abort();
                        }
                    }
                    catch
                    {
                        // ignore abort failures
                    }
                    _threadDetectFaces.RemoveAt( i );
                }
                catch
                {
                    // ignore bookkeeping failures
                }
            }
        }
    }
}
}
// End of javaCamViewer/javaCamDetector.cs