-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.cpp
3792 lines (2964 loc) · 150 KB
/
main.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
///*
/// \title Zebrafish tracker used in combination with darkfield IR illumination
/// \date Jun 2018
/// \author Konstantinos Lagogiannis
/// \version 1.0
/// \brief Video Analysis software to track zebrafish behaviour from images obtained at high frame rates (>350fps) using darkfield IR
/// illumination(IR light-ring) on a 35mm petridish containing a single animal.
///
/// \note
/// * Chooses input video file, then on the second dialogue choose the text file to export track info in CSV format.
/// * The green box defines the region over which the larvae are counted-tracked and recorded to file.
/// * Once the video begins to show, use to left mouse clicks to define a new region in the image over which you want to count the larvae.
/// * Press p to pause Image. once paused:
/// * s to save snapshots in CSV outdir pics subfolder.
/// * 2 Left Clicks to define the 2 points of region-of interest for tracking.
/// * m to show the masked image of the larva against BG.
/// * t Start Tracking
/// * f toggle food tracking
/// * p to Pause
/// * r to UnPause/Run
/// * D to delete currently used template from cache
/// * R reset fish Spline
/// * W Toggle output to CSV file writing
/// * T to save current tracked region as new template
/// * M Manual Measure Distance (px) -
/// * E Manually Set Eye Angles
/// * F Manually set prey position (which is then tracked)
/// * q Exit Quit application
///*
///*
///
///* Dependencies : opencv3 (W/O CUDA ) QT5
///* /// \details
/// Heuristic optimizations:
/// * Detection of stopped Larva or loss of features from BG Subtraction - via mask correction
/// * Filter blobs and maintain separate lists for each class (food/fish)
/// * track blobs of different class (food/fish) separately so tracks do not interfere
/// * Second method of Ellipsoid fitting, using a fast algorithm on edge points
/// * Changes template Match region, wide for new blobs, narrow for known fish - Can track at 50fps (06/2018)
/// * Combines blob tracking with optic flow at the point of food particle (using Lucas-Kanade) to improve track of prey motion near fish
/// * Tail spine is tracked with both sequential intensity scanning and a variational approach fitting smoothed fish contour angle and length (estimates fish's tail size)
/// * Detect tail and Head points of candidate fish contours: extend tail mask to improve tail spine fitting /Use head pt to inform template matching search region for speed optimizing of larva tracing.
/// \remark OutputFiles
/// Data processing:
/// * Added Record of Food Count at regular intervals on each video in case, so that even if no fish is being tracked ROI
/// the evolution of prey Count in time can be observed. saveTracks outputs a count of prey numbers at a regular interval 1sec, it shows up with fishID 0
///
///
/// \bug MOG use under Multi-Processing gives a SegFault in OpenCL - Workaround: Added try block on MOG2, and then flag to switch off OpenCL.
/// \note Cmd line arguments: /zebraprey~_track --ModelBG=0 --SkipTracked=0 --PolygonROI=1
/// --invideofile=/media/extStore/ExpData/zebrapreyCap/AnalysisSet/AutoSet450fps_18-01-18/AutoSet450fps_18-01-18_WTLiveFed4Roti_3591_009.mp4
/// --outputdir=/media/extStore/kostasl/Dropbox/Calculations/zebrafishtrackerData/TrackerOnHuntEvents_UpTo22Feb/
///
///
/// \note Example: /zebraprey_track --ModelBG=0 --SkipTracked=0 --PolygonROI=1 --invideolist=VidFilesToProcessSplit1.txt --outputdir=/media/kostasl/Maxtor/KOSTAS/Tracked/
/// \todo * Add Learning to exclude large detected blobs that fail to be detected as fish - so as to stop fish detection failures
/// :added fishdetector class
/// \todo limit rendered tracking traces to most recent time - to improve visibility and tracking time.
///
/// \remarks * Using Kalman Filtering of Fish and GL filtering for Prey position
/// * Uses DNN trained model to classify blob as fish and locate head position - Template matching is then used to fix orientation of head inset for further feature detection
///
/// \bug Fish Blob fails to detected when running multiple instances (eg x4) of Tracker over list of video files.
////////
#include <config.h> // Tracker Constant Defines
#include <larvatrack.h>
#include <ellipse_detect.h>
#include <template_detect.h>
#include <zfttracks.h>
#include <fgmaskprocessing.h>
#include "eyesdetector.h"
#include "fishdetector.h"
#include <QtOpenGL/QtOpenGL> // Included so qmake selects correct lib location for these
#include <QtTest/QTest>
#include <errorhandlers.h> // My Custom Mem Fault Handling Functions and Debug
#include <random>
#include <gsl/gsl_rng.h>
#include <gsl/gsl_randist.h>
#include <string.h>
#include <cereal/archives/json.hpp> //Data Serialize of EyeDetector
#include <cereal/archives/xml.hpp> //Data Serialize of EyeDetector
#include <fstream>
#include <QFile>
#include <QDirIterator>
#include <QDir>
#include <QDebug>
//#include <QThread>
#include <QTime>
//Open CV
#include <opencv2/opencv_modules.hpp> //THe Cuda Defines are in here
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include "opencv2/core/utility.hpp"
//#include <opencv2/bgsegm.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/video/background_segm.hpp>
#include <opencv2/core/ocl.hpp> //For setting setUseOpenCL
/// CUDA //
/// #include <opencv2/opencv_modules.hpp> //THe Cuda Defines are in here
#if defined(USE_CUDA) && defined(HAVE_OPENCV_CUDAARITHM) && defined(HAVE_OPENCV_CUDAIMGPROC)
#include "opencv2/cudaimgproc.hpp"
#include "opencv2/cudaarithm.hpp"
#include <opencv2/core/cuda.hpp>
#include <opencv2/photo/cuda.hpp>
#include <opencv2/core/cuda_types.hpp>
#endif
#include <GUI/mainwindow.h>
///Curve Smoothing and Matching
#include <CSS/CurveCSS.h>
MainWindow* pwindow_main = nullptr;
// Custom RL optimization of eyeSegmentation and fitting
//init with 20 seg thres states , and 10 eye vergence states
EyesDetector* pRLEye = new EyesDetector(-5,15,-10,80); // RL For eye segmentation
//The fish ones are then revaluated using simple thresholding to obtain more accurate contours
fishModels vfishmodels; //Vector containing live fish models
zftblobs vfishblobs_pt; // Vector of Blob KeyPoints
foodModels vfoodmodels;
pointPairs vMeasureLines; //Point pairs defining line distances
trackerState gTrackerState;
int main(int argc, char *argv[])
{
gTimer.start();
// Get the rdbuf of clog.
// We will need it to reset the value before exiting.
auto old_rdbufclog = std::clog.rdbuf();
auto old_rdbufcerr = std::cerr.rdbuf();
qDebug() << fixed << qSetRealNumberPrecision(4);
installErrorHandlers();
QApplication app(argc, argv);
//QQmlApplicationEngine engine;
MainWindow window_main;
pwindow_main = &window_main;
/// Handle Command Line Parameters //
const cv::String keys =
"{help h usage ? | | print this help message}"
"{outputdir o | | Dir where To save sequence of images }"
"{invideofile v | | Behavioural Video file to analyse }"
"{invideolist f | | A text file listing full path to video files to process}"
"{startframe s | 1 | Video Will start by Skipping to this frame}"
"{stopframe p | 0 | Video Will stop at this frame / or override totalFrames if needed}"
"{startpaused P | 0 | Start tracking Paused On 1st Frame/Need to Run Manually}"
"{duration d | 0 | Number of frames to Track for starting from start frame}"
"{logtofile l | | Filename to save clog stream to }"
"{ModelBG b | 1 | Initiate BG modelling by running over scattered video frames to obtain Foreground mask}"
"{UseTemplateMatching T | 1 | After DNN Classifier, also use template matching to Detect orientation and position of larva (speed up if false)}" //bUseTemplateMatching
"{BGThreshold bgthres | 2 | Absolute grey value used to segment Fish from BG (combined with BGModel) (g_FGSegthresh)}"
"{HeadMaskVW hmw | 4 | Head Vertical mask width that separates eyes}"
"{HeadMaskHR hmh | 36 | Head horizontal posterior mask radius (eye threshold sampling arc)}"
"{SkipTracked t | 0 | Skip Previously Tracked Videos}"
"{PolygonROI r | 0 | Use pointArray for Custom ROI Region}"
"{CircleROIRadius cr | 512 | px radius for default centred ROI}"
"{ModelBGOnAllVids a | 1 | Only Update BGModel At start of vid when needed}"
"{FilterPixelNoise pn | 0 | Filter Pixel Noise During Tracking (Note:This has major perf impact so use only when necessary due to pixel noise. BGProcessing does it by default)}"
"{DisableOpenCL ocl | 0 | Disabling the use of OPENCL can avoid some SEG faults hit when running multiple trackers in parallel}"
"{EnableCUDA cuda | 0 | Use CUDA for MOG, and mask processing - if available }"
"{HideDataSource srcShow | 0 | Do not reveal datafile source, so user can label data blindly }"
"{EyeHistEqualization histEq | 0 | Use hist. equalization to enhance eye detection contrast }"
"{TrackFish ft | 1 | Track Fish not just the moving prey }"
"{MeasureMode M | 0 | Click 2 points to measure distance to prey}"
"{DNNModelFile T | /home/meyerlab/workspace/zebrafishtrack/tensorDNN/savedmodels/fishNet_loc/ | Location of Tensorflow model file used for classification}"
"{HuntEventsFile H | | csv data file with detected hunt events}"
;
///Parse Command line Args
cv::CommandLineParser parser(argc, argv, keys);
stringstream ssMsg;
ssMsg<<"Zebrafish Behaviour Tracker V0.5 Using Trained DNN Classifier"<< std::endl;
ssMsg<<"--------------------------" << std::endl;
ssMsg<<"Author : Konstantinos Lagogiannis 2017, King's College London"<<std::endl;
ssMsg<< "email: costaslag@gmail.com"<<std::endl;
ssMsg<<"./zebraprey_track <outfolder> <inVideoFile> <startframe=1> <stopframe=0> <duration=inf>"<<std::endl;
ssMsg<<"(note: output folder is automatically generated when absent)"<<std::endl;
ssMsg << "Example: \n Use checkFilesProcessed.sh script to generate list of videos to processes then execute as : " << std::endl;
ssMsg << "./zebrafish_track -f=VidFilesToProcessSplit1.txt -o=/media/kostasl/extStore/kostasl/Dropbox/Calculations/zebrafishtrackerData/Tracked30-11-17/" << std::endl;
ssMsg << "-Make Sure QT can be found : use export LD_LIBRARY_PATH= path to Qt/5.11.1/gcc_64/lib/ " << std::endl;
ssMsg << "Double click on food item to start tracking it. Dbl click on Fish head to adjust Template position." << std::endl;
parser.about(ssMsg.str() );
if (parser.has("help") || parser.has("usage"))
{
parser.printMessage();
exit(0);
}
window_main.show();
gTrackerState.initGlobalParams(parser,gTrackerState.inVidFileNames);
pwindow_main->updateHuntEventTable(gTrackerState.vHuntEvents); //Update Hunt Events Table
//If No video Files have been loaded then Give GUI to User //
if (gTrackerState.inVidFileNames.empty())
gTrackerState.inVidFileNames =QFileDialog::getOpenFileNames(nullptr, "Select videos to Process",gTrackerState.gstrinDirVid.c_str(),
"Video file (*.mpg *.avi *.mp4 *.h264 *.mkv *.tiff *.png *.jpg *.pgm)", nullptr, nullptr);
// get the applications dir path and expose it to QML
//engine.load(QUrl(QStringLiteral("qrc:///main.qml")));
#ifdef _ZTFDEBUG_
cv::namedWindow("Debug D",CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
cv::namedWindow("Debug A",CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
cv::namedWindow("Debug B",CV_WINDOW_NORMAL | CV_WINDOW_KEEPRATIO);
frameDebugA = cv::Mat::zeros(640, 480, CV_8U);
frameDebugB = cv::Mat::zeros(640, 480, CV_8U);
frameDebugD = cv::Mat::zeros(640, 480, CV_8U);
#endif
frameDebugC = cv::Mat::zeros(640, 480, CV_8U);
/// create Background Subtractor objects
//(int history=500, double varThreshold=16, bool detectShadows=true
//Init MOG BG substractor
initBGSubstraction();
if (gTrackerState.bUseTemplateMatching){
pwindow_main->LogEvent(QString("<<Using Template matching along DNN classifier. Loading samples into cache:"));
int iLoadedTemplates = initDetectionTemplates();
pwindow_main->nFrame = 1;
pwindow_main->LogEvent(QString::number(iLoadedTemplates) + QString("# Templates Loaded "));
}
/// Run Unit Tests ///
// qDebug() << "<<< fishDetector Tests >>>";
// fishdetector::test();
// //testAngleDiff();
// std::cout << "Test FishNET DNN - Load FISH Image..." << std::endl;
// std::vector<cv::Mat> vtimg;
// cv::Mat imageA = cv::imread( "/home/kostasl/workspace/zebrafishtrack/tensorDNN/valid/fish/templ_HB40_LR_camB_Templ_51629.jpg", cv::IMREAD_UNCHANGED );
// vtimg.push_back(imageA);
// //fishdetector::testTFModelPrediction(image);
// std::cout << "Test FishNET DNN - Load NONFISH Image..." << std::endl;
// cv::Mat imageB = cv::imread( "/home/kostasl/workspace/zebrafishtrack/tensorDNN/test/nonfish/00219-308x0.jpg", cv::IMREAD_UNCHANGED );
// vtimg.push_back(imageB);
// fishdetector::testTFModelPrediction(vtimg);
// qDebug() << "<<< fishDetector Tests Complete >>>";
// cv::waitKey(1000);
// resize the image to fit the model's input:
/// Start Tracking of Video Files ///
try{
//app.exec();
std::clog << gTimer.elapsed()/60000.0 << " >>> Start frame: " << gTrackerState.uiStartFrame << " StopFrame: " << gTrackerState.uiStopFrame << " <<<<<<<<<" << std::endl;
trackVideofiles(window_main, QString::fromStdString(gTrackerState.gstroutDirCSV),
gTrackerState.inVidFileNames,
gTrackerState.uiStartFrame,gTrackerState.uiStopFrame);
}catch (char *e)
{
//printf("Exception Caught: %s\n",e);
qDebug() << "[Error] >>> Exception Caught while processing: " << outfishdatafile.fileName();
std::cerr << "[Error] Memory Allocation Error :" << e;
//std::cerr << "Memory Allocation Error! - Exiting";
std::cerr << "[Error] Close And Delete Current output file: " << outfishdatafile.fileName().toStdString() ;
closeDataFile(outfishdatafile);
removeDataFile(outfishdatafile);
app.quit();
std::exit(EXIT_FAILURE);
return EXIT_FAILURE;
}
//destroy GUI windows
//cv::waitKey(0); // Wait for a keystroke in the window
//pMOG2->getBackgroundImage();
//pMOG->~BackgroundSubtractor();
//pMOG2->~BackgroundSubtractor();
//pKNN->~BackgroundSubtractor();
//pGMG->~BackgroundSubtractor();
//Empty The Track and blob vectors
//cvb::cvReleaseTracks(tracks);
//cvb::cvReleaseBlobs(blobs);
std::cout << "Total processing time : mins " << gTimer.elapsed()/60000.0 << std::endl;
std::clog << "Total processing time : mins " << gTimer.elapsed()/60000.0 << std::endl;
///Clean Up //
frameDebugA.release();
frameDebugB.release();
frameDebugC.release();
frameDebugD.release();
///* Create Morphological Kernel Elements used in processFrame *///
kernelClose.release();
kernelOpenfish.release();
kernelDilateMOGMask.release();
kernelOpen.release();
gFishTemplateCache.release();
// Save State Space of Reinforcement Learning
pRLEye->SaveState();
window_main.LogEvent("[INFO] Saved EyeDetector State.");
delete pRLEye;//Destroy EyeSeg Assistant
//gFishTemplateCache.deallocate();
//app.quit();
window_main.close();
cv::destroyAllWindows();
// Reset the rdbuf of clog.
std::clog.rdbuf(old_rdbufclog);
std::cerr.rdbuf(old_rdbufcerr);
app.quit();
//Catch Any Mem Alloc Error
///\note ever since I converted gFishCache to UMat, a deallocation error Is Hit - UMat was then Removed
/// This Is KNown But When OpenCL Is False https://github.com/opencv/opencv/issues/8693
std::exit(EXIT_SUCCESS);
return EXIT_SUCCESS;
}
unsigned int trackVideofiles(MainWindow& window_main,QString outputFileName,QStringList invideonames,unsigned int istartFrame = 0,unsigned int istopFrame = 0)
{
cv::Mat fgMask;
cv::Mat bgStaticMask;
QString invideoname = "*.mp4";
QString nextvideoname;
//Show Video list to process
//std::cout << "Video List To process:" <<std::endl;
if (!gTrackerState.bBlindSourceTracking)
{
window_main.LogEvent("Video List To process:");
for (int i = 0; i<invideonames.size(); ++i)
{
invideoname = invideonames.at(i);
//std::cout << "*" << invideoname.toStdString() << std::endl;
window_main.LogEvent(invideoname );
}
}
//Go through Each Image/Video - Hold Last Frame N , make it the start of the next vid.
for (int i = 0; i<invideonames.size() && !gTrackerState.bExiting; ++i)
{
//Empty Vector of Fish Models - and Reset ID Counter // gi_MaxFoodID = gi_MaxFishID = 1; - Done In Release
ReleaseFishModels(vfishmodels);
ReleaseFoodModels(vfoodmodels);
invideoname = invideonames.at(i);
nextvideoname = invideonames.at(std::min(invideonames.size()-1,i+1));
gTrackerState.gstrvidFilename = invideoname.toStdString(); //Global
QFileInfo vidFile(QString::fromStdString(gTrackerState.gstrvidFilename) );
gTrackerState.strHuntEventsDataFile = gTrackerState.gstroutDirCSV + "/" + vidFile.baseName().toStdString() + "_huntEvents.csv"; //Make Default file to export manually indicated hunt events
std::clog << gTimer.elapsed()/60000.0 << " Now Processing : "<< invideoname.toStdString() << " StartFrame: " << istartFrame << std::endl;
//cv::displayOverlay(gstrwinName,"file:" + invideoname.toStdString(), 10000 );
///Open Output File Check If We Skip Processed Files
if ( !openDataFile(outputFileName,invideoname,outfishdatafile) )
{
if (gTrackerState.bSkipExisting) //Failed Due to Skip Flag
continue; //Do Next File
}else
writeFishDataCSVHeader(outfishdatafile);
///Open Output File Check If We Skip Processed Files
if (openDataFile(outputFileName,invideoname,outfooddatafile,"_food") )
writeFoodDataCSVHeader(outfooddatafile);
else
pwindow_main->LogEvent("[Error] Cannot open tracked prey data file.");
// Removed If MOG Is not being Used Currently - Remember to Enable usage in enhanceMask if needed//
if ((gTrackerState.bUseBGModelling && gTrackerState.gbUpdateBGModel) || (gTrackerState.bUseBGModelling && gTrackerState.gbUpdateBGModelOnAllVids) )
{
//If BG Model Returns >1 frames
if (getBGModelFromVideo(bgStaticMask, window_main,invideoname,gTrackerState.outfilename,gTrackerState.MOGhistory))
{
//cv::dilate(bgStaticMask,bgStaticMask,kernelDilateMOGMask,cv::Point(-1,-1),1);
cv::morphologyEx(bgStaticMask,bgStaticMask,cv::MORPH_OPEN,kernelOpen,cv::Point(-1,-1),1); //
cv::bitwise_not ( bgStaticMask, bgStaticMask ); //Invert Accumulated MAsk TO Make it an Fg Mask
//Next Video File Most Likely belongs to the same Experiment / So Do not Recalc the BG Model
if (compString(invideoname,nextvideoname) < 3 && !gTrackerState.gbUpdateBGModelOnAllVids)
gTrackerState.gbUpdateBGModel = false; //Turn Off BG Updates
}
} // If modelling BG Prior To Starting the Video
//Next File Is Different Experiment, Update The BG
if (compString(invideoname,nextvideoname) > 2 )
gTrackerState.gbUpdateBGModel = true;
QFileInfo fiVidFile(invideoname);
if (gTrackerState.bBlindSourceTracking)
window_main.setWindowTitle("Labelling Hunt Event");
else
window_main.setWindowTitle("Tracking:" + fiVidFile.completeBaseName() );
window_main.nFrame = 0;
window_main.tickProgress(); //Update Slider
//if (bStaticAccumulatedBGMaskRemove) //Hide The PopUp
//cv::destroyWindow("Accumulated BG Model");
//Can Return 0 If DataFile Already Exists and bSkipExisting is true
uint ret = processVideo(bgStaticMask,window_main,invideoname,outfishdatafile,istartFrame,istopFrame);
if (ret == 0)
window_main.LogEvent(" [Error] Could not open Video file for last video");
if (ret == 1)
{
if (!gTrackerState.bSkipExisting)
std::cerr << gTimer.elapsed()/60000.0 << " Error Occurred Could not open data file for last video" << std::endl;
else
window_main.LogEvent(" Skipping previously processed Video."); // std::cerr << gTimer.elapsed()/60000.0 << " Error Occurred Could not process last video" << std::endl;
continue; //Do Next File
}
istartFrame = 1; //Reset So Next Vid Starts From The Beginnning
istopFrame = 0; //Rest So No Stopping On Next Video
} // For each Video File
return istartFrame;
}
/// \brief
void processFrame(MainWindow& window_main, const cv::Mat& frame, cv::Mat& bgStaticMask, unsigned int nFrame,
cv::Mat& outframe, cv::Mat& outframeHeadEyeDetected, cv::Mat& frameHead)
{
cv::Mat frame_gray,fgMask,fgFishMask,fgFishImgMasked,fgImgFrame;
cv::Mat fgFoodMask,bgROIMask;
//std::vector<cv::KeyPoint> ptFoodblobs;
zfdblobs ptFoodblobs;
vfishblobs_pt.clear();
//zftblobs ptFishblobs; //Now global
std::vector<std::vector<cv::Point> > fishbodycontours;
std::vector<cv::Vec4i> fishbodyhierarchy;
unsigned int nLarva = 0;
unsigned int nFood = 0;
//double dblRatioPxChanged = 0.0;
QString frameNumberString;
frameNumberString = QString::number(nFrame);
gTrackerState.uiCurrentFrame = nFrame;
assert(!frame.empty());
//For Morphological Filter
////cv::Size sz = cv::Size(3,3);
//frame.copyTo(inputframe); //Keep Original Before Painting anything on it
//update the background model
//OPEN CV 2.4
// dLearningRate is now Nominal value
// frame.copyTo(outframe); //Make Replicate On which we draw output
///DRAW ROI
if (gTrackerState.bRenderToDisplay)
drawAllROI(outframe);
if (gTrackerState.bMeasure2pDistance)
drawUserDefinedPoints(outframe);
/// DRAW ROI Mask
if (gTrackerState.bROIChanged)
{
bgROIMask = cv::Mat::zeros(frame.rows,frame.cols,CV_8UC1);
for (int i=0; i < gTrackerState.vRoi.size();i++ )
gTrackerState.vRoi.at(i).drawMask(bgROIMask);
}
//lplframe = frameMasked; //Convert to legacy format
//cvb::CvBlobs blobs;
///DO Tracking
if (gTrackerState.bTracking)
{
//Simple Solution was to Use Contours To measure LUarvae
//cvtColo frame_grey
//Draw THe fish Masks more accuratelly by threshold detection - Enhances full fish body detection
// enhanceFishMask(outframe, fgMask,fishbodycontours,fishbodyhierarchy);// Add fish Blobs
if (frame.channels() > 2)
cv::cvtColor( frame, frame_gray, cv::COLOR_BGR2GRAY);
else
frame.copyTo(frame_gray);
// Save COpy as Last Frame
gframeCurrent.copyTo(gframeLast);
frame_gray.copyTo(gframeCurrent); //Copy To global Frame
/// DO BG-FG SEGMENTATION MASKING and processing///
/// \brief processMasks - Returns FG mask And Image -
if (gTrackerState.bPaused) //Stop Mask Learning If Paused on the same Frame
extractFGMask(frame_gray,bgStaticMask,fgMask,fgImgFrame,0.0); //No BGModel Updating
else
extractFGMask(frame_gray,bgStaticMask,fgMask,fgImgFrame,gTrackerState.dactiveMOGLearningRate); //Applies MOG if BGModelling Flag is set
/// Use ROI MASK For All FG
if (!fgMask.empty())
cv::bitwise_and(bgROIMask,fgMask,fgMask);
//Generates separate masks for Fish/Prey and Draws Fish Contourmask
// Returns Fish Locations/Keypoints
enhanceMasks(frame_gray,fgMask,fgFishMask,fgFoodMask,outframe,fishbodycontours,vfishblobs_pt);
// Combine Roi Mask Only For The foodMask
//if (!fgFoodMask.empty())
// cv::bitwise_and(bgROIMask,fgFoodMask,fgFoodMask);
/// Choose FG image prior to template matching
/// \note this can fail badly if Mask is thick outline of larva/or a bad match hidding features
if (gTrackerState.bApplyFishMaskBeforeFeatureDetection)
frame_gray.copyTo(fgFishImgMasked,fgFishMask); //fgMask allows prey to interfere with Eye detection //Use Enhanced Mask
else
frame_gray.copyTo(fgFishImgMasked);
///Update Fish Models Against Image and Tracks - Obtain Bearing Angle Using Template
//Can Use Fish Masked - But Templates Dont Include The masking
//UpdateFishModels(fgFishImgMasked,vfishmodels,ptFishblobs,nFrame,outframe);
if (gTrackerState.bTrackFish)
{
//Can Use Fish Masked fgFishImgMasked - But Templates Dont Include The masking
// Blob Detect No Longer Needed - Keypoints detect from Mask Processing - faster processing//
//processFishBlobs(fgFishImgMasked,fgFishMask, outframe , ptFishblobs);
// Check Blobs With Template And Update Fish Model
UpdateFishModels(fgFishImgMasked, vfishmodels, vfishblobs_pt, nFrame, outframe);
if (vfishmodels.size() > 0)
{
//cv::imshow("deteczFishFeatures",fgFishImgMasked);
/// Isolate Head Measure* Eye Position For each fish and pass measurement to Model make Spine model and draw it
detectZfishFeatures(window_main, frame_gray, outframe,
frameHead,outframeHeadEyeDetected,
fgFishImgMasked, fishbodycontours,
fishbodyhierarchy); //Creates & Updates Fish Models
gTrackerState.rect_pasteregion.width = outframeHeadEyeDetected.cols;
gTrackerState.rect_pasteregion.height = outframeHeadEyeDetected.rows;
}
/// Draw Tracks And Eye Angles //
//If A fish Is Detected Then Draw Its tracks
fishModels::iterator ft = vfishmodels.begin();
while (ft != vfishmodels.end() && gTrackerState.bRenderToDisplay) //Render All Fish
{
fishModel* pfish = ft->second;
assert(pfish);
zftRenderTrack(pfish->zTrack, frame, outframe,CV_TRACK_RENDER_PATH, cv::FONT_HERSHEY_PLAIN, gTrackerState.trackFntScale+0.2 );
//Draw KFiltered Axis
drawExtendedMajorAxis(outframeHeadEyeDetected,pfish->lastLeftEyeMeasured,CV_RGB(150,20,20));
drawExtendedMajorAxis(outframeHeadEyeDetected,pfish->lastRightEyeMeasured,CV_RGB(20,60,150));
++ft;
}
if (!outframeHeadEyeDetected.empty())
outframeHeadEyeDetected.copyTo(outframe(gTrackerState.rect_pasteregion) ) ;
}
nLarva = vfishmodels.size();
/////// Process Food Blobs ////
// Process Food blobs
if (gTrackerState.bTrackFood)
{
processPreyBlobs(frame_gray,fgFoodMask, outframe , ptFoodblobs); //Use Just The Mask
UpdateFoodModels(fgImgFrame,vfoodmodels,ptFoodblobs,nFrame,true); //Make New Food Models based on identified Blob
//For those foodModels which have not been updated/ Optic Flow may provide a new position Estimate
if (nFrame > gTrackerState.gcMinFoodModelActiveFrames)
{
processFoodOpticFlow(frame_gray, gframeLast ,vfoodmodels,nFrame,ptFoodblobs ); // Use Optic Flow
UpdateFoodModels(fgImgFrame,vfoodmodels,ptFoodblobs,nFrame,false); //Update but no new Food models
}
//cv::drawKeypoints(outframe,ptFoodblobs)
if (ptFoodblobs.size() >0)
cv::drawKeypoints( outframe, ptFoodblobs, outframe, cv::Scalar(20,70,255,60), cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS );
///Draw Food Tracks
foodModels::iterator ft = vfoodmodels.begin();
nFood = 0;
while (ft != vfoodmodels.end() && gTrackerState.bRenderToDisplay)
{
preyModel* pfood = ft->second;
assert(pfood);
// Render Food that has been on for A Min of Active frames / Skip unstable Detected Food Blob - Except If Food is being Tracked
if ( (pfood->isNew) && (!pfood->isTargeted))
{
//++ft; //Item Is not Counted
//continue;
}
if (pfood->isTargeted) //Draw Track Only on Targetted Food
zftRenderTrack(pfood->zTrack, frame, outframe,CV_TRACK_RENDER_ID | CV_TRACK_RENDER_HIGHLIGHT | CV_TRACK_RENDER_PATH | CV_TRACK_RENDER_BOUNDING_CIRCLE, gTrackerState.trackFnt, gTrackerState.trackFntScale*1.1 ); //| CV_TRACK_RENDER_BOUNDING_BOX
else{
if (pfood->isActive)
{
nFood++; //only count the rendered Food Items ie. Active Ones
zftRenderTrack(pfood->zTrack, frame, outframe,CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_CIRCLE , gTrackerState.trackFnt,gTrackerState.trackFntScale );
} //else
//zftRenderTrack(pfood->zTrack, frame, outframe,CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX ,gTrackerState.trackFnt ,gTrackerState.trackFntScale );
}
++ft;
}
}
} //If Tracking
//fishbodycontours.clear();
//fishbodyhierarchy.clear();
//Save to Disk
///
/// \brief drawFrameText
if (gTrackerState.bRenderToDisplay)
{
drawFrameText(window_main,nFrame,nLarva,nFood,outframe);
}
if (gTrackerState.bshowMask && gTrackerState.bTracking)
cv::imshow("Segmented FG with Fish",fgFishImgMasked);
// fgFishImgMasked.release();
// fgFishMask.release();
// fgFishImgMasked.release();
// int RefCount = frame_gray.u ? (frame_gray.u->refcount) : 0; //Its 1 at this point as required
//assert(RefCount == 1);
//qDebug() << "frame_gray.u->refcount:" << RefCount;
// frame_gray.release();
} //End Of Process Frame
///
/// \brief drawFrameText Overlays tracker status text onto the output frame:
///        the current frame number (top-left) and the counts of tracked
///        fish ("Nf:") and food/rotifer items ("Nr:"), each drawn on a dark
///        filled rectangle so the text stays readable over the video.
/// \param window_main Main tracker window (currently unused here; kept for
///        interface compatibility with existing callers).
/// \param nFrame   Current video frame number to display.
/// \param nLarva   Number of tracked fish/larvae (shown as "Nf:").
/// \param nFood    Number of tracked food items (shown as "Nr:").
/// \param outframe Frame image the text is drawn onto (modified in place).
///
void drawFrameText(MainWindow& window_main, uint nFrame,uint nLarva,uint nFood,cv::Mat& outframe)
{
    (void)window_main; // interface kept; parameter not used by current rendering

    // Frame number over a dark background box
    const std::string strFrameNumber = std::to_string(nFrame);
    cv::rectangle(outframe, cv::Point(10, 2), cv::Point(100,20),
                  CV_RGB(10,10,10), cv::FILLED, LINE_8);
    cv::putText(outframe, strFrameNumber, cv::Point(15, 15),
                gTrackerState.trackFnt, gTrackerState.trackFntScale, CV_RGB(150,80,50));

    // Object counts: Nf = fish/larvae, Nr = food/rotifers
    std::stringstream strCount;
    strCount << "Nf:" << (nLarva) << " Nr:" << nFood;
    cv::rectangle(outframe, cv::Point(10, 25), cv::Point(90,45), CV_RGB(10,10,10), cv::FILLED);
    cv::putText(outframe, strCount.str(), cv::Point(15, 38),
                gTrackerState.trackFnt, gTrackerState.trackFntScale, CV_RGB(150,80,50));

    /*
     * //Report Time (disabled) — re-declare a local char buff[200] if re-enabled
    std::sprintf(buff,"t: %0.2f",gTimer.elapsed()/(1000.0*60.0) );
    //strLearningRate << "dL:" << (double)(dLearningRate);
    cv::rectangle(outframe, cv::Point(10, 50), cv::Point(50,70), cv::Scalar(10,10,10), -1);
    cv::putText(outframe, buff, cv::Point(15, 63),
                trackFnt, trackFntScale , CV_RGB(250,250,0));
    */
} //DrawFrameText
//
// Process a larva video: remove the background and detect moving larvae.
// Note: the background-model learning rate controls how quickly a stationary
// object (e.g. a pupa) is absorbed into the background and removed from the scene.
//
unsigned int processVideo(cv::Mat& bgStaticMask, MainWindow& window_main, QString videoFilename, QFile& outdatafile, unsigned int startFrameCount,unsigned int stopFrame=0)
{
QElapsedTimer otLastUpdate; //Time Since Last Progress Report
otLastUpdate.start();
//Speed that stationary objects are removed
cv::Mat frame,outframe,outframeHeadEyeDetect,outframeHead; //bgROIMask,bgMaskWithRoi
outframeHead = cv::Mat::zeros(gTrackerState.gszTemplateImg.height,gTrackerState.gszTemplateImg.width,CV_8UC1); //Initiatialize to Avoid SegFaults
unsigned int nFrame = 0;
unsigned int nErrorFrames = 0;
QString frameNumberString;
//?Replicate FG Mask to method specific
//fgMask.copyTo(fgMaskMOG2);
//fgMask.copyTo(fgMaskMOG);
//fgMask.copyTo(fgMaskGMG);
gTrackerState.bPaused = false;
//Make Global Roi on 1st frame if it doesn't prexist
//Check If FG Mask Has Been Created - And Make A new One
//create the capture object
cv::VideoCapture capture(videoFilename.toStdString());
if(!capture.isOpened())
{
//error in opening the video input
window_main.LogEvent("[ERROR] Failed to open video capture device");
std::cerr << gTimer.elapsed()/60000.0 << " [Error] Unable to open video file: " << videoFilename.toStdString() << std::endl;
return 0;
//std::exit(EXIT_FAILURE);
}
gTrackerState.setVidFps( capture.get(cv::CAP_PROP_FPS) );
gTrackerState.uiStopFrame = stopFrame;
gTrackerState.uiTotalFrames = capture.get(cv::CAP_PROP_FRAME_COUNT);
if (gTrackerState.uiTotalFrames < stopFrame)//Sometimes FRAME-COunt is reported wrong so user needs to supply actuall number of frames in video
{
gTrackerState.uiTotalFrames = stopFrame;
// Update Frames to user set Value
capture.set(cv::CAP_PROP_FRAME_COUNT,gTrackerState.uiTotalFrames);
pwindow_main->LogEvent("[INFO] Updated video number of frames to user input");
}
gTrackerState.frame_pxwidth = (uint)capture.get(cv::CAP_PROP_FRAME_WIDTH);
gTrackerState.rect_pasteregion.x = (gTrackerState.frame_pxwidth-gTrackerState.gszTemplateImg.width*3);
gTrackerState.frame_pxheight = (uint)capture.get(cv::CAP_PROP_FRAME_HEIGHT);
//Default ROI
gTrackerState.initROI(gTrackerState.frame_pxwidth,gTrackerState.frame_pxheight);
window_main.setTotalFrames(gTrackerState.uiTotalFrames);
/// Make ROI //
//window_main.nFrame = nFrame;
// Check If it contains no Frames And Exit
if (gTrackerState.uiTotalFrames < 2)
{
window_main.LogEvent("[ERROR] This Video File is empty ");
capture.release();
return 0;
}
if (!gTrackerState.bBlindSourceTracking)
{
QFileInfo vidFileInfo(videoFilename);
window_main.LogEvent(" **Begin Processing: " + vidFileInfo.completeBaseName());
std::cout << " **Begin Processing: " << vidFileInfo.completeBaseName().toStdString() << std::endl; //Show Vid Name To StdOUt
}else
window_main.LogEvent("** Begin Processing of video file ** ");
window_main.stroutDirCSV = QString::fromStdString( gTrackerState.gstroutDirCSV);
window_main.vidFilename = videoFilename;
QString strMsg( " Vid Fps:" + QString::number(gTrackerState.gfVidfps) + " Total frames:" + QString::number(gTrackerState.uiTotalFrames) + " Start:" + QString::number(startFrameCount));
window_main.LogEvent(strMsg);
//qDebug() << strMsg;
// Open OutputFile
// if (!openDataFile(trkoutFileCSV,videoFilename,outdatafile))
// return 1;
gTrackerState.outfilename = outdatafile.fileName();
capture.set(cv::CAP_PROP_POS_FRAMES,startFrameCount);
nFrame = capture.get(cv::CAP_PROP_POS_FRAMES);
frameNumberString = QString("%1").arg(nFrame, 5, 10, QChar('0')); //QString::number(nFrame);
window_main.nFrame = nFrame;
//read input data. ESC or 'q' for quitting
while( !gTrackerState.bExiting && (char)gTrackerState.keyboard != 27 )
{
/// Flow Control Code - For When Looking at Specific Frame Region ///
// 1st Check If user changed Frame - and go to that frame
if (gTrackerState.cFrameDelayms < 0)
{
//gTrackerState.bStartFrameChanged = true;
window_main.nFrame += -gTrackerState.cFrameDelayms;
nFrame = window_main.nFrame;
capture.set(cv::CAP_PROP_POS_FRAMES,window_main.nFrame);
}
if (gTrackerState.bStartFrameChanged)
{
nFrame = window_main.nFrame;
capture.set(cv::CAP_PROP_POS_FRAMES,window_main.nFrame);
gTrackerState.bPaused = true;
gTrackerState.bTracking = gTrackerState.bTracking; //Do Not Change
//bStartFrameChanged = false; //This is Reset Once The frame Is captured
//Since we are jumping Frames - The fish Models Are invalidated / Delete
ReleaseFishModels(vfishmodels);
ReleaseFoodModels(vfoodmodels);
}
if (!gTrackerState.bPaused )
{
nFrame = capture.get(cv::CAP_PROP_POS_FRAMES);
window_main.nFrame = nFrame; //Update The Frame Value Stored in Tracker Window
window_main.tickProgress();
}
if (nFrame == startFrameCount && !gTrackerState.bPaused) //Only Switch Tracking On When Running Vid.
{
gTrackerState.bTracking = true;
}
frameNumberString = QString("%1").arg(nFrame, 5, 10, QChar('0')); //QString::number(nFrame); //QString::number(nFrame); //Update Display String Holding FrameNumber
if (!gTrackerState.bPaused || gTrackerState.bStartFrameChanged)
{
gTrackerState.bStartFrameChanged = false; //Reset
try //Try To Read The Image of that video Frame
{
//read the current frame - Returns false in next frame-read fails or End of video
if(!capture.read(frame))
{
if (nFrame == startFrameCount)
{
std::cerr << gTimer.elapsed()/60000.0 << " " << nFrame << "# [Error] Unable to read first frame." << std::endl;
nFrame = 0; //Signals To caller that video could not be loaded.
//Delete the Track File //
std::cerr << gTimer.elapsed()/60000.0 << " [Error] Problem with Tracking - Delete Data File To Signal its Not tracked" << std::endl;
removeDataFile(outdatafile);
exit(EXIT_FAILURE);
}
else //Not Stuck On 1st Frame / Maybe Vid Is Over?>
{
std::cerr << gTimer.elapsed()/60000.0 << " [Error] " << nFrame << "# *Unable to read next frame." << std::endl;
std::clog << gTimer.elapsed()/60000.0 << " Reached " << nFrame << "# frame of " << gTrackerState.uiTotalFrames << " of Video. Moving to next video." << std::endl;
//assert(outframe.cols > 1);
double dVidRelativePosition = capture.get(cv::CAP_PROP_POS_AVI_RATIO);
std::cerr << gTimer.elapsed()/60000.0 << " [INFO] Relative Vid.Position : " << dVidRelativePosition << std::endl;
if (nFrame < gTrackerState.uiTotalFrames -1 || nFrame < gTrackerState.uiStopFrame || dVidRelativePosition < 0.99)
{
std::cerr << gTimer.elapsed()/60000.0 << " [Error] " << nFrame << " [Error] Cannot read next frame! Skipping to " << nFrame+nErrorFrames << std::endl;
nErrorFrames++;
capture.set(cv::CAP_PROP_POS_FRAMES, nFrame+nErrorFrames); //Skip Frame
//removeDataFile(outdatafile); //Delete The Output File
//continue; //Skip Frame
/// Too Many Errors On Reading Frame
if (nErrorFrames > gTrackerState.c_MaxFrameErrors) //Avoid Getting Stuck Here
{
// Too Many Errors / Fail On Tracking
std::cerr << gTimer.elapsed()/60000.0 << " [Error] " << nErrorFrames << " Too Many Read Frame Errors - Stopping Here and Deleting Data File To Signal Failure" << std::endl;
QFileInfo outFileNfo(outdatafile);
gTrackerState.saveState( (outFileNfo.baseName() + QString::fromStdString("_settings.xml")).toStdString() );
removeDataFile(outdatafile);
break;
}
}
if (nFrame >= gTrackerState.uiTotalFrames || nFrame == gTrackerState.uiStopFrame)
{
std::clog << gTimer.elapsed()/60000.0 << " [info] processVideo loop done on frame: " << nFrame << std::endl;
::saveImage(frameNumberString,QString::fromStdString( gTrackerState.gstroutDirCSV),videoFilename,outframe);
QFileInfo outFileNfo(outdatafile);
gTrackerState.saveState( (outFileNfo.baseName() + QString::fromStdString("_settings.xml")).toStdString() );
if (gTrackerState.bTracking || !gTrackerState.bPauseAtVideoEnd) //If in Tracking MOde then Exit Loop - Processing done
break;
if (gTrackerState.bPauseAtVideoEnd) //In Playback mode - just pause on last frame
{
gTrackerState.bPaused = true;
gframeLast.copyTo(frame); //Stick to the last Frame
}
}
//continue;
}
} //Can't Read Next Frame
else{
// nErrorFrames = 0;
}
}catch(const std::exception &e)
{
std::cerr << gTimer.elapsed()/60000.0 << " [Error] reading frame " << nFrame << " skipping." << std::endl;
if (nFrame < gTrackerState.uiTotalFrames)
capture.set(cv::CAP_PROP_POS_FRAMES,nFrame+1); //Skip Frame
nErrorFrames++;
if (nErrorFrames > gTrackerState.c_MaxFrameErrors) //Avoid Getting Stuck Here
{
// Too Many Error / Fail On Tracking
std::cerr << gTimer.elapsed()/60000.0 << " [Error] Problem with Tracking Too Many Read Frame Errors - Stopping Here and Deleting Data File To Signal Failure" << std::endl;
std::cout << "Try fixing with : ffmpeg -v error -i broken_video.mp4 -c copy fixed.mp4" << std::endl;
removeDataFile(outdatafile);
break;
}
else
continue;
}
} //If Not Paused //
if (frame.empty())
{
std::cerr << gTimer.elapsed()/60000.0 << " [Error] " << nFrame << " Empty frame read. Skipping " << std::endl;
nErrorFrames++;
break;
}
//Check If StopFrame Reached And Pause
if (nFrame == gTrackerState.uiStopFrame && gTrackerState.uiStopFrame > 0 && !gTrackerState.bPaused)
{
gTrackerState.bPaused = true; //Stop Here
std::cout << nFrame << " Stop Frame Reached - Video Paused" <<std::endl;
pwindow_main->LogEvent(QString(">>Stop Frame Reached - Video Paused<<"));
}