OpenShot Library | libopenshot  0.7.0
ObjectDetection.cpp
Go to the documentation of this file.
1 
10 // Copyright (c) 2008-2019 OpenShot Studios, LLC
11 //
12 // SPDX-License-Identifier: LGPL-3.0-or-later
13 
14 #include <fstream>
15 #include <iostream>
16 #include <algorithm>
17 
19 #include "effects/Tracker.h"
20 #include "Exceptions.h"
21 #include "Timeline.h"
22 #include "objdetectdata.pb.h"
23 
24 #include <QImage>
25 #include <QPainter>
26 #include <QBrush>
27 #include <QColor>
28 #include <QRectF>
29 #include <QString>
30 #include <QStringList>
31 using namespace std;
32 using namespace openshot;
33 
34 
35 // Default constructor
36 ObjectDetection::ObjectDetection()
37  : display_box_text(1.0)
38  , display_boxes(1.0)
39 {
40  // Init effect metadata
41  init_effect_details();
42 
43  // We haven’t loaded any protobuf yet, so there's nothing to pick.
45 }
46 
47 // Init effect settings
48 void ObjectDetection::init_effect_details()
49 {
52 
54  info.class_name = "ObjectDetection";
55  info.name = "Object Detector";
56  info.description = "Detect objects through the video.";
57  info.has_audio = false;
58  info.has_video = true;
59  info.has_tracked_object = true;
60 }
61 
62 // This method is required for all derived classes of EffectBase, and returns a
63 // modified openshot::Frame object
64 std::shared_ptr<Frame> ObjectDetection::GetFrame(std::shared_ptr<Frame> frame, int64_t frame_number) {
65  // Get the frame's QImage
66  std::shared_ptr<QImage> frame_image = frame->GetImage();
67 
68  // Check if frame isn't NULL
69  if(!frame_image || frame_image->isNull()) {
70  return frame;
71  }
72 
73  QPainter painter(frame_image.get());
74  painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform);
75 
76  if (detectionsData.find(frame_number) != detectionsData.end()) {
77  DetectionData detections = detectionsData[frame_number];
78  for (int i = 0; i < detections.boxes.size(); i++) {
79  if (detections.confidences.at(i) < confidence_threshold ||
80  (!display_classes.empty() &&
81  std::find(display_classes.begin(), display_classes.end(), classNames[detections.classIds.at(i)]) == display_classes.end())) {
82  continue;
83  }
84 
85  int objectId = detections.objectIds.at(i);
86  auto trackedObject_it = trackedObjects.find(objectId);
87 
88  if (trackedObject_it != trackedObjects.end()) {
89  std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(trackedObject_it->second);
90 
91  Clip* parentClip = (Clip*) trackedObject->ParentClip();
92  if (parentClip && trackedObject->Contains(frame_number) && trackedObject->visible.GetValue(frame_number) == 1) {
93  BBox trackedBox = trackedObject->GetBox(frame_number);
94  QRectF boxRect((trackedBox.cx - trackedBox.width / 2) * frame_image->width(),
95  (trackedBox.cy - trackedBox.height / 2) * frame_image->height(),
96  trackedBox.width * frame_image->width(),
97  trackedBox.height * frame_image->height());
98 
99  // Get properties of tracked object (i.e. colors, stroke width, etc...)
100  std::vector<int> stroke_rgba = trackedObject->stroke.GetColorRGBA(frame_number);
101  std::vector<int> bg_rgba = trackedObject->background.GetColorRGBA(frame_number);
102  int stroke_width = trackedObject->stroke_width.GetValue(frame_number);
103  float stroke_alpha = trackedObject->stroke_alpha.GetValue(frame_number);
104  float bg_alpha = trackedObject->background_alpha.GetValue(frame_number);
105  float bg_corner = trackedObject->background_corner.GetValue(frame_number);
106 
107  // Set the pen for the border
108  QPen pen(QColor(stroke_rgba[0], stroke_rgba[1], stroke_rgba[2], 255 * stroke_alpha));
109  pen.setWidthF(trackedObject->ScaledStrokeWidth(
110  frame_number, frame_image->width(), frame_image->height()));
111  painter.setPen(pen);
112 
113  // Set the brush for the background
114  QBrush brush(QColor(bg_rgba[0], bg_rgba[1], bg_rgba[2], 255 * bg_alpha));
115  painter.setBrush(brush);
116 
117  if (display_boxes.GetValue(frame_number) == 1 && trackedObject->draw_box.GetValue(frame_number) == 1) {
118  // Only draw boxes if both properties are set to YES (draw all boxes, and draw box of the selected box)
119  painter.drawRoundedRect(boxRect, bg_corner, bg_corner);
120  }
121 
122  if(display_box_text.GetValue(frame_number) == 1) {
123  // Draw text label above bounding box
124  // Get the confidence and classId for the current detection
125  int classId = detections.classIds.at(i);
126 
127  // Get the label for the class name and its confidence
128  QString label = QString::number(objectId);
129  if (!classNames.empty()) {
130  label = QString::fromStdString(classNames[classId]) + ":" + label;
131  }
132 
133  // Set up the painter, font, and pen
134  QFont font;
135  font.setPixelSize(14);
136  painter.setFont(font);
137 
138  // Calculate the size of the text
139  QFontMetrics fontMetrics(font);
140  QSize labelSize = fontMetrics.size(Qt::TextSingleLine, label);
141 
142  // Define the top left point of the rectangle
143  double left = boxRect.center().x() - (labelSize.width() / 2.0);
144  double top = std::max(static_cast<int>(boxRect.top()), labelSize.height()) - 4.0;
145 
146  // Draw the text
147  painter.drawText(QPointF(left, top), label);
148  }
149  }
150  }
151  }
152  }
153 
154  painter.end();
155 
156  // The frame's QImage has been modified in place, so we just return the original frame
157  return frame;
158 }
159 
160 // Load protobuf data file
161 bool ObjectDetection::LoadObjDetectdData(std::string inputFilePath)
162 {
163  // Parse the file
164  pb_objdetect::ObjDetect objMessage;
165  std::fstream input(inputFilePath, std::ios::in | std::ios::binary);
166  if (!objMessage.ParseFromIstream(&input)) {
167  std::cerr << "Failed to parse protobuf message." << std::endl;
168  return false;
169  }
170 
171  // Clear out any old state
172  classNames.clear();
173  detectionsData.clear();
174  trackedObjects.clear();
175 
176  // Seed colors for each class
177  std::srand(1);
178  for (int i = 0; i < objMessage.classnames_size(); ++i) {
179  classNames.push_back(objMessage.classnames(i));
180  classesColor.push_back(cv::Scalar(
181  std::rand() % 205 + 50,
182  std::rand() % 205 + 50,
183  std::rand() % 205 + 50
184  ));
185  }
186 
187  // Walk every frame in the protobuf
188  for (size_t fi = 0; fi < objMessage.frame_size(); ++fi) {
189  const auto &pbFrame = objMessage.frame(fi);
190  size_t frameId = pbFrame.id();
191 
192  // Buffers for DetectionData
193  std::vector<int> classIds;
194  std::vector<float> confidences;
195  std::vector<cv::Rect_<float>> boxes;
196  std::vector<int> objectIds;
197 
198  // For each bounding box in this frame
199  for (int di = 0; di < pbFrame.bounding_box_size(); ++di) {
200  const auto &b = pbFrame.bounding_box(di);
201  float x = b.x(), y = b.y(), w = b.w(), h = b.h();
202  int classId = b.classid();
203  float confidence= b.confidence();
204  int objectId = b.objectid();
205 
206  // Record for DetectionData
207  classIds.push_back(classId);
208  confidences.push_back(confidence);
209  boxes.emplace_back(x, y, w, h);
210  objectIds.push_back(objectId);
211 
212  // Either append to an existing TrackedObjectBBox…
213  auto it = trackedObjects.find(objectId);
214  if (it != trackedObjects.end()) {
215  it->second->AddBox(frameId, x + w/2, y + h/2, w, h, 0.0);
216  }
217  else {
218  // …or create a brand-new one
219  TrackedObjectBBox tmpObj(
220  (int)classesColor[classId][0],
221  (int)classesColor[classId][1],
222  (int)classesColor[classId][2],
223  /*alpha=*/0
224  );
225  tmpObj.stroke_alpha = Keyframe(1.0);
226  tmpObj.AddBox(frameId, x + w/2, y + h/2, w, h, 0.0);
227 
228  auto ptr = std::make_shared<TrackedObjectBBox>(tmpObj);
229  ptr->ParentClip(this->ParentClip());
230 
231  // Prefix with effect UUID for a unique string ID
232  std::string prefix = this->Id();
233  if (!prefix.empty())
234  prefix += "-";
235  ptr->Id(prefix + std::to_string(objectId));
236  trackedObjects.emplace(objectId, ptr);
237  }
238  }
239 
240  // Save the DetectionData for this frame
241  detectionsData[frameId] = DetectionData(
242  classIds, confidences, boxes, frameId, objectIds
243  );
244  }
245 
246  google::protobuf::ShutdownProtobufLibrary();
247 
248  // Finally, pick a default selectedObjectIndex if we have any
249  if (!trackedObjects.empty()) {
250  selectedObjectIndex = trackedObjects.begin()->first;
251  }
252 
253  return true;
254 }
255 
256 // Get the indexes and IDs of all visible objects in the given frame
257 std::string ObjectDetection::GetVisibleObjects(int64_t frame_number) const{
258 
259  // Initialize the JSON objects
260  Json::Value root;
261  root["visible_objects_index"] = Json::Value(Json::arrayValue);
262  root["visible_objects_id"] = Json::Value(Json::arrayValue);
263  root["visible_class_names"] = Json::Value(Json::arrayValue);
264 
265  // Check if track data exists for the requested frame
266  if (detectionsData.find(frame_number) == detectionsData.end()){
267  return root.toStyledString();
268  }
269  DetectionData detections = detectionsData.at(frame_number);
270 
271  // Iterate through the tracked objects
272  for(int i = 0; i<detections.boxes.size(); i++){
273  // Does not show boxes with confidence below the threshold
274  if(detections.confidences.at(i) < confidence_threshold){
275  continue;
276  }
277 
278  // Get class name of tracked object
279  auto className = classNames[detections.classIds.at(i)];
280 
281  // If display_classes is not empty, check if className is in it
282  if (!display_classes.empty()) {
283  auto it = std::find(display_classes.begin(), display_classes.end(), className);
284  if (it == display_classes.end()) {
285  // If not in display_classes, skip this detection
286  continue;
287  }
288  root["visible_class_names"].append(className);
289  } else {
290  // include all class names
291  root["visible_class_names"].append(className);
292  }
293 
294  int objectId = detections.objectIds.at(i);
295  // Search for the object in the trackedObjects map
296  auto trackedObject = trackedObjects.find(objectId);
297 
298  // Get the tracked object JSON properties for this frame
299  Json::Value trackedObjectJSON = trackedObject->second->PropertiesJSON(frame_number);
300 
301  if (trackedObjectJSON["visible"]["value"].asBool() &&
302  trackedObject->second->ExactlyContains(frame_number)){
303  // Save the object's index and ID if it's visible in this frame
304  root["visible_objects_index"].append(trackedObject->first);
305  root["visible_objects_id"].append(trackedObject->second->Id());
306  }
307  }
308 
309  return root.toStyledString();
310 }
311 
312 std::shared_ptr<QImage> ObjectDetection::TrackedObjectMask(std::shared_ptr<QImage> target_image, int64_t frame_number) const {
313  if (!target_image || target_image->isNull())
314  return {};
315 
316  auto detections_it = detectionsData.find(frame_number);
317  if (detections_it == detectionsData.end())
318  return {};
319 
320  auto mask_image = std::make_shared<QImage>(
321  target_image->width(), target_image->height(), QImage::Format_RGBA8888_Premultiplied);
322  mask_image->fill(QColor(0, 0, 0, 255));
323 
324  QPainter painter(mask_image.get());
325  painter.setRenderHint(QPainter::Antialiasing, false);
326  painter.setPen(Qt::NoPen);
327  painter.setBrush(QBrush(QColor(255, 255, 255, 255)));
328 
329  bool drew_any_box = false;
330  const DetectionData& detections = detections_it->second;
331  for (int i = 0; i < detections.boxes.size(); i++) {
332  if (detections.confidences.at(i) < confidence_threshold)
333  continue;
334 
335  const int class_id = detections.classIds.at(i);
336  if (class_id < 0 || class_id >= classNames.size())
337  continue;
338 
339  const std::string class_name = classNames[class_id];
340  if (!display_classes.empty() &&
341  std::find(display_classes.begin(), display_classes.end(), class_name) == display_classes.end()) {
342  continue;
343  }
344 
345  int object_id = detections.objectIds.at(i);
346  auto tracked_object_it = trackedObjects.find(object_id);
347  if (tracked_object_it == trackedObjects.end() || !tracked_object_it->second)
348  continue;
349 
350  auto tracked_object = std::static_pointer_cast<TrackedObjectBBox>(tracked_object_it->second);
351  if (!tracked_object->ExactlyContains(frame_number) ||
352  tracked_object->visible.GetValue(frame_number) != 1) {
353  continue;
354  }
355 
356  BBox box = tracked_object->GetBox(frame_number);
357  if (box.width <= 0.0f || box.height <= 0.0f || box.cx < 0.0f || box.cy < 0.0f)
358  continue;
359 
360  const double x = (box.cx - box.width / 2.0) * target_image->width();
361  const double y = (box.cy - box.height / 2.0) * target_image->height();
362  const double w = box.width * target_image->width();
363  const double h = box.height * target_image->height();
364  painter.drawRect(QRectF(x, y, w, h));
365  drew_any_box = true;
366  }
367 
368  painter.end();
369  if (!drew_any_box)
370  return {};
371  return mask_image;
372 }
373 
374 // Generate JSON string of this object
375 std::string ObjectDetection::Json() const {
376 
377  // Return formatted string
378  return JsonValue().toStyledString();
379 }
380 
381 // Generate Json::Value for this object
382 Json::Value ObjectDetection::JsonValue() const {
383 
384  // Create root json object
385  Json::Value root = EffectBase::JsonValue(); // get parent properties
386  root["type"] = info.class_name;
387  root["protobuf_data_path"] = protobuf_data_path;
388  root["selected_object_index"] = selectedObjectIndex;
389  root["confidence_threshold"] = confidence_threshold;
390  root["display_box_text"] = display_box_text.JsonValue();
391  root["display_boxes"] = display_boxes.JsonValue();
392 
393  // Add tracked object's IDs to root
394  Json::Value objects;
395  for (auto const& trackedObject : trackedObjects){
396  Json::Value trackedObjectJSON = trackedObject.second->JsonValue();
397  // add object json
398  objects[trackedObject.second->Id()] = trackedObjectJSON;
399  }
400  root["objects"] = objects;
401 
402  // return JsonValue
403  return root;
404 }
405 
406 // Load JSON string into this object
407 void ObjectDetection::SetJson(const std::string value) {
408 
409  // Parse JSON string into JSON objects
410  try
411  {
412  const Json::Value root = openshot::stringToJson(value);
413  // Set all values that match
414  SetJsonValue(root);
415  }
416  catch (const std::exception& e)
417  {
418  // Error parsing JSON (or missing keys)
419  throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
420  }
421 }
422 
423 // Load Json::Value into this object
424 void ObjectDetection::SetJsonValue(const Json::Value root)
425 {
426  // Parent properties
428 
429  // If a protobuf path is provided, load & prefix IDs
430  if (!root["protobuf_data_path"].isNull()) {
431  std::string new_path = root["protobuf_data_path"].asString();
432  if (protobuf_data_path != new_path || trackedObjects.empty()) {
433  protobuf_data_path = new_path;
434  if (!LoadObjDetectdData(protobuf_data_path)) {
435  throw InvalidFile("Invalid protobuf data path", "");
436  }
437  }
438  }
439 
440  // Selected index, thresholds, UI flags, filters, etc.
441  if (!root["selected_object_index"].isNull())
442  selectedObjectIndex = root["selected_object_index"].asInt();
443  if (!root["confidence_threshold"].isNull())
444  confidence_threshold = root["confidence_threshold"].asFloat();
445  if (!root["display_box_text"].isNull())
446  display_box_text.SetJsonValue(root["display_box_text"]);
447  if (!root["display_boxes"].isNull())
448  display_boxes.SetJsonValue(root["display_boxes"]);
449 
450  if (!root["class_filter"].isNull()) {
451  class_filter = root["class_filter"].asString();
452  QStringList parts = QString::fromStdString(class_filter).split(',');
453  display_classes.clear();
454  for (auto &p : parts) {
455  auto s = p.trimmed().toLower();
456  if (!s.isEmpty()) {
457  display_classes.push_back(s.toStdString());
458  }
459  }
460  }
461 
462  // Apply any per-object overrides
463  if (!root["objects"].isNull()) {
464  // Iterate over the supplied objects (indexed by id or position)
465  const auto memberNames = root["objects"].getMemberNames();
466  for (const auto& name : memberNames)
467  {
468  // Determine the numeric index of this object
469  int index = -1;
470  bool numeric_key = std::all_of(name.begin(), name.end(), ::isdigit);
471  if (numeric_key) {
472  index = std::stoi(name);
473  }
474  else
475  {
476  size_t pos = name.find_last_of('-');
477  if (pos != std::string::npos) {
478  try {
479  index = std::stoi(name.substr(pos + 1));
480  } catch (...) {
481  index = -1;
482  }
483  }
484  }
485 
486  auto obj_it = trackedObjects.find(index);
487  if (obj_it != trackedObjects.end() && obj_it->second) {
488  // Update object id if provided as a non-numeric key
489  if (!numeric_key)
490  obj_it->second->Id(name);
491  obj_it->second->SetJsonValue(root["objects"][name]);
492  }
493  }
494  }
495  // Set the tracked object's ids (legacy format)
496  if (!root["objects_id"].isNull()) {
497  for (auto& kv : trackedObjects) {
498  if (!root["objects_id"][kv.first].isNull())
499  kv.second->Id(root["objects_id"][kv.first].asString());
500  }
501  }
502 }
503 
504 // Get all properties for a specific frame
505 std::string ObjectDetection::PropertiesJSON(int64_t requested_frame) const {
506 
507  // Generate JSON properties list
508  Json::Value root = BasePropertiesJSON(requested_frame);
509 
510  Json::Value objects;
511  if(trackedObjects.count(selectedObjectIndex) != 0){
512  auto selectedObject = trackedObjects.at(selectedObjectIndex);
513  if (selectedObject){
514  Json::Value trackedObjectJSON = selectedObject->PropertiesJSON(requested_frame);
515  // add object json
516  objects[selectedObject->Id()] = trackedObjectJSON;
517  }
518  }
519  root["objects"] = objects;
520 
521  root["selected_object_index"] = add_property_json("Selected Object", selectedObjectIndex, "int", "", NULL, 0, 200, false, requested_frame);
522  root["confidence_threshold"] = add_property_json("Confidence Theshold", confidence_threshold, "float", "", NULL, 0, 1, false, requested_frame);
523  root["class_filter"] = add_property_json("Class Filter", 0.0, "string", class_filter, NULL, -1, -1, false, requested_frame);
524 
525  root["display_box_text"] = add_property_json("Draw All Text", display_box_text.GetValue(requested_frame), "int", "", &display_box_text, 0, 1, false, requested_frame);
526  root["display_box_text"]["choices"].append(add_property_choice_json("Yes", true, display_box_text.GetValue(requested_frame)));
527  root["display_box_text"]["choices"].append(add_property_choice_json("No", false, display_box_text.GetValue(requested_frame)));
528 
529  root["display_boxes"] = add_property_json("Draw All Boxes", display_boxes.GetValue(requested_frame), "int", "", &display_boxes, 0, 1, false, requested_frame);
530  root["display_boxes"]["choices"].append(add_property_choice_json("Yes", true, display_boxes.GetValue(requested_frame)));
531  root["display_boxes"]["choices"].append(add_property_choice_json("No", false, display_boxes.GetValue(requested_frame)));
532 
533  // Return formatted string
534  return root.toStyledString();
535 }
openshot::ClipBase::add_property_json
Json::Value add_property_json(std::string name, float value, std::string type, std::string memo, const Keyframe *keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame) const
Generate JSON for a property.
Definition: ClipBase.cpp:96
openshot::stringToJson
const Json::Value stringToJson(const std::string value)
Definition: Json.cpp:16
openshot::TrackedObjectBBox::stroke_alpha
Keyframe stroke_alpha
Stroke box opacity.
Definition: TrackedObjectBBox.h:146
openshot::ObjectDetection::SetJson
void SetJson(const std::string value) override
Load JSON string into this object.
Definition: ObjectDetection.cpp:407
openshot::ObjectDetection::GetFrame
std::shared_ptr< Frame > GetFrame(std::shared_ptr< Frame > frame, int64_t frame_number) override
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
Definition: ObjectDetection.cpp:64
openshot::TrackedObjectBBox::AddBox
void AddBox(int64_t _frame_num, float _cx, float _cy, float _width, float _height, float _angle) override
Add a BBox to the BoxVec map.
Definition: TrackedObjectBBox.cpp:48
openshot::EffectBase::info
EffectInfoStruct info
Information about the current effect.
Definition: EffectBase.h:114
openshot::ObjectDetection::JsonValue
Json::Value JsonValue() const override
Generate Json::Value for this object.
Definition: ObjectDetection.cpp:382
openshot::BBox::height
float height
bounding box height
Definition: TrackedObjectBBox.h:42
DetectionData
Definition: ObjectDetection.h:27
openshot
This namespace is the default namespace for all code in the openshot library.
Definition: AnimatedCurve.h:24
openshot::EffectBase::ParentClip
openshot::ClipBase * ParentClip()
Parent clip object of this effect (which can be unparented and NULL)
Definition: EffectBase.cpp:654
openshot::ClipBase::add_property_choice_json
Json::Value add_property_choice_json(std::string name, int value, int selected_value) const
Generate JSON choice for a property (dropdown properties)
Definition: ClipBase.cpp:132
ObjectDetection.h
Header file for Object Detection effect class.
openshot::Clip
This class represents a clip (used to arrange readers on the timeline)
Definition: Clip.h:89
openshot::EffectBase::JsonValue
virtual Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: EffectBase.cpp:102
openshot::BBox::cy
float cy
y-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:40
Timeline.h
Header file for Timeline class.
DetectionData::objectIds
std::vector< int > objectIds
Definition: ObjectDetection.h:46
openshot::Keyframe::SetJsonValue
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: KeyFrame.cpp:372
openshot::EffectBase::trackedObjects
std::map< int, std::shared_ptr< openshot::TrackedObjectBase > > trackedObjects
Map of Tracked Object's by their indices (used by Effects that track objects on clips)
Definition: EffectBase.h:111
openshot::Keyframe::JsonValue
Json::Value JsonValue() const
Generate Json::Value for this object.
Definition: KeyFrame.cpp:339
openshot::EffectBase::BasePropertiesJSON
Json::Value BasePropertiesJSON(int64_t requested_frame) const
Generate JSON object of base properties (recommended to be used by all effects)
Definition: EffectBase.cpp:245
openshot::TrackedObjectBBox
This class contains the properties of a tracked object and functions to manipulate it.
Definition: TrackedObjectBBox.h:130
openshot::Keyframe
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
Definition: KeyFrame.h:53
openshot::InvalidJSON
Exception for invalid JSON.
Definition: Exceptions.h:223
openshot::BBox::width
float width
bounding box width
Definition: TrackedObjectBBox.h:41
openshot::ObjectDetection::Json
std::string Json() const override
Generate JSON string of this object.
Definition: ObjectDetection.cpp:375
openshot::EffectBase::InitEffectInfo
void InitEffectInfo()
Definition: EffectBase.cpp:42
openshot::EffectInfoStruct::has_audio
bool has_audio
Determines if this effect manipulates the audio of a frame.
Definition: EffectBase.h:44
DetectionData::classIds
std::vector< int > classIds
Definition: ObjectDetection.h:43
DetectionData::confidences
std::vector< float > confidences
Definition: ObjectDetection.h:44
Tracker.h
Header file for Tracker effect class.
openshot::EffectInfoStruct::has_tracked_object
bool has_tracked_object
Determines if this effect track objects through the clip.
Definition: EffectBase.h:45
openshot::InvalidFile
Exception for files that can not be found or opened.
Definition: Exceptions.h:193
openshot::EffectInfoStruct::class_name
std::string class_name
The class name of the effect.
Definition: EffectBase.h:39
openshot::EffectInfoStruct::description
std::string description
The description of this effect and what it does.
Definition: EffectBase.h:41
openshot::BBox
This struct holds the information of a bounding-box.
Definition: TrackedObjectBBox.h:37
openshot::EffectInfoStruct::has_video
bool has_video
Determines if this effect manipulates the image of a frame.
Definition: EffectBase.h:43
openshot::ClipBase::Id
void Id(std::string value)
Definition: ClipBase.h:94
openshot::ObjectDetection::LoadObjDetectdData
bool LoadObjDetectdData(std::string inputFilePath)
Load protobuf data file.
Definition: ObjectDetection.cpp:161
openshot::ObjectDetection::PropertiesJSON
std::string PropertiesJSON(int64_t requested_frame) const override
Definition: ObjectDetection.cpp:505
DetectionData::boxes
std::vector< cv::Rect_< float > > boxes
Definition: ObjectDetection.h:45
openshot::EffectInfoStruct::name
std::string name
The name of the effect.
Definition: EffectBase.h:40
openshot::ObjectDetection::TrackedObjectMask
std::shared_ptr< QImage > TrackedObjectMask(std::shared_ptr< QImage > target_image, int64_t frame_number) const override
Generate a black/white mask from visible detected bounding boxes.
Definition: ObjectDetection.cpp:312
openshot::ObjectDetection::GetVisibleObjects
std::string GetVisibleObjects(int64_t frame_number) const override
Get the indexes and IDs of all visible objects in the given frame.
Definition: ObjectDetection.cpp:257
openshot::BBox::cx
float cx
x-coordinate of the bounding box center
Definition: TrackedObjectBBox.h:39
openshot::ObjectDetection::selectedObjectIndex
int selectedObjectIndex
Index of the Tracked Object that was selected to modify its properties.
Definition: ObjectDetection.h:83
Exceptions.h
Header file for all Exception classes.
openshot::EffectBase::SetJsonValue
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Definition: EffectBase.cpp:146
openshot::Keyframe::GetValue
double GetValue(int64_t index) const
Get the value at a specific index.
Definition: KeyFrame.cpp:258
openshot::ReaderBase::ParentClip
openshot::ClipBase * ParentClip()
Parent clip object of this reader (which can be unparented and NULL)
Definition: ReaderBase.cpp:244
openshot::ObjectDetection::SetJsonValue
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
Definition: ObjectDetection.cpp:424