detections: add sort tracker

This commit is contained in:
Koushik Dutta
2023-01-13 21:00:47 -08:00
parent 73c89760d3
commit e40f5f426a
7 changed files with 96 additions and 3 deletions

3
.gitmodules vendored
View File

@@ -49,3 +49,6 @@
[submodule "plugins/tensorflow-lite/src/predict/sort_oh"]
path = plugins/tensorflow-lite/src/predict/sort_oh
url = https://github.com/nonocam/sort_oh.git
[submodule "plugins/tensorflow-lite/sort_oh"]
path = plugins/tensorflow-lite/sort_oh
url = https://github.com/nonocam/sort_oh.git

View File

@@ -2,3 +2,10 @@
Pillow>=5.4.1
PyGObject>=3.30.4
coremltools~=6.1
# sort_oh
scipy
numba
matplotlib
filterpy
numpy

View File

@@ -49,6 +49,8 @@ export interface DenoisedDetectionState<T> {
tracked?: TrackedItem<T>[];
frameCount?: number;
lastDetection?: number;
// detection id -> tracked entry (for detections that carry an external tracker id)
externallyTracked?: Map<string, DenoisedDetectionEntry<T>>;
}
type Rectangle = {
@@ -93,6 +95,43 @@ export function denoiseDetections<T>(state: DenoisedDetectionState<T>,
if (!state.previousDetections)
state.previousDetections = [];
const now = options.now || Date.now();
const externallyTracked = currentDetections.filter(d => d.id);
if (externallyTracked.length) {
if (!state.externallyTracked)
state.externallyTracked = new Map();
for (const tracked of externallyTracked) {
tracked.lastSeen = now;
let previous = state.externallyTracked.get(tracked.id);
if (state.externallyTracked.has(tracked.id)) {
previous.lastSeen = now;
tracked.firstBox = previous.firstBox;
tracked.lastBox = previous.lastBox = tracked.boundingBox;
previous.durationGone = 0;
options?.retained(tracked, previous);
}
else {
state.externallyTracked.set(tracked.id, tracked);
tracked.firstSeen = now;
tracked.durationGone = 0;
tracked.firstBox = tracked.lastBox = tracked.boundingBox;
options?.added(tracked);
}
}
for (const tracked of state.externallyTracked.values()) {
if (now - tracked.lastSeen > options.timeout) {
options?.expiring(tracked);
}
}
}
if (state.externallyTracked)
return;
const { tracker, previousDetections } = state;
const items: TrackerItem<T>[] = currentDetections.filter(cd => cd.boundingBox).map(cd => {
@@ -111,7 +150,6 @@ export function denoiseDetections<T>(state: DenoisedDetectionState<T>,
// console.log(to.velocity);
// }
const now = options.now || Date.now();
const lastDetection = state.lastDetection || now;
const sinceLastDetection = now - lastDetection;

View File

@@ -9,3 +9,6 @@ dist/*.js
dist/*.txt
__pycache__
all_models
sort_oh
download_models.sh
tsconfig.json

View File

@@ -12,6 +12,9 @@ import time
from detect import DetectionSession, DetectPlugin
from collections import namedtuple
from .sort_oh import tracker
import numpy as np
Rectangle = namedtuple('Rectangle', 'xmin ymin xmax ymax')
def intersect_area(a: Rectangle, b: Rectangle): # returns None if rectangles don't intersect
@@ -22,12 +25,14 @@ def intersect_area(a: Rectangle, b: Rectangle): # returns None if rectangles do
class PredictSession(DetectionSession):
image: Image.Image
tracker: sort_oh.tracker.Sort_OH
def __init__(self, start_time: float) -> None:
super().__init__()
self.image = None
self.processed = 0
self.start_time = start_time
self.tracker = None
def parse_label_contents(contents: str):
lines = contents.splitlines()
@@ -250,9 +255,15 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
def run_detection_image(self, detection_session: PredictSession, image: Image.Image, settings: Any, src_size, convert_to_src_size: Any = None, multipass_crop: Tuple[float, float, float, float] = None):
(w, h) = self.get_input_size()
(iw, ih) = image.size
if not detection_session.tracker:
detection_session.tracker = tracker.Sort_OH(scene=np.array([iw, ih]))
conf_trgt = 0.35
conf_objt = 0.75
detection_session.tracker.conf_trgt = conf_trgt
detection_session.tracker.conf_objt = conf_objt
# this is a single pass or the second pass. detect once and return results.
if multipass_crop:
(l, t, dx, dy) = multipass_crop
@@ -359,6 +370,36 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
dedupe_detections()
ret['detections'] = detections
if not multipass_crop:
sort_input = []
for d in ret['detections']:
r: ObjectDetectionResult = d
l, t, w, h = r['boundingBox']
sort_input.append([l, t, l + w, t + h, r['score']])
trackers, unmatched_trckr, unmatched_gts = detection_session.tracker.update(np.array(sort_input), [])
for td in trackers:
x0, y0, x1, y1, trackID = td[0].item(), td[1].item(
), td[2].item(), td[3].item(), td[4].item()
overlap = 0
detections = ret['detections']
ret['detections'] = []
for d in detections:
obj: ObjectDetectionResult = None
ob: ObjectDetectionResult = d
dx0, dy0, dw, dh = ob['boundingBox']
dx1 = dx0 + dw
dy1 = dy0 + dh
area = (min(dx1, x1)-max(dx0, x0))*(min(dy1, y1)-max(dy0, y0))
if (area > overlap):
overlap = area
obj = ob
if obj:
obj['id'] = str(trackID)
ret['detections'].append(obj)
return ret, RawImage(image)
def run_detection_crop(self, detection_session: DetectionSession, sample: RawImage, settings: Any, src_size, convert_to_src_size, bounding_box: Tuple[float, float, float, float]) -> ObjectsDetected:

View File

@@ -0,0 +1 @@
../../sort_oh/libs