Skip to content

Commit 3d984ec

Browse files
authored
Opencv_onnx example (#49)
1 parent 25455f0 commit 3d984ec

7 files changed

Lines changed: 257 additions & 0 deletions

File tree

examples/CMakeLists.txt

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,9 @@ add_subdirectory(ExternalInterfaceLib)
88
add_subdirectory(Identity)
99
add_subdirectory(Dahlquist)
1010
add_subdirectory(mass_spring_damper)
11+
12+
find_package(OpenCV QUIET)
13+
if (OpenCV_FOUND)
14+
message("OpenCV found, building OpenCV example(s)...")
15+
add_subdirectory(opencv)
16+
endif ()

examples/opencv/CMakeLists.txt

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
2+
generateFMU(onnx_tracker
3+
SOURCES "tracking.cpp"
4+
RESOURCE_FOLDER
5+
"${CMAKE_CURRENT_SOURCE_DIR}/resources"
6+
LINK_TARGETS ${OpenCV_LIBRARIES}
7+
FMI_VERSIONS
8+
"fmi3"
9+
)
463 KB
Loading

examples/opencv/readme.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
This FMU must be built with the dynamic MSVC runtime (/MD).
10.2 MB
Binary file not shown.

examples/opencv/run_tracking.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
2+
from ecospy import *
3+
4+
from pathlib import Path
5+
from time import sleep
6+
7+
def read_png_file_to_bytes(path: str) -> bytes:
8+
with open(path, "rb") as f:
9+
return f.read()
10+
11+
def main():
12+
print(f"Ecoslib version: {EcosLib.version()}")
13+
14+
EcosLib.set_log_level("debug")
15+
16+
fmu_path = str((Path(__file__).parent.parent.parent / 'cmake-build-debug' / 'models' /'fmi3' / 'onnx_tracker' / 'onnx_tracker.fmu').resolve())
17+
image = read_png_file_to_bytes("Lenna_(test_image).png")
18+
19+
print(f"image size: {len(image)} bytes")
20+
21+
with EcosSimulationStructure() as ss:
22+
ss.add_model("model", fmu_path)
23+
24+
with(EcosSimulation(structure=ss, step_size=1/100)) as sim:
25+
26+
sim.init()
27+
sim.set_binary("model::blob", image)
28+
sim.step()
29+
sleep(1)
30+
sim.terminate()
31+
32+
33+
34+
if __name__ == "__main__":
35+
main()

examples/opencv/tracking.cpp

Lines changed: 206 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,206 @@
1+
2+
#include "fmu4cpp/fmu_base.hpp"
3+
4+
5+
#include <opencv2/dnn.hpp>
6+
#include <opencv2/opencv.hpp>
7+
8+
#include <fstream>
9+
#include <iostream>
10+
11+
const std::vector<std::string> &coco_names();
12+
13+
class BoxDrawer {
14+
public:
15+
BoxDrawer(double confThreshold = 0.5, double nmsThreshold = 0.4)
16+
: confThreshold(confThreshold),
17+
nmsThreshold(nmsThreshold),
18+
classNames(coco_names()) {
19+
}
20+
21+
void setConfThreshold(double confThreshold) {
22+
this->confThreshold = confThreshold;
23+
}
24+
25+
void draw(cv::Mat &frame, const std::vector<int> &classIds,
26+
const std::vector<float> &confidences,
27+
const std::vector<cv::Rect> &boxes) const {
28+
// Non-maximum suppression to remove redundant overlapping boxes
29+
std::vector<int> indices;
30+
cv::dnn::NMSBoxes(boxes, confidences, confThreshold, nmsThreshold, indices);
31+
32+
for (const int idx: indices) {
33+
const cv::Rect &box = boxes[idx];
34+
drawPred(classIds[idx], confidences[idx], box.x, box.y, box.x + box.width, box.y + box.height, frame,
35+
classNames);
36+
}
37+
}
38+
39+
private:
40+
float confThreshold;
41+
float nmsThreshold;
42+
std::vector<std::string> classNames;
43+
44+
static void drawPred(int classId, float conf, int left, int top, int right, int bottom, cv::Mat &frame,
45+
const std::vector<std::string> &classNames) {
46+
rectangle(frame, cv::Point(left, top), cv::Point(right, bottom), cv::Scalar(255, 178, 50), 3);
47+
48+
std::ostringstream oss;
49+
oss << std::fixed << std::setprecision(2) << conf;
50+
std::string label = oss.str();
51+
if (!classNames.empty()) {
52+
CV_Assert(classId < classNames.size());
53+
label = classNames[classId] + ": " + label;
54+
}
55+
56+
int baseLine;
57+
const auto labelSize = getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5, 1, &baseLine);
58+
top = cv::max(top, labelSize.height);
59+
rectangle(frame, cv::Point(left, top - labelSize.height),
60+
cv::Point(left + labelSize.width, top + baseLine), cv::Scalar::all(255), cv::FILLED);
61+
putText(frame, label, cv::Point(left, top), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0), 1);
62+
}
63+
};
64+
65+
class Tracking : public fmu4cpp::fmu_base {
66+
public:
67+
FMU4CPP_CTOR(Tracking) {
68+
69+
register_binary("blob", &binary_).setCausality(fmu4cpp::causality_t::INPUT);
70+
71+
register_real("confThreshold", &confThreshold)
72+
.setCausality(fmu4cpp::causality_t::PARAMETER)
73+
.setVariability(fmu4cpp::variability_t::TUNABLE)
74+
.setMin(0.0)
75+
.setMax(1.0);
76+
77+
Tracking::reset();
78+
}
79+
80+
void exit_initialisation_mode() override {
81+
const auto onnxPath = resourceLocation() / "yolo11n.onnx";
82+
83+
if (!std::filesystem::exists(onnxPath)) {
84+
throw std::runtime_error("ONNX model file not found: " + onnxPath.string());
85+
}
86+
87+
net_ = cv::dnn::readNetFromONNX(onnxPath.string());
88+
net_.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
89+
net_.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
90+
}
91+
92+
bool do_step(double) override {
93+
94+
static cv::Size size(640, 640);
95+
96+
std::cout << "Running inference..." << std::endl;
97+
std::cout << binary_.size() << " bytes received." << std::endl;
98+
99+
cv::Mat frame, blob;
100+
if (!binary_.empty()) {
101+
frame = cv::imdecode(binary_, cv::IMREAD_COLOR);
102+
103+
int height = frame.rows;
104+
int width = frame.cols;
105+
std::cout << "Frame size: " << width << "x" << height << std::endl;
106+
int length = std::max(height, width);
107+
cv::Mat image = cv::Mat::zeros(length, length, CV_8UC3);
108+
frame.copyTo(image(cv::Rect(0, 0, width, height)));
109+
110+
// Create a 4D blob from the frame
111+
cv::dnn::blobFromImage(image, blob, 1 / 255.0, size, {}, true, false);
112+
net_.setInput(blob);
113+
114+
// Run forward pass
115+
std::vector<cv::Mat> outs;
116+
net_.forward(outs);
117+
118+
// Extract the first output and reshape if necessary
119+
cv::Mat output = outs.front();
120+
121+
// If output is not 2D, reshape it (e.g., for YOLO, it may be 3D: [1, N, M])
122+
if (output.dims > 2) {
123+
output = output.reshape(1, output.size[1]);// Flatten to 2D if required
124+
}
125+
126+
// Transpose if needed (verify first)
127+
const cv::Mat& transposedOutput = output.t();// Transpose output for processing
128+
int rows = transposedOutput.rows;
129+
130+
// Post-process detections
131+
std::vector<int> classIds;
132+
std::vector<float> confidences;
133+
std::vector<cv::Rect> boxes;
134+
135+
for (int i = 0; i < rows; ++i) {
136+
cv::Mat scores = transposedOutput.row(i).colRange(4, transposedOutput.cols);
137+
cv::Point classIdPoint;
138+
double confidence;
139+
minMaxLoc(scores, nullptr, &confidence, nullptr, &classIdPoint);
140+
if (confidence > confThreshold) {
141+
// Extract bounding box coordinates
142+
float centerX = transposedOutput.at<float>(i, 0);
143+
float centerY = transposedOutput.at<float>(i, 1);
144+
float width = transposedOutput.at<float>(i, 2);
145+
float height = transposedOutput.at<float>(i, 3);
146+
147+
// Calculate top-left corner of the bounding box
148+
int x = static_cast<int>(centerX - 0.5 * width);
149+
int y = static_cast<int>(centerY - 0.5 * height);
150+
int w = static_cast<int>(width);
151+
int h = static_cast<int>(height);
152+
153+
classIds.push_back(classIdPoint.x);
154+
confidences.emplace_back(static_cast<float>(confidence));
155+
boxes.emplace_back(x, y, w, h);
156+
}
157+
}
158+
159+
drawer.setConfThreshold(confThreshold);
160+
drawer.draw(frame, classIds, confidences, boxes);
161+
imshow(windowName_, frame);
162+
163+
cv::waitKey(1);
164+
}
165+
166+
return true;
167+
}
168+
169+
void reset() override {
170+
// do nothing
171+
}
172+
173+
private:
174+
cv::dnn::Net net_;
175+
176+
std::string windowName_ = "Tracking";
177+
std::vector<uint8_t> binary_;
178+
179+
double confThreshold = 0.5;
180+
BoxDrawer drawer{};
181+
};
182+
183+
const std::vector<std::string> &coco_names() {
184+
static std::vector<std::string> names = {
185+
"person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat", "traffic light",
186+
"fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
187+
"elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
188+
"skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard",
189+
"surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl",
190+
"banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake",
191+
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse",
192+
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator",
193+
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};
194+
return names;
195+
}
196+
197+
198+
fmu4cpp::model_info fmu4cpp::get_model_info() {
199+
model_info info;
200+
info.modelName = "Tracking";
201+
info.description = "A tracking model using ONNX and OpenCV";
202+
return info;
203+
}
204+
205+
206+
FMU4CPP_INSTANTIATE(Tracking);

0 commit comments

Comments
 (0)