diff --git a/Cargo.toml b/Cargo.toml
index 3722b18..16aed50 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "usls"
-version = "0.0.12"
+version = "0.0.13"
 edition = "2021"
 description = "A Rust library integrated with ONNXRuntime, providing a collection of ML models."
 repository = "https://github.com/jamjamjon/usls"
@@ -12,21 +12,7 @@ exclude = ["assets/*", "examples/*", "scripts/*", "runs/*"]
 [dependencies]
 clap = { version = "4.2.4", features = ["derive"] }
 ndarray = { version = "0.16.1", features = ["rayon"] }
-ort = { version = "2.0.0-rc.5", default-features = false, features = [
-    "load-dynamic",
-    "copy-dylibs",
-    "half",
-    "cann",
-    "rknpu",
-    "ndarray",
-    "cuda",
-    "tensorrt",
-    "coreml",
-    "openvino",
-    "rocm",
-    "openvino",
-    "operator-libraries"
-]}
+ort = { version = "2.0.0-rc.5", default-features = false}
 anyhow = { version = "1.0.75" }
 regex = { version = "1.5.4" }
 rand = { version = "0.8.5" }
@@ -45,10 +31,22 @@ imageproc = { version = "0.24" }
 ab_glyph = "0.2.23"
 geo = "0.28.0"
 prost = "0.12.4"
-human_bytes = "0.4.3"
 fast_image_resize = { version = "4.2.1", features = ["image"]}
 
+[features]
+default = [
+    "ort/load-dynamic",
+    "ort/copy-dylibs",
+    "ort/half",
+    "ort/ndarray",
+    "ort/cuda",
+    "ort/tensorrt",
+    "ort/coreml",
+    "ort/operator-libraries"
+]
+auto = ["ort/download-binaries"]
+
 [dev-dependencies]
 criterion = "0.5.1"
diff --git a/README.md b/README.md
index 8a2c9aa..a3a8912 100644
--- a/README.md
+++ b/README.md
@@ -75,16 +75,27 @@
 
 
 
-## ⛳️ Linking
+## ⛳️ ONNXRuntime Linking
 
-- #### For detailed setup instructions, refer to the [ORT documentation](https://ort.pyke.io/setup/linking).
+You have two options for linking the ONNXRuntime library:
+
+- ### Option 1: Manual Linking
+
+  - #### For detailed setup instructions, refer to the [ORT documentation](https://ort.pyke.io/setup/linking).
+
+  - #### For Linux or macOS Users:
+    - Download the ONNX Runtime package from the [Releases page](https://github.com/microsoft/onnxruntime/releases).
+    - Set up the library path by exporting the `ORT_DYLIB_PATH` environment variable:
+      ```shell
+      export ORT_DYLIB_PATH=/path/to/onnxruntime/lib/libonnxruntime.so.1.19.0
+      ```
+
+- ### Option 2: Automatic Download
+  Just use `--features auto`:
+  ```shell
+  cargo run -r --example yolo --features auto
+  ```
 
-- #### For Linux or macOS users
-  1. Download the ONNXRuntime package from the [Releases page](https://github.com/microsoft/onnxruntime/releases).
-  2. Set up the library path by exporting the `ORT_DYLIB_PATH` environment variable:
-  ```shell
-  export ORT_DYLIB_PATH=/path/to/onnxruntime/lib/libonnxruntime.so.1.19.0
-  ```
 
 
 ## 🎈 Quick Start
diff --git a/src/core/ort_engine.rs b/src/core/ort_engine.rs
index cec6cfd..1379997 100644
--- a/src/core/ort_engine.rs
+++ b/src/core/ort_engine.rs
@@ -1,6 +1,5 @@
 use anyhow::Result;
 use half::f16;
-use human_bytes::human_bytes;
 use ndarray::{Array, IxDyn};
 use ort::{
     ExecutionProvider, Session, SessionBuilder, TensorElementType, TensorRTExecutionProvider,
@@ -8,7 +7,9 @@ use ort::{
 use prost::Message;
 use std::collections::HashSet;
 
-use crate::{home_dir, onnx, Device, MinOptMax, Ops, Options, Ts, Xs, CHECK_MARK, CROSS_MARK, X};
+use crate::{
+    home_dir, human_bytes, onnx, Device, MinOptMax, Ops, Options, Ts, Xs, CHECK_MARK, CROSS_MARK, X,
+};
 
 /// Ort Tensor Attrs: name, data_type, dims
 #[derive(Debug)]
@@ -169,10 +170,9 @@ impl OrtEngine {
 
         // summary
         println!(
-            "{CHECK_MARK} Backend: ONNXRuntime | OpSet: {} | EP: {:?} | DType: {:?} | Params: {}",
+            "{CHECK_MARK} Backend: ONNXRuntime | Opset: {} | Device: {:?} | Params: {}",
             model_proto.opset_import[0].version,
             device,
-            inputs_attrs.dtypes,
             human_bytes(params as f64),
         );
 
diff --git a/src/models/db.rs b/src/models/db.rs
index 89bed64..c4ba46b 100644
--- a/src/models/db.rs
+++ b/src/models/db.rs
@@ -129,7 +129,8 @@ impl DB {
                     .unclip(delta, image_width as f64, image_height as f64)
                     .resample(50)
                     // .simplify(6e-4)
-                    .convex_hull();
+                    .convex_hull()
+                    .verify();
 
                 if let Some(bbox) = polygon.bbox() {
                     if bbox.height() < self.min_height || bbox.width() < self.min_width {
diff --git a/src/models/sam.rs b/src/models/sam.rs
index d6f37d4..94a7e7b 100644
--- a/src/models/sam.rs
+++ b/src/models/sam.rs
@@ -292,7 +292,7 @@ impl SAM {
                         let contours: Vec<imageproc::contours::Contour<i32>> =
                             imageproc::contours::find_contours_with_threshold(&luma, 0);
                         for c in contours.iter() {
-                            let polygon = Polygon::default().with_points_imageproc(&c.points);
+                            let polygon = Polygon::default().with_points_imageproc(&c.points).verify();
                             y_polygons.push(polygon.with_confidence(iou[0]).with_id(id));
                         }
                     }
diff --git a/src/models/sapiens.rs b/src/models/sapiens.rs
index 448149e..26f3d3a 100644
--- a/src/models/sapiens.rs
+++ b/src/models/sapiens.rs
@@ -119,7 +119,8 @@ impl Sapiens {
                 .map(|x| {
                     let mut polygon = Polygon::default()
                         .with_id(*i as _)
-                        .with_points_imageproc(&x.points);
+                        .with_points_imageproc(&x.points)
+                        .verify();
                     if let Some(names_body) = &self.names_body {
                         polygon = polygon.with_name(&names_body[*i]);
                     }
diff --git a/src/models/yolo.rs b/src/models/yolo.rs
index f79b2ca..e5da132 100644
--- a/src/models/yolo.rs
+++ b/src/models/yolo.rs
@@ -463,7 +463,8 @@ impl Vision for YOLO {
                             .map(|x| {
                                 let mut polygon = Polygon::default()
                                     .with_id(bbox.id())
-                                    .with_points_imageproc(&x.points);
+                                    .with_points_imageproc(&x.points)
+                                    .verify();
                                 if let Some(name) = bbox.name() {
                                     polygon = polygon.with_name(name);
                                 }
diff --git a/src/models/yolop.rs b/src/models/yolop.rs
index 65b1d51..2f02860 100644
--- a/src/models/yolop.rs
+++ b/src/models/yolop.rs
@@ -127,6 +127,7 @@ impl YOLOPv2 {
                         .with_id(0)
                         .with_points_imageproc(&x.points)
                         .with_name("Drivable area")
+                        .verify()
                 })
                 .max_by(|x, y| x.area().total_cmp(&y.area()))
             {
@@ -152,6 +153,7 @@ impl YOLOPv2 {
                         .with_id(1)
                         .with_points_imageproc(&x.points)
                         .with_name("Lane line")
+                        .verify()
                 })
                 .max_by(|x, y| x.area().total_cmp(&y.area()))
             {
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index eb52f2d..b881a9d 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -16,6 +16,20 @@ pub(crate) const CHECK_MARK: &str = "✅";
 pub(crate) const CROSS_MARK: &str = "❌";
 pub(crate) const SAFE_CROSS_MARK: &str = "❎";
 
+pub fn human_bytes(size: f64) -> String {
+    let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB"];
+    let mut size = size;
+    let mut unit_index = 0;
+    let k = 1024.;
+
+    while size >= k && unit_index < units.len() - 1 {
+        size /= k;
+        unit_index += 1;
+    }
+
+    format!("{:.1} {}", size, units[unit_index])
+}
+
 pub(crate) fn auto_load<P: AsRef<Path>>(src: P, sub: Option<&str>) -> Result<String> {
     let src = src.as_ref();
     let p = if src.is_file() {
diff --git a/src/ys/polygon.rs b/src/ys/polygon.rs
index 62a8e1c..48fa6b9 100644
--- a/src/ys/polygon.rs
+++ b/src/ys/polygon.rs
@@ -214,4 +214,33 @@ impl Polygon {
         self.polygon = geo::Polygon::new(LineString::from(new_points), vec![]);
         self
     }
+
+    pub fn verify(mut self) -> Self {
+        // Remove duplicates and redundant points
+        let mut points = self.polygon.exterior().points().collect::<Vec<_>>();
+        Self::remove_duplicates(&mut points);
+        self.polygon = geo::Polygon::new(LineString::from(points), vec![]);
+        self
+    }
+
+    fn remove_duplicates(xs: &mut Vec<geo::Point>) {
+        // Step 1: Remove elements from the end if they match the first element
+        if let Some(first) = xs.first() {
+            let p_1st_x = first.x() as i32;
+            let p_1st_y = first.y() as i32;
+            while xs.len() > 1 {
+                if let Some(last) = xs.last() {
+                    if last.x() as i32 == p_1st_x && last.y() as i32 == p_1st_y {
+                        xs.pop();
+                    } else {
+                        break;
+                    }
+                }
+            }
+        }
+
+        // Step 2: Remove duplicates
+        let mut seen = std::collections::HashSet::new();
+        xs.retain(|point| seen.insert((point.x() as i32, point.y() as i32)));
+    }
 }
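
A quick, self-contained check of the `human_bytes` formatter added to `src/utils/mod.rs`: the function body below is copied verbatim from the diff, while `main` and its expected strings are just worked-out examples, not output captured from usls.

```rust
// Standalone check of the byte-formatting helper introduced in src/utils/mod.rs.
// The function body matches the diff above; main() only adds a few assertions.
fn human_bytes(size: f64) -> String {
    let units = ["B", "KB", "MB", "GB", "TB", "PB", "EB"];
    let mut size = size;
    let mut unit_index = 0;
    let k = 1024.;

    while size >= k && unit_index < units.len() - 1 {
        size /= k;
        unit_index += 1;
    }

    format!("{:.1} {}", size, units[unit_index])
}

fn main() {
    assert_eq!(human_bytes(512.0), "512.0 B");
    assert_eq!(human_bytes(1536.0), "1.5 KB"); // 1536 / 1024
    assert_eq!(human_bytes(25_900_000.0), "24.7 MB"); // 25_900_000 / 1024^2
    println!("human_bytes formats as expected");
}
```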
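And a sketch of what the new `Polygon::verify()` strips from a contour. It mirrors the two-step logic of `remove_duplicates` (see `src/ys/polygon.rs` above) on plain `(f64, f64)` tuples instead of `geo::Point`, so it runs without the geo crate; the sample points and the asserted result are illustrative only.

```rust
use std::collections::HashSet;

// Mirrors Polygon::remove_duplicates: first pop trailing points that repeat
// the first point (a closed ring), then keep only the first occurrence of
// each point after rounding coordinates to i32.
fn remove_duplicates(xs: &mut Vec<(f64, f64)>) {
    // Step 1: drop trailing copies of the first point
    if let Some(&(x0, y0)) = xs.first() {
        while xs.len() > 1 {
            if let Some(&(x, y)) = xs.last() {
                if x as i32 == x0 as i32 && y as i32 == y0 as i32 {
                    xs.pop();
                } else {
                    break;
                }
            }
        }
    }

    // Step 2: remove remaining duplicates, keeping first occurrences
    let mut seen = HashSet::new();
    xs.retain(|&(x, y)| seen.insert((x as i32, y as i32)));
}

fn main() {
    // A ring whose last point closes it, plus one stray duplicate vertex.
    let mut points = vec![
        (0.0, 0.0),
        (10.0, 0.0),
        (10.0, 10.0),
        (10.0, 10.0),
        (0.0, 10.0),
        (0.0, 0.0),
    ];
    remove_duplicates(&mut points);
    assert_eq!(points, vec![(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0)]);
    println!("cleaned exterior: {:?}", points);
}
```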