diff --git a/bin/video.dart b/bin/video.dart
index 3157e07..0e5193d 100644
--- a/bin/video.dart
+++ b/bin/video.dart
@@ -2,6 +2,6 @@ import "package:burt_network/logging.dart";
 import "package:video/video.dart";
 
 void main() async {
-  Logger.level = LogLevel.info;
+  Logger.level = LogLevel.trace;
   await collection.init();
 }
diff --git a/colorized.jpg b/colorized.jpg
new file mode 100644
index 0000000..55dafc2
Binary files /dev/null and b/colorized.jpg differ
diff --git a/lib/src/generated/librealsense_ffi_bindings.dart b/lib/src/generated/librealsense_ffi_bindings.dart
index 05b922d..b43712e 100644
--- a/lib/src/generated/librealsense_ffi_bindings.dart
+++ b/lib/src/generated/librealsense_ffi_bindings.dart
@@ -182,6 +182,11 @@ final class NativeFrames extends ffi.Struct {
 
   @ffi.Int()
   external int colorized_length;
+
+  external ffi.Pointer<ffi.Uint8> rgb_data;
+
+  @ffi.Int()
+  external int rgb_length;
 }
 
 /// A fake ("opaque") C-friendly struct that we'll use a pointer to.
diff --git a/lib/src/isolates/realsense.dart b/lib/src/isolates/realsense.dart
index c265c93..46a8f12 100644
--- a/lib/src/isolates/realsense.dart
+++ b/lib/src/isolates/realsense.dart
@@ -1,7 +1,9 @@
+import "dart:io";
 import "dart:ffi";
 
 import "package:burt_network/generated.dart";
 import "package:burt_network/logging.dart";
+import "package:protobuf/protobuf.dart";
 import "package:opencv_ffi/opencv_ffi.dart";
 
 import "package:video/video.dart";
@@ -44,22 +46,33 @@ void sendFrame() {
     // Get frames from RealSense
     final frames = camera.getFrames();
-    sendLog(LogLevel.trace, "Got frames: ");
     if (frames == nullptr) return;
-    sendLog(LogLevel.trace, "  Depth: ${frames.ref.depth_data}");
-    sendLog(LogLevel.trace, "  Colorized: ${frames.ref.colorized_data}");
-    // // Compress colorized frame
-    final Pointer<Uint8> rawColorized = frames.ref.depth_data;
-    final Pointer<Mat> matrix = getMatrix(camera.height, camera.width, rawColorized);
-    final OpenCVImage? jpg = encodeJpg(matrix, quality: 30);
-    nativeLib.Mat_destroy(matrix);
-    sendLog(LogLevel.trace, "  Done");
-
-    if (jpg != null) {
-      send(FramePayload(details: details, address: jpg.pointer.address, length: jpg.data.length));
+    // Compress colorized frame
+    final Pointer<Uint8> rawColorized = frames.ref.colorized_data;
+    final Pointer<Mat> colorizedMatrix = getMatrix(camera.height, camera.width, rawColorized);
+    final OpenCVImage? colorizedJpg = encodeJpg(colorizedMatrix, quality: details.quality);
+    if (colorizedJpg == null) {
+      sendLog(LogLevel.debug, "Could not encode colorized frame");
+    } else {
+      send(FramePayload(details: details, address: colorizedJpg.pointer.address, length: colorizedJpg.data.length));
     }
+
+    // Compress RGB frame
+    final Pointer<Uint8> rawRGB = frames.ref.rgb_data;
+    final Pointer<Mat> rgbMatrix = getMatrix(camera.height, camera.width, rawRGB);
+    final OpenCVImage? rgbJpg = encodeJpg(rgbMatrix, quality: details.quality);
+    if (rgbJpg == null) {
+      sendLog(LogLevel.debug, "Could not encode RGB frame");
+    } else {
+      final newDetails = details.deepCopy()..name = CameraName.ROVER_FRONT;
+      send(FramePayload(details: newDetails, address: rgbJpg.pointer.address, length: rgbJpg.data.length));
+    }
+
+    fpsCount++;
     // send(DepthFramePayload(frames.address));
+    nativeLib.Mat_destroy(colorizedMatrix);
+    nativeLib.Mat_destroy(rgbMatrix);
     frames.dispose();
   }
 }
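The colorized and RGB branches in sendFrame() above repeat the same wrap-in-a-matrix, JPEG-encode, send-a-FramePayload sequence. A private helper on RealSenseIsolate could remove that duplication. The sketch below is not part of this patch: the helper name is made up, and it assumes the same getMatrix, encodeJpg, nativeLib, send, and sendLog members used above, plus CameraDetails as the type of the details field.

    /// Hypothetical helper (not in this patch): wraps raw bytes in a matrix,
    /// JPEG-encodes it, and either sends a FramePayload or logs a failure.
    void sendEncodedFrame(Pointer<Uint8> rawData, CameraDetails frameDetails) {
      final matrix = getMatrix(camera.height, camera.width, rawData);
      final OpenCVImage? jpg = encodeJpg(matrix, quality: frameDetails.quality);
      if (jpg == null) {
        sendLog(LogLevel.debug, "Could not encode ${frameDetails.name} frame");
      } else {
        send(FramePayload(details: frameDetails, address: jpg.pointer.address, length: jpg.data.length));
      }
      nativeLib.Mat_destroy(matrix);
    }

With such a helper, the body above reduces to one call per stream, for example sendEncodedFrame(frames.ref.colorized_data, details) and sendEncodedFrame(frames.ref.rgb_data, details.deepCopy()..name = CameraName.ROVER_FRONT), followed by fpsCount++ and frames.dispose().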
diff --git a/lib/src/realsense/realsense_ffi.dart b/lib/src/realsense/realsense_ffi.dart
index 2110dc8..7351c2c 100644
--- a/lib/src/realsense/realsense_ffi.dart
+++ b/lib/src/realsense/realsense_ffi.dart
@@ -42,19 +42,4 @@ class RealSenseFFI extends RealSenseInterface {
 
   @override
   Pointer<NativeFrames> getFrames() => realsenseLib.RealSense_getDepthFrame(device);
-
-  // @override
-  // OpenCVImage? colorize(Pointer depthFrame, {int quality = 75}) {
-  //   logger.trace("Colorizing frame: $depthFrame");
-  //   final colorizedPointer = realsenseLib.BurtRsFrame_colorize(depthFrame);
-  //   logger.trace("  Result: $colorizedPointer");
-  //   if (colorizedPointer.isEmpty) return null;
-  //   logger.trace("  Converting to matrix...");
-  //   final image = getMatrix(_height, _width, colorizedPointer.frame);
-  //   logger.trace("  Matrix: $image");
-  //   final jpg = encodeJpg(image, quality: quality);
-  //   // colorizedPointer.dispose();
-  //   logger.trace("  Jpg: $jpg");
-  //   return jpg;
-  // }
 }
diff --git a/src/realsense_ffi.cpp b/src/realsense_ffi.cpp
index 970cd2d..b3d4ed5 100644
--- a/src/realsense_ffi.cpp
+++ b/src/realsense_ffi.cpp
@@ -46,10 +46,6 @@ NativeFrames* RealSense_getDepthFrame(NativeRealSense* ptr) {
   return reinterpret_cast<burt_rs::RealSense*>(ptr)->getDepthFrame();
 }
 
-// NativeFrames* colorize(NativeFrames* framePtr) {
-//   return colorize(framePtr);
-// }
-
 void NativeFrames_free(NativeFrames* ptr) {
   freeFrame(ptr);
 }
diff --git a/src/realsense_ffi.h b/src/realsense_ffi.h
index 0957ab0..bc8bee1 100644
--- a/src/realsense_ffi.h
+++ b/src/realsense_ffi.h
@@ -33,6 +33,8 @@ typedef struct {
   int depth_length;
   const uint8_t* colorized_data;
   int colorized_length;
+  const uint8_t* rgb_data;
+  int rgb_length;
 } NativeFrames;
 
 // A fake ("opaque") C-friendly struct that we'll use a pointer to.
@@ -54,7 +56,6 @@ FFI_PLUGIN_EXPORT void RealSense_stopStream(NativeRealSense* ptr);
 // Frames
 FFI_PLUGIN_EXPORT NativeFrames* RealSense_getDepthFrame(NativeRealSense* ptr);
 FFI_PLUGIN_EXPORT void NativeFrames_free(NativeFrames* ptr);
-// FFI_PLUGIN_EXPORT NativeFrames* colorize(NativeFrames* framePtr);
 
 #ifdef __cplusplus
 }
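Every NativeFrames* returned by RealSense_getDepthFrame() now owns three heap buffers (depth, colorized, and RGB), and NativeFrames_free() is the only export that releases them. On the Dart side that release happens through the frames.dispose() call in sendFrame(); the sketch below shows roughly what that glue can look like, assuming the regenerated realsenseLib bindings expose NativeFrames_free. The extension itself is hypothetical and the package's real dispose() may be written differently.

    /// Hypothetical glue: forwards dispose() to the C NativeFrames_free, which
    /// deletes the depth, colorized, and RGB buffers exactly once.
    extension NativeFramesPointer on Pointer<NativeFrames> {
      void dispose() => realsenseLib.NativeFrames_free(this);
    }

Whatever form the real call takes, the invariant to keep is that dispose() runs exactly once per frame, and only after both JPEGs have been encoded from the buffers it frees.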
diff --git a/src/realsense_internal.cpp b/src/realsense_internal.cpp
index 9ec8bd3..d781cb4 100644
--- a/src/realsense_internal.cpp
+++ b/src/realsense_internal.cpp
@@ -47,6 +47,7 @@ const char* burt_rs::RealSense::getDeviceName() {
 BurtRsStatus burt_rs::RealSense::startStream() {
   rs2::config rs_config;
   rs_config.enable_stream(RS2_STREAM_DEPTH, WIDTH, HEIGHT);
+  rs_config.enable_stream(RS2_STREAM_COLOR, WIDTH, HEIGHT, RS2_FORMAT_BGR8);
   auto profile = pipeline.start(rs_config);
   auto frames = pipeline.wait_for_frames();
   auto frame = frames.get_depth_frame();
@@ -74,13 +75,16 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
   if (!pipeline.poll_for_frames(&frames)) return nullptr;
   rs2::depth_frame depth_frame = frames.get_depth_frame();
   rs2::frame colorized_frame = colorizer.colorize(depth_frame);
+  rs2::frame rgb_frame = frames.get_color_frame();
 
   // Copy both frames -- TODO: optimize this to be a move instead
   int depth_length = depth_frame.get_data_size();
   int colorized_length = colorized_frame.get_data_size();
-  if (depth_length == 0 || colorized_length == 0) return nullptr;
+  int rgb_length = rgb_frame.get_data_size();
+  if (depth_length == 0 || colorized_length == 0 || rgb_length == 0) return nullptr;
   uint8_t* depth_copy = new uint8_t[depth_length];
   uint8_t* colorized_copy = new uint8_t[colorized_length];
+  uint8_t* rgb_copy = new uint8_t[rgb_length];
 
   // Copy all the data in the depth frame
   const uint8_t* depth_data = static_cast<const uint8_t*>(depth_frame.get_data());
@@ -94,17 +98,26 @@ NativeFrames* burt_rs::RealSense::getDepthFrame() {
     colorized_copy[i] = colorized_data[i];
   }
 
+  // Copy all the data in the RGB frame
+  const uint8_t* rgb_data = static_cast<const uint8_t*>(rgb_frame.get_data());
+  for (int i = 0; i < rgb_length; i++) {
+    rgb_copy[i] = rgb_data[i];
+  }
+
   // Return both frames
   return new NativeFrames {
     depth_data: depth_copy,
     depth_length: depth_length,
     colorized_data: colorized_copy,
     colorized_length: colorized_length,
+    rgb_data: rgb_copy,
+    rgb_length: rgb_length,
   };
 }
 
 void freeFrame(NativeFrames* frames) {
   delete[] frames->depth_data;
   delete[] frames->colorized_data;
+  delete[] frames->rgb_data;
   delete frames;
 }
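Since startStream() requests the color stream as BGR8 at WIDTH x HEIGHT, each rgb_data buffer should hold exactly height * width * 3 bytes, which should match the dimensions sendFrame() passes to getMatrix(). A small guard on the Dart side can make that assumption explicit; the check below is hypothetical and not part of this patch.

    // Hypothetical sanity check before building the RGB matrix in sendFrame():
    // BGR8 is three bytes per pixel, so the buffer is rows * cols * 3 bytes long.
    assert(
      frames.ref.rgb_length == camera.height * camera.width * 3,
      "Unexpected RGB buffer size: ${frames.ref.rgb_length}",
    );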