RGB Full Resolution Saver¶
This example does its best to save full-resolution 3840x2160 .jpeg files as fast as it can from the RGB sensor. It serves as an example of recording high resolution to disk for the purposes of high-resolution ground-truth data. We also recently added the options to save isp - YUV420p uncompressed frames, processed by ISP, and raw - BayerRG (R_Gr_Gb_B), as read from sensor, 10-bit packed.
Be careful, this example saves pictures to your host storage. So if you leave it running, you could fill up your storage on your host.
Similar samples:
Demo¶
Setup¶
Please run the install script to download all required dependencies. Please note that this script must be run from git context, so you have to download the depthai-python repository first and then run the script
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow installation guide
Source code¶
Also available on GitHub
#!/usr/bin/env python3
"""Save full-resolution MJPEG-encoded frames from the RGB sensor to disk.

Builds a depthai pipeline that streams 4K color frames both as a live
preview ("rgb") and through an on-device MJPEG encoder ("jpeg"), then
writes each encoded frame to ./rgb_data/<epoch-ms>.jpeg until 'q' is
pressed. Warning: this fills host storage quickly if left running.
"""
import time
from pathlib import Path
import cv2
import depthai as dai

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
videoEnc = pipeline.create(dai.node.VideoEncoder)
xoutJpeg = pipeline.create(dai.node.XLinkOut)
xoutRgb = pipeline.create(dai.node.XLinkOut)

xoutJpeg.setStreamName("jpeg")
xoutRgb.setStreamName("rgb")

# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
# MJPEG keeps every frame independently decodable, at the camera's FPS
videoEnc.setDefaultProfilePreset(camRgb.getFps(), dai.VideoEncoderProperties.Profile.MJPEG)

# Linking
camRgb.video.link(xoutRgb.input)
camRgb.video.link(videoEnc.input)
videoEnc.bitstream.link(xoutJpeg.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Preview queue is non-blocking (frames may be dropped for display);
    # the jpeg queue is blocking so no encoded frame is lost.
    qRgb = device.getOutputQueue(name="rgb", maxSize=30, blocking=False)
    qJpeg = device.getOutputQueue(name="jpeg", maxSize=30, blocking=True)

    # Make sure the destination path is present before starting to store the examples
    dirName = Path("rgb_data")
    dirName.mkdir(parents=True, exist_ok=True)

    while True:
        inRgb = qRgb.tryGet()  # Non-blocking call, will return a new data that has arrived or None otherwise
        if inRgb is not None:
            cv2.imshow("rgb", inRgb.getCvFrame())

        for encFrame in qJpeg.tryGetAll():
            # Millisecond timestamp keeps filenames unique and sortable.
            # getData() supports the buffer protocol, so write it directly
            # instead of copying through bytearray().
            with open(dirName / f"{int(time.time() * 1000)}.jpeg", "wb") as f:
                f.write(encFrame.getData())

        if cv2.waitKey(1) == ord('q'):
            break
Also available on GitHub
#include <chrono>
#include <fstream>
#include <iostream>
#include <sstream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
#include "utility.hpp"

/// Streams 4K RGB frames both as a live preview and through an on-device
/// MJPEG encoder, saving each encoded frame to ./rgb_data/<epoch-ms>.jpeg
/// until 'q'/'Q' is pressed. Fills host storage quickly if left running.
int main() {
    using namespace std::chrono;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define source and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto videoEnc = pipeline.create<dai::node::VideoEncoder>();
    auto xoutJpeg = pipeline.create<dai::node::XLinkOut>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();

    xoutJpeg->setStreamName("jpeg");
    xoutRgb->setStreamName("rgb");

    // Properties
    camRgb->setBoardSocket(dai::CameraBoardSocket::RGB);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_4_K);
    // MJPEG keeps every frame independently decodable, at the camera's FPS
    videoEnc->setDefaultProfilePreset(camRgb->getFps(), dai::VideoEncoderProperties::Profile::MJPEG);

    // Linking
    camRgb->video.link(xoutRgb->input);
    camRgb->video.link(videoEnc->input);
    videoEnc->bitstream.link(xoutJpeg->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Preview queue is non-blocking (frames may be dropped for display);
    // the jpeg queue is blocking so no encoded frame is lost.
    auto qRgb = device.getOutputQueue("rgb", 30, false);
    auto qJpeg = device.getOutputQueue("jpeg", 30, true);

    // Make sure the destination path is present before starting to store frames
    const std::string dirName = "rgb_data";
    createDirectory(dirName);

    while(true) {
        auto inRgb = qRgb->tryGet<dai::ImgFrame>();
        if(inRgb != nullptr) {
            cv::imshow("rgb", inRgb->getCvFrame());
        }

        auto encFrames = qJpeg->tryGetAll<dai::ImgFrame>();
        for(const auto& encFrame : encFrames) {
            // Millisecond timestamp keeps filenames unique and sortable
            uint64_t time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
            std::stringstream videoStr;
            videoStr << dirName << "/" << time << ".jpeg";
            auto videoFile = std::ofstream(videoStr.str(), std::ios::binary);
            // Fetch the payload once; use a named cast rather than a C-style cast
            const auto& data = encFrame->getData();
            videoFile.write(reinterpret_cast<const char*>(data.data()), data.size());
        }

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}
Got questions?
We’re always happy to help with code or other questions you might have.