RGB Camera Control

This example shows how to control the device-side crop and camera triggers. The output is a cropped RGB frame displayed on the host, which can be manipulated with the following keys (a minimal sketch of the underlying control message follows the list):

  1. w will move the crop up

  2. a will move the crop left

  3. s will move the crop down

  4. d will move the crop right

  5. c will trigger a still capture; the current frame is captured and sent over the still output of the camera node

  6. t will trigger a one-shot autofocus (and disable continuous autofocus)

  7. f will enable continuous autofocus

  8. e will re-enable autoexposure

  9. i and o will decrease/increase the exposure time

  10. k and l will decrease/increase the sensitivity (ISO)

  11. , and . will decrease/increase the manual focus lens position

  12. b will re-enable auto white balance

  13. [ and ] will decrease/increase the white balance color temperature

  14. q will quit
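
As a quick illustration of what these key presses translate to (the full source below covers every key): an XLinkIn node with stream name control is linked to the camera's inputControl, and each key press builds a dai.CameraControl message that is sent over the corresponding input queue. A minimal, self-contained sketch of just that control path, shown here for the still-capture trigger ('c'):

import depthai as dai

pipeline = dai.Pipeline()
camRgb = pipeline.create(dai.node.ColorCamera)
controlIn = pipeline.create(dai.node.XLinkIn)
controlIn.setStreamName('control')
controlIn.out.link(camRgb.inputControl)

with dai.Device(pipeline) as device:
    controlQueue = device.getInputQueue('control')
    # What pressing 'c' does in the full example: request a still capture
    ctrl = dai.CameraControl()
    ctrl.setCaptureStill(True)
    controlQueue.send(ctrl)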

Setup

Please run the install script to download all required dependencies. Please note that this script must be run from inside the cloned repository, so first clone the depthai-python repository and then run the script:

git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py

For additional information, please follow the installation guide.

Source code

Python source code (also available on GitHub)

#!/usr/bin/env python3

"""
This example shows usage of Camera Control message as well as ColorCamera configInput to change crop x and y
Uses 'WASD' controls to move the crop window, 'C' to capture a still image, 'T' to trigger autofocus, 'IOKL,.[]'
for manual exposure/focus/white-balance:
  Control:      key[dec/inc]  min..max
  exposure time:     I   O      1..33000 [us]
  sensitivity iso:   K   L    100..1600
  focus:             ,   .      0..255 [far..near]
  white balance:     [   ]   1000..12000 (light color temperature K)
To go back to auto controls:
  'E' - autoexposure
  'F' - autofocus (continuous)
  'B' - auto white-balance
"""

import depthai as dai
import cv2

# Step size ('W','A','S','D' controls)
STEP_SIZE = 8
# Manual exposure/focus/white-balance set step
EXP_STEP = 500  # us
ISO_STEP = 50
LENS_STEP = 3
WB_STEP = 200

def clamp(num, v0, v1):
    return max(v0, min(num, v1))

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
videoEncoder = pipeline.create(dai.node.VideoEncoder)
stillEncoder = pipeline.create(dai.node.VideoEncoder)

controlIn = pipeline.create(dai.node.XLinkIn)
configIn = pipeline.create(dai.node.XLinkIn)
videoMjpegOut = pipeline.create(dai.node.XLinkOut)
stillMjpegOut = pipeline.create(dai.node.XLinkOut)
previewOut = pipeline.create(dai.node.XLinkOut)

controlIn.setStreamName('control')
configIn.setStreamName('config')
videoMjpegOut.setStreamName('video')
stillMjpegOut.setStreamName('still')
previewOut.setStreamName('preview')

# Properties
camRgb.setVideoSize(640, 360)
camRgb.setPreviewSize(300, 300)
videoEncoder.setDefaultProfilePreset(camRgb.getFps(), dai.VideoEncoderProperties.Profile.MJPEG)
stillEncoder.setDefaultProfilePreset(1, dai.VideoEncoderProperties.Profile.MJPEG)

# Linking
camRgb.video.link(videoEncoder.input)
camRgb.still.link(stillEncoder.input)
camRgb.preview.link(previewOut.input)
controlIn.out.link(camRgb.inputControl)
configIn.out.link(camRgb.inputConfig)
videoEncoder.bitstream.link(videoMjpegOut.input)
stillEncoder.bitstream.link(stillMjpegOut.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Get data queues
    controlQueue = device.getInputQueue('control')
    configQueue = device.getInputQueue('config')
    previewQueue = device.getOutputQueue('preview')
    videoQueue = device.getOutputQueue('video')
    stillQueue = device.getOutputQueue('still')

    # Max cropX & cropY
    maxCropX = (camRgb.getResolutionWidth() - camRgb.getVideoWidth()) / camRgb.getResolutionWidth()
    maxCropY = (camRgb.getResolutionHeight() - camRgb.getVideoHeight()) / camRgb.getResolutionHeight()

    # Default crop
    cropX = 0
    cropY = 0
    sendCamConfig = True

    # Defaults and limits for manual focus/exposure controls
    lensPos = 150
    lensMin = 0
    lensMax = 255

    expTime = 20000
    expMin = 1
    expMax = 33000

    sensIso = 800
    sensMin = 100
    sensMax = 1600
    
    wbManual = 4000
    wbMin = 1000
    wbMax = 12000

    while True:
        previewFrames = previewQueue.tryGetAll()
        for previewFrame in previewFrames:
            cv2.imshow('preview', previewFrame.getData().reshape(previewFrame.getHeight(), previewFrame.getWidth(), 3))

        videoFrames = videoQueue.tryGetAll()
        for videoFrame in videoFrames:
            # Decode JPEG
            frame = cv2.imdecode(videoFrame.getData(), cv2.IMREAD_UNCHANGED)
            # Display
            cv2.imshow('video', frame)

            # Send new cfg to camera
            if sendCamConfig:
                cfg = dai.ImageManipConfig()
                cfg.setCropRect(cropX, cropY, 0, 0)
                configQueue.send(cfg)
                print('Sending new crop - x: ', cropX, ' y: ', cropY)
                sendCamConfig = False

        stillFrames = stillQueue.tryGetAll()
        for stillFrame in stillFrames:
            # Decode JPEG
            frame = cv2.imdecode(stillFrame.getData(), cv2.IMREAD_UNCHANGED)
            # Display
            cv2.imshow('still', frame)

        # Update screen (1ms polling rate)
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('c'):
            ctrl = dai.CameraControl()
            ctrl.setCaptureStill(True)
            controlQueue.send(ctrl)
        elif key == ord('t'):
            print("Autofocus trigger (and disable continuous)")
            ctrl = dai.CameraControl()
            ctrl.setAutoFocusMode(dai.CameraControl.AutoFocusMode.AUTO)
            ctrl.setAutoFocusTrigger()
            controlQueue.send(ctrl)
        elif key == ord('f'):
            print("Autofocus enable, continuous")
            ctrl = dai.CameraControl()
            ctrl.setAutoFocusMode(dai.CameraControl.AutoFocusMode.CONTINUOUS_VIDEO)
            controlQueue.send(ctrl)
        elif key == ord('e'):
            print("Autoexposure enable")
            ctrl = dai.CameraControl()
            ctrl.setAutoExposureEnable()
            controlQueue.send(ctrl)
        elif key == ord('b'):
            print("Auto white-balance enable")
            ctrl = dai.CameraControl()
            ctrl.setAutoWhiteBalanceMode(dai.CameraControl.AutoWhiteBalanceMode.AUTO)
            controlQueue.send(ctrl)
        elif key in [ord(','), ord('.')]:
            if key == ord(','): lensPos -= LENS_STEP
            if key == ord('.'): lensPos += LENS_STEP
            lensPos = clamp(lensPos, lensMin, lensMax)
            print("Setting manual focus, lens position: ", lensPos)
            ctrl = dai.CameraControl()
            ctrl.setManualFocus(lensPos)
            controlQueue.send(ctrl)
        elif key in [ord('i'), ord('o'), ord('k'), ord('l')]:
            if key == ord('i'): expTime -= EXP_STEP
            if key == ord('o'): expTime += EXP_STEP
            if key == ord('k'): sensIso -= ISO_STEP
            if key == ord('l'): sensIso += ISO_STEP
            expTime = clamp(expTime, expMin, expMax)
            sensIso = clamp(sensIso, sensMin, sensMax)
            print("Setting manual exposure, time: ", expTime, "iso: ", sensIso)
            ctrl = dai.CameraControl()
            ctrl.setManualExposure(expTime, sensIso)
            controlQueue.send(ctrl)
        elif key in [ord('['), ord(']')]:
            if key == ord('['): wbManual -= WB_STEP
            if key == ord(']'): wbManual += WB_STEP
            wbManual = clamp(wbManual, wbMin, wbMax)
            print("Setting manual white balance, temperature: ", wbManual, "K")
            ctrl = dai.CameraControl()
            ctrl.setManualWhiteBalance(wbManual)
            controlQueue.send(ctrl)
        elif key in [ord('w'), ord('a'), ord('s'), ord('d')]:
            if key == ord('a'):
                cropX = cropX - (maxCropX / camRgb.getResolutionWidth()) * STEP_SIZE
                if cropX < 0: cropX = maxCropX
            elif key == ord('d'):
                cropX = cropX + (maxCropX / camRgb.getResolutionWidth()) * STEP_SIZE
                if cropX > maxCropX: cropX = 0
            elif key == ord('w'):
                cropY = cropY - (maxCropY / camRgb.getResolutionHeight()) * STEP_SIZE
                if cropY < 0: cropY = maxCropY
            elif key == ord('s'):
                cropY = cropY + (maxCropY / camRgb.getResolutionHeight()) * STEP_SIZE
                if cropY > maxCropY: cropY = 0
            sendCamConfig = True
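
Since the still stream is an MJPEG bitstream, a captured frame can also be written straight to disk instead of only being displayed. A minimal sketch that would go inside the stillFrames loop above (the filename is arbitrary):

# Save the encoded JPEG bytes directly; no re-encoding is needed
with open('still.jpeg', 'wb') as f:
    f.write(stillFrame.getData())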

C++ source code (also available on GitHub)

/**
 * This example shows usage of Camera Control message as well as ColorCamera configInput to change crop x and y
 * Uses 'WASD' controls to move the crop window, 'C' to capture a still image, 'T' to trigger autofocus, 'IOKL,.[]'
 * for manual exposure/focus/white-balance:
 *   Control:      key[dec/inc]  min..max
 *   exposure time:     I   O      1..33000 [us]
 *   sensitivity iso:   K   L    100..1600
 *   focus:             ,   .      0..255 [far..near]
 *   white balance:     [   ]   1000..12000 (light color temperature K)
 * To go back to auto controls:
 *   'E' - autoexposure
 *   'F' - autofocus (continuous)
 *   'B' - auto white-balance
 */
#include <iostream>

#include "utility.hpp"

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"

// Step size ('W','A','S','D' controls)
static constexpr int STEP_SIZE = 8;

// Manual exposure/focus set step
static constexpr int EXP_STEP = 500;  // us
static constexpr int ISO_STEP = 50;
static constexpr int LENS_STEP = 3;
static constexpr int WB_STEP = 200;

static int clamp(int num, int v0, int v1) {
    return std::max(v0, std::min(num, v1));
}

int main() {
    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto videoEncoder = pipeline.create<dai::node::VideoEncoder>();
    auto stillEncoder = pipeline.create<dai::node::VideoEncoder>();

    auto controlIn = pipeline.create<dai::node::XLinkIn>();
    auto configIn = pipeline.create<dai::node::XLinkIn>();
    auto videoMjpegOut = pipeline.create<dai::node::XLinkOut>();
    auto stillMjpegOut = pipeline.create<dai::node::XLinkOut>();
    auto previewOut = pipeline.create<dai::node::XLinkOut>();

    controlIn->setStreamName("control");
    configIn->setStreamName("config");
    videoMjpegOut->setStreamName("video");
    stillMjpegOut->setStreamName("still");
    previewOut->setStreamName("preview");

    // Properties
    camRgb->setVideoSize(640, 360);
    camRgb->setPreviewSize(300, 300);
    videoEncoder->setDefaultProfilePreset(camRgb->getFps(), dai::VideoEncoderProperties::Profile::MJPEG);
    stillEncoder->setDefaultProfilePreset(1, dai::VideoEncoderProperties::Profile::MJPEG);

    // Linking
    camRgb->video.link(videoEncoder->input);
    camRgb->still.link(stillEncoder->input);
    camRgb->preview.link(previewOut->input);
    controlIn->out.link(camRgb->inputControl);
    configIn->out.link(camRgb->inputConfig);
    videoEncoder->bitstream.link(videoMjpegOut->input);
    stillEncoder->bitstream.link(stillMjpegOut->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Get data queues
    auto controlQueue = device.getInputQueue("control");
    auto configQueue = device.getInputQueue("config");
    auto previewQueue = device.getOutputQueue("preview");
    auto videoQueue = device.getOutputQueue("video");
    auto stillQueue = device.getOutputQueue("still");

    // Max cropX & cropY
    float maxCropX = (camRgb->getResolutionWidth() - camRgb->getVideoWidth()) / (float)camRgb->getResolutionWidth();
    float maxCropY = (camRgb->getResolutionHeight() - camRgb->getVideoHeight()) / (float)camRgb->getResolutionHeight();

    // Default crop
    float cropX = 0;
    float cropY = 0;
    bool sendCamConfig = true;

    // Defaults and limits for manual focus/exposure controls
    int lensPos = 150;
    int lensMin = 0;
    int lensMax = 255;

    int expTime = 20000;
    int expMin = 1;
    int expMax = 33000;

    int sensIso = 800;
    int sensMin = 100;
    int sensMax = 1600;

    int wbManual = 4000;
    int wbMin = 1000;
    int wbMax = 12000;

    while(true) {
        auto previewFrames = previewQueue->tryGetAll<dai::ImgFrame>();
        for(const auto& previewFrame : previewFrames) {
            cv::Mat frame(previewFrame->getHeight(), previewFrame->getWidth(), CV_8UC3, previewFrame->getData().data());
            cv::imshow("preview", frame);
        }

        auto videoFrames = videoQueue->tryGetAll<dai::ImgFrame>();
        for(const auto& videoFrame : videoFrames) {
            // Decode JPEG
            auto frame = cv::imdecode(videoFrame->getData(), cv::IMREAD_UNCHANGED);
            // Display
            cv::imshow("video", frame);

            // Send new cfg to camera
            if(sendCamConfig) {
                dai::ImageManipConfig cfg;
                cfg.setCropRect(cropX, cropY, 0, 0);
                configQueue->send(cfg);
                printf("Sending new crop - x: %f, y: %f\n", cropX, cropY);
                sendCamConfig = false;
            }
        }

        auto stillFrames = stillQueue->tryGetAll<dai::ImgFrame>();
        for(const auto& stillFrame : stillFrames) {
            // Decode JPEG
            auto frame = cv::imdecode(stillFrame->getData(), cv::IMREAD_UNCHANGED);
            // Display
            cv::imshow("still", frame);
        }

        // Update screen (1ms polling rate)
        int key = cv::waitKey(1);
        if(key == 'q') {
            break;
        } else if(key == 'c') {
            dai::CameraControl ctrl;
            ctrl.setCaptureStill(true);
            controlQueue->send(ctrl);
        } else if(key == 't') {
            printf("Autofocus trigger (and disable continuous)\n");
            dai::CameraControl ctrl;
            ctrl.setAutoFocusMode(dai::CameraControl::AutoFocusMode::AUTO);
            ctrl.setAutoFocusTrigger();
            controlQueue->send(ctrl);
        } else if(key == 'f') {
            printf("Autofocus enable, continuous\n");
            dai::CameraControl ctrl;
            ctrl.setAutoFocusMode(dai::CameraControl::AutoFocusMode::CONTINUOUS_VIDEO);
            controlQueue->send(ctrl);
        } else if(key == 'e') {
            printf("Autoexposure enable\n");
            dai::CameraControl ctrl;
            ctrl.setAutoExposureEnable();
            controlQueue->send(ctrl);
        } else if(key == 'b') {
            printf("Auto white-balance enable\n");
            dai::CameraControl ctrl;
            ctrl.setAutoWhiteBalanceMode(dai::CameraControl::AutoWhiteBalanceMode::AUTO);
            controlQueue->send(ctrl);
        } else if(key == ',' || key == '.') {
            if(key == ',') lensPos -= LENS_STEP;
            if(key == '.') lensPos += LENS_STEP;
            lensPos = clamp(lensPos, lensMin, lensMax);
            printf("Setting manual focus, lens position: %d\n", lensPos);
            dai::CameraControl ctrl;
            ctrl.setManualFocus(lensPos);
            controlQueue->send(ctrl);
        } else if(key == 'i' || key == 'o' || key == 'k' || key == 'l') {
            if(key == 'i') expTime -= EXP_STEP;
            if(key == 'o') expTime += EXP_STEP;
            if(key == 'k') sensIso -= ISO_STEP;
            if(key == 'l') sensIso += ISO_STEP;
            expTime = clamp(expTime, expMin, expMax);
            sensIso = clamp(sensIso, sensMin, sensMax);
            printf("Setting manual exposure, time: %d, iso: %d\n", expTime, sensIso);
            dai::CameraControl ctrl;
            ctrl.setManualExposure(expTime, sensIso);
            controlQueue->send(ctrl);
        } else if(key == '[' || key == ']') {
            if(key == '[') wbManual -= WB_STEP;
            if(key == ']') wbManual += WB_STEP;
            wbManual = clamp(wbManual, wbMin, wbMax);
            printf("Setting manual white balance, temperature: %d K\n", wbManual);
            dai::CameraControl ctrl;
            ctrl.setManualWhiteBalance(wbManual);
            controlQueue->send(ctrl);
        } else if(key == 'w' || key == 'a' || key == 's' || key == 'd') {
            if(key == 'a') {
                cropX -= (maxCropX / camRgb->getResolutionWidth()) * STEP_SIZE;
                if(cropX < 0) cropX = maxCropX;
            } else if(key == 'd') {
                cropX += (maxCropX / camRgb->getResolutionWidth()) * STEP_SIZE;
                if(cropX > maxCropX) cropX = 0.0f;
            } else if(key == 'w') {
                cropY -= (maxCropY / camRgb->getResolutionHeight()) * STEP_SIZE;
                if(cropY < 0) cropY = maxCropY;
            } else if(key == 's') {
                cropY += (maxCropY / camRgb->getResolutionHeight()) * STEP_SIZE;
                if(cropY > maxCropY) cropY = 0.0f;
            }
            sendCamConfig = true;
        }
    }
    return 0;
}
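
Note that the crop rectangle sent via setCropRect is expressed in coordinates normalized to the full sensor resolution, which is why maxCropX and maxCropY come out as fractions between 0 and 1. A rough worked example in Python, assuming the ColorCamera default 1080p sensor resolution (this example does not set it explicitly):

# 1920x1080 sensor (assumed default), 640x360 video output as configured above
maxCropX = (1920 - 640) / 1920   # ~0.667: the crop window can slide across about 2/3 of the sensor width
maxCropY = (1080 - 360) / 1080   # ~0.667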
