YARP (Yet Another Robot Platform)
V4L_camera.cpp
1/*
2 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
3 * SPDX-License-Identifier: LGPL-2.1-or-later
4 */
5
6
7#include "V4L_camera.h"
8#include "list.h"
9#include "USBcameraLogComponent.h"
10
11#include <yarp/os/LogStream.h>
12#include <yarp/os/Time.h>
13#include <yarp/os/Value.h>
14
15#include <cstdio>
16#include <ctime>
17#include <opencv2/core/core.hpp>
18#include <opencv2/imgproc/imgproc.hpp>
19#include <opencv2/core/core_c.h>
20
21using namespace yarp::os;
22using namespace yarp::dev;
23
24
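// V4L2 buffer timestamps are typically taken from the monotonic clock, while
// YARP stamps use wall-clock (epoch) time. This helper returns the constant
// offset (epoch - uptime); frameRead() adds it to buf.timestamp (via
// toEpochOffset) to convert driver timestamps into epoch-based yarp::os::Stamp values.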
25static double getEpochTimeShift()
26{
27 struct timeval epochtime;
28 struct timespec vsTime;
29
30 gettimeofday(&epochtime, nullptr);
31 clock_gettime(CLOCK_MONOTONIC, &vsTime);
32
33 double uptime = vsTime.tv_sec + vsTime.tv_nsec / 1000000000.0;
34 double epoch = epochtime.tv_sec + epochtime.tv_usec / 1000000.0;
35 return epoch - uptime;
36}
37
38
39double V4L_camera::checkDouble(yarp::os::Searchable& config, const char* key)
40{
41 if (config.check(key)) {
42 return config.find(key).asFloat64();
43 }
44
45 return -1.0;
46}
47
48#define NOT_PRESENT -1
49int V4L_camera::convertYARP_to_V4L(cameraFeature_id_t feature)
50{
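 // Map a YARP cameraFeature_id_t onto the corresponding V4L2 control id
 // (V4L2_CID_*). Features without a direct V4L2 counterpart return NOT_PRESENT.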
51 switch (feature) {
52 case cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS:
53 return V4L2_CID_BRIGHTNESS;
54 case cameraFeature_id_t::YARP_FEATURE_SHUTTER: // this maps also on exposure
55 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
56 return V4L2_CID_EXPOSURE;
57 case cameraFeature_id_t::YARP_FEATURE_SHARPNESS:
58 return V4L2_CID_SHARPNESS;
59 case cameraFeature_id_t::YARP_FEATURE_HUE:
60 return V4L2_CID_HUE;
61 case cameraFeature_id_t::YARP_FEATURE_SATURATION:
62 return V4L2_CID_SATURATION;
63 case cameraFeature_id_t::YARP_FEATURE_GAMMA:
64 return V4L2_CID_GAMMA;
65 case cameraFeature_id_t::YARP_FEATURE_GAIN:
66 return V4L2_CID_GAIN;
67 case cameraFeature_id_t::YARP_FEATURE_IRIS:
68 return V4L2_CID_IRIS_ABSOLUTE;
69 default:
70 return NOT_PRESENT;
71 // case YARP_FEATURE_WHITE_BALANCE: -> this has to be mapped onto the pair V4L2_CID_BLUE_BALANCE && V4L2_CID_RED_BALANCE
72
74 // not yet implemented //
76 // case YARP_FEATURE_FOCUS: return DC1394_FEATURE_FOCUS;
77 // case YARP_FEATURE_TEMPERATURE: return DC1394_FEATURE_TEMPERATURE;
78 // case YARP_FEATURE_TRIGGER: return DC1394_FEATURE_TRIGGER;
79 // case YARP_FEATURE_TRIGGER_DELAY: return DC1394_FEATURE_TRIGGER_DELAY;
80 // case YARP_FEATURE_FRAME_RATE: return DC1394_FEATURE_FRAME_RATE;
81 // case YARP_FEATURE_ZOOM: return DC1394_FEATURE_ZOOM;
82 // case YARP_FEATURE_PAN: return DC1394_FEATURE_PAN;
83 // case YARP_FEATURE_TILT: return DC1394_FEATURE_TILT;
84 }
85 return NOT_PRESENT;
86}
87
88V4L_camera::V4L_camera() :
89 PeriodicThread(1.0 / DEFAULT_FRAMERATE), doCropping(false), toEpochOffset(getEpochTimeShift())
90{
91 verbose = false;
92 param.fps = DEFAULT_FRAMERATE;
93 param.io = IO_METHOD_MMAP;
94 param.deviceId = "/dev/video0";
95 param.fd = -1;
96 param.n_buffers = 0;
97 param.buffers = nullptr;
98 param.camModel = STANDARD_UVC;
99 param.dual = false;
100
101 param.addictionalResize = false;
102 param.resizeOffset_x = 0;
103 param.resizeOffset_y = 0;
104 param.resizeWidth = 0;
105 param.resizeHeight = 0;
106
107 _v4lconvert_data = YARP_NULLPTR;
108 myCounter = 0;
109 timeTot = 0;
110
113 param.raw_image = YARP_NULLPTR;
114 param.raw_image_size = 0;
115 param.read_image = YARP_NULLPTR;
116
117 param.src_image = YARP_NULLPTR;
118 param.src_image_size = 0;
119
120 param.dst_image_rgb = YARP_NULLPTR;
121 param.dst_image_size_rgb = 0;
122
123 use_exposure_absolute = false;
124 camMap["default"] = STANDARD_UVC;
125 camMap["leopard_python"] = LEOPARD_PYTHON;
126
127 configFx = false;
128 configFy = false;
129 configPPx = false;
130 configPPy = false;
131 configRet = false;
132 configDistM = false;
133 configIntrins = false;
134 configured = false;
135
136 // leopard debugging
137 pixel_fmt_leo = V4L2_PIX_FMT_SGRBG8;
138 bit_shift = 2; // after firmware update, the shift has to be 2 instead of 4
139 bit_bayer = 8;
140}
141
142yarp::os::Stamp V4L_camera::getLastInputStamp()
143{
144 return timeStamp;
145}
146
147int V4L_camera::convertV4L_to_YARP_format(int format)
148{
149 switch (format) {
150 case V4L2_PIX_FMT_GREY:
151 return VOCAB_PIXEL_MONO;
152 case V4L2_PIX_FMT_Y16:
153 return VOCAB_PIXEL_MONO16;
154 case V4L2_PIX_FMT_RGB24:
155 return VOCAB_PIXEL_RGB;
156// case V4L2_PIX_FMT_ABGR32 : return VOCAB_PIXEL_BGRA; //unsupported by linux travis configuration
157 case V4L2_PIX_FMT_BGR24:
158 return VOCAB_PIXEL_BGR;
159 case V4L2_PIX_FMT_SGRBG8:
160 return VOCAB_PIXEL_ENCODING_BAYER_GRBG8;
161 case V4L2_PIX_FMT_SGBRG8:
162 return VOCAB_PIXEL_ENCODING_BAYER_GBRG8;
163 case V4L2_PIX_FMT_SRGGB8:
164 return VOCAB_PIXEL_ENCODING_BAYER_RGGB8;
165 case V4L2_PIX_FMT_SBGGR8:
166 return VOCAB_PIXEL_ENCODING_BAYER_BGGR8;
167 case V4L2_PIX_FMT_SBGGR16:
168 return VOCAB_PIXEL_ENCODING_BAYER_BGGR16;
169 case V4L2_PIX_FMT_YUV420:
170 return VOCAB_PIXEL_YUV_420;
172 return VOCAB_PIXEL_YUV_444;
174 return VOCAB_PIXEL_YUV_422;
176 return VOCAB_PIXEL_YUV_411;
177 }
178 return NOT_PRESENT;
179}
180
181void V4L_camera::populateConfigurations()
182{
183 struct v4l2_fmtdesc fmt;
184 struct v4l2_frmsizeenum frmsize;
185 struct v4l2_frmivalenum frmival;
186
187 fmt.index = 0;
188 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
189
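 // Enumerate every (pixel format, frame size, frame interval) triple exposed by
 // the driver: VIDIOC_ENUM_FMT lists the pixel formats, VIDIOC_ENUM_FRAMESIZES
 // the discrete resolutions for each format, and VIDIOC_ENUM_FRAMEINTERVALS the
 // frame rates for each resolution. Each combination becomes a CameraConfig
 // entry in param.configurations (see getRgbSupportedConfigurations()).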
190 while (ioctl(param.fd, VIDIOC_ENUM_FMT, &fmt) >= 0) {
191 memset(&frmsize, 0, sizeof(v4l2_frmsizeenum));
192 frmsize.pixel_format = fmt.pixelformat;
193 frmsize.index = 0;
195 while (xioctl(param.fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) >= 0) {
197 memset(&frmival, 0, sizeof(v4l2_frmivalenum));
198 frmival.index = 0;
199 frmival.pixel_format = fmt.pixelformat;
200 frmival.width = frmsize.discrete.width;
201 frmival.height = frmsize.discrete.height;
203 while (xioctl(param.fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) >= 0) {
204 CameraConfig c;
205 c.pixelCoding = (YarpVocabPixelTypesEnum)convertV4L_to_YARP_format(frmival.pixel_format);
206 c.width = frmival.width;
207 c.height = frmival.height;
208 c.framerate = (1.0 * frmival.discrete.denominator) / frmival.discrete.numerator;
209 param.configurations.push_back(c);
210 frmival.index++;
211 }
212 }
213 frmsize.index++;
214 }
215 fmt.index++;
216 }
217}
218
222bool V4L_camera::open(yarp::os::Searchable& config)
223{
224 struct stat st;
225 yCTrace(USBCAMERA) << "input params are " << config.toString();
226
227
228 if (!fromConfig(config)) {
229 return false;
230 }
231
232 // stat file
233 if (-1 == stat(param.deviceId.c_str(), &st)) {
234 yCError(USBCAMERA, "Cannot identify '%s': %d, %s", param.deviceId.c_str(), errno, strerror(errno));
235 return false;
236 }
237
238 // check if it is a device
239 if (!S_ISCHR(st.st_mode)) {
240 yCError(USBCAMERA, "%s is not a device", param.deviceId.c_str());
241 return false;
242 }
243
244 // open device
245 param.fd = v4l2_open(param.deviceId.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
246
247 // check if opening was successful
248 if (-1 == param.fd) {
249 yCError(USBCAMERA, "Cannot open '%s': %d, %s", param.deviceId.c_str(), errno, strerror(errno));
250 return false;
251 }
252
253 // if a previous instance crashed, resetting the device may help (?)
254 captureStop();
255 deviceUninit();
256 v4l2_close(param.fd);
257
259 // re-open device
260 param.fd = v4l2_open(param.deviceId.c_str(), O_RDWR /* required */ | O_NONBLOCK, 0);
261
262 // check if opening was successful
263 if (-1 == param.fd) {
264 yCError(USBCAMERA, "Cannot open '%s': %d, %s", param.deviceId.c_str(), errno, strerror(errno));
265 return false;
266 }
267
268
269 // Initializing the video device
270 deviceInit();
271 if (verbose) {
272 enumerate_controls();
273 }
274 if (!check_V4L2_control(V4L2_CID_EXPOSURE)) {
275 use_exposure_absolute = check_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE);
276 }
277 captureStart();
279 start();
280
281 populateConfigurations();
282
283 // Configure the device settings from input file
284 setFeature(cameraFeature_id_t::YARP_FEATURE_GAIN, checkDouble(config, "gain"));
285 setFeature(cameraFeature_id_t::YARP_FEATURE_EXPOSURE, checkDouble(config, "exposure"));
286 setFeature(cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS, checkDouble(config, "brightness"));
287 setFeature(cameraFeature_id_t::YARP_FEATURE_SHARPNESS, checkDouble(config, "sharpness"));
288 yarp::os::Bottle& white_balance = config.findGroup("white_balance");
289 if (!white_balance.isNull()) {
290 setFeature(cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE, white_balance.get(2).asFloat64(), white_balance.get(1).asFloat64());
291 }
292 setFeature(cameraFeature_id_t::YARP_FEATURE_HUE, checkDouble(config, "hue"));
293 setFeature(cameraFeature_id_t::YARP_FEATURE_SATURATION, checkDouble(config, "saturation"));
294 setFeature(cameraFeature_id_t::YARP_FEATURE_GAMMA, checkDouble(config, "gamma"));
295 setFeature(cameraFeature_id_t::YARP_FEATURE_SHUTTER, checkDouble(config, "shutter"));
296 setFeature(cameraFeature_id_t::YARP_FEATURE_IRIS, checkDouble(config, "iris"));
297
298 return true;
299}
300
301int V4L_camera::getRgbHeight()
302{
303 return height();
304}
305
306int V4L_camera::getRgbWidth()
307{
308 return width();
309}
310
311ReturnValue V4L_camera::getRgbSupportedConfigurations(std::vector<CameraConfig>& configurations)
312{
313 configurations = param.configurations;
314 return ReturnValue_ok;
315}
316ReturnValue V4L_camera::getRgbResolution(int& width, int& height)
317{
318 width = param.user_width;
319 height = param.user_height;
320 return ReturnValue_ok;
321}
322
323ReturnValue V4L_camera::setRgbResolution(int width, int height)
324{
325 mutex.wait();
326 captureStop();
327 deviceUninit();
328 param.user_width = width;
329 param.user_height = height;
330 bool res = deviceInit();
331 captureStart();
332 mutex.post();
333 if (res) return ReturnValue_ok;
334 return ReturnValue::return_code::return_value_error_method_failed;
335}
336
337ReturnValue V4L_camera::getRgbFOV(double& horizontalFov, double& verticalFov)
338{
339 horizontalFov = param.horizontalFov;
340 verticalFov = param.verticalFov;
341 bool b = configFx && configFy;
342 if (b) return ReturnValue_ok;
343 return ReturnValue::return_code::return_value_error_method_failed;
344}
345
346ReturnValue V4L_camera::setRgbFOV(double horizontalFov, double verticalFov)
347{
348 yCError(USBCAMERA) << "cannot set fov";
349 return ReturnValue::return_code::return_value_error_not_implemented_by_device;
350}
351
352ReturnValue V4L_camera::getRgbIntrinsicParam(yarp::os::Property& intrinsic)
353{
354 intrinsic = param.intrinsic;
355 return ReturnValue_ok;
356}
357
358ReturnValue V4L_camera::getRgbMirroring(bool& mirror)
359{
360
361 mirror = (ioctl(param.fd, V4L2_CID_HFLIP) != 0);
362 return ReturnValue_ok;
363}
364
365ReturnValue V4L_camera::setRgbMirroring(bool mirror)
366{
367 int ret = ioctl(param.fd, V4L2_CID_HFLIP, &mirror);
368 if (ret < 0) {
369 yCError(USBCAMERA) << "V4L2_CID_HFLIP - Unable to mirror image-" << strerror(errno);
370 return ReturnValue::return_code::return_value_error_method_failed;
371 }
372 return ReturnValue_ok;
373}
374
375bool V4L_camera::fromConfig(yarp::os::Searchable& config)
376{
377 if (config.check("verbose")) {
378 verbose = true;
379 }
380
381 if (!config.check("width")) {
382 yCDebug(USBCAMERA) << "width parameter not found, using default value of " << DEFAULT_WIDTH;
383 param.user_width = DEFAULT_WIDTH;
384 } else {
385 param.user_width = config.find("width").asInt32();
386 }
387
388 if (!config.check("height")) {
389 yCDebug(USBCAMERA) << "height parameter not found, using default value of " << DEFAULT_HEIGHT;
390 param.user_height = DEFAULT_HEIGHT;
391 } else {
392 param.user_height = config.find("height").asInt32();
393 }
394
395 if (!config.check("framerate")) {
396 yCDebug(USBCAMERA) << "framerate parameter not found, using default value of " << DEFAULT_FRAMERATE;
397 param.fps = DEFAULT_FRAMERATE;
398 } else {
399 param.fps = config.find("framerate").asInt32();
400 }
401
402 if (!config.check("d")) {
403 yCError(USBCAMERA) << "No camera identifier was specified! (e.g. '--d /dev/video0' on Linux OS)";
404 return false;
405 }
406
407 param.deviceId = config.find("d").asString();
408 param.flip = config.check("flip", Value("false")).asBool();
409
410 if (!config.check("camModel")) {
411 yCInfo(USBCAMERA) << "No 'camModel' was specified, working with 'standard' uvc";
412 param.camModel = STANDARD_UVC;
413 } else {
414 std::map<std::string, supported_cams>::iterator it = camMap.find(config.find("camModel").asString());
415 if (it != camMap.end()) {
416 param.camModel = it->second;
417 yCDebug(USBCAMERA) << "cam model name : " << config.find("camModel").asString() << " -- number : " << it->second;
418 } else {
419 yCError(USBCAMERA) << "Unknown camera model <" << config.find("camModel").asString() << ">";
420 yCInfo(USBCAMERA) << "Supported models are: ";
421 for (it = camMap.begin(); it != camMap.end(); it++) {
422 yCInfo(USBCAMERA, " <%s>", it->first.c_str());
423 }
424 return false;
425 }
426 }
427
428 // Check for additional Leopard parameters for debugging purposes
429 if (param.camModel == LEOPARD_PYTHON) {
430 yCDebug(USBCAMERA) << "-------------------------------\nusbCamera: Using leopard camera!!";
431 bit_shift = config.check("shift", Value(bit_shift), "right shift of <n> bits").asInt32();
432 bit_bayer = config.check("bit_bayer", Value(bit_bayer), "uses <n> bits bayer conversion").asInt32();
433 switch (bit_bayer) {
434 case 8:
435 pixel_fmt_leo = V4L2_PIX_FMT_SGRBG8;
436 break;
437
438 case 10:
439 pixel_fmt_leo = V4L2_PIX_FMT_SGRBG10;
440 break;
441
442 case 12:
443 pixel_fmt_leo = V4L2_PIX_FMT_SGRBG12;
444 break;
445
446 default:
447 yCError(USBCAMERA) << "bayer conversion with" << bit_bayer << "bits is not supported";
448 return false;
449 }
450
451 yCDebug(USBCAMERA) << "--------------------------------";
452 yCDebug(USBCAMERA) << bit_shift << "bits of right shift applied to raw data";
453 yCDebug(USBCAMERA) << "Bits used for de-bayer " << bit_bayer;
454 }
455
456 //crop is used to pass from 16:9 to 4:3
457 if (config.check("crop")) {
458 doCropping = true;
459 yCInfo(USBCAMERA, "Cropping enabled.");
460 } else {
461 doCropping = false;
462 }
463
464 Value isDual = config.check("dual", Value(0), "Is this a dual camera? Two cameras merged into a single frame");
465
466 if (config.find("dual").asBool()) {
467 param.dual = true;
468 yCInfo(USBCAMERA, "Using dual input camera.");
469 } else {
470 param.dual = false;
471 }
472
473 int type = 0;
474 if (!config.check("pixelType")) {
475 yCError(USBCAMERA) << "No 'pixelType' was specified!";
476 return false;
477 }
478 {
479 type = config.find("pixelType").asInt32();
480 }
481
482 switch (type) {
483 case VOCAB_PIXEL_MONO:
484 // Pixel type raw is the native one from the camera
485 param.pixelType = convertV4L_to_YARP_format(param.src_fmt.fmt.pix.pixelformat);
486 break;
487
488 case VOCAB_PIXEL_RGB:
489 // is variable param.pixelType really required??
490 param.pixelType = V4L2_PIX_FMT_RGB24;
491 break;
492
493 default:
494 yCError(USBCAMERA, "no valid pixel format found!! This should not happen!!");
495 return false;
496 break;
497 }
498 Value* retM;
499 retM = Value::makeList("1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 1.0");
500 configFx = config.check("horizontalFov");
501 configFy = config.check("verticalFov");
502 configPPx = config.check("principalPointX");
503 configPPy = config.check("principalPointY");
504 configRet = config.check("rectificationMatrix");
505 configDistM = config.check("distortionModel");
506 Bottle bt;
507 bt = config.findGroup("cameraDistortionModelGroup");
508 if (!bt.isNull()) {
509 if (bt.find("name").isNull() || bt.find("k1").isNull()
510 || bt.find("k2").isNull() || bt.find("k3").isNull()
511 || bt.find("t1").isNull() || bt.find("t2").isNull()) {
512 yCError(USBCAMERA) << "group cameraDistortionModelGroup incomplete, "
513 "fields k1, k2, k3, t1, t2, name are required when using cameraDistortionModelGroup";
514 configIntrins = false;
515 return false;
516 }
517 {
518 configIntrins = true;
519 }
520 } else {
521 configIntrins = false;
522 }
523 param.horizontalFov = config.check("horizontalFov", Value(0.0), "desired horizontal fov of test image").asFloat64();
524 param.verticalFov = config.check("verticalFov", Value(0.0), "desired vertical fov of test image").asFloat64();
525 if (config.check("mirror")) {
526 if (!setRgbMirroring(config.check("mirror",
527 Value(0),
528 "mirroring disabled by default")
529 .asBool())) {
530 yCError(USBCAMERA, "cannot set mirroring option");
531 return false;
532 }
533 }
534
535 param.intrinsic.put("focalLengthX", config.check("focalLengthX", Value(0.0), "Horizontal component of the focal length").asFloat64());
536 param.intrinsic.put("focalLengthY", config.check("focalLengthY", Value(0.0), "Vertical component of the focal length").asFloat64());
537 param.intrinsic.put("principalPointX", config.check("principalPointX", Value(0.0), "X coordinate of the principal point").asFloat64());
538 param.intrinsic.put("principalPointY", config.check("principalPointY", Value(0.0), "Y coordinate of the principal point").asFloat64());
539 param.intrinsic.put("rectificationMatrix", config.check("rectificationMatrix", *retM, "Matrix that describes the lens' distortion"));
540 param.intrinsic.put("distortionModel", config.check("distortionModel", Value(""), "Reference to group of parameters describing the distortion model of the camera").asString());
541 if (bt.isNull()) {
542 param.intrinsic.put("name", "");
543 param.intrinsic.put("k1", 0.0);
544 param.intrinsic.put("k2", 0.0);
545 param.intrinsic.put("k3", 0.0);
546 param.intrinsic.put("t1", 0.0);
547 param.intrinsic.put("t2", 0.0);
548 } else {
549 param.intrinsic.put("name", bt.check("name", Value(""), "Name of the distortion model").asString());
550 param.intrinsic.put("k1", bt.check("k1", Value(0.0), "Radial distortion coefficient of the lens").asFloat64());
551 param.intrinsic.put("k2", bt.check("k2", Value(0.0), "Radial distortion coefficient of the lens").asFloat64());
552 param.intrinsic.put("k3", bt.check("k3", Value(0.0), "Radial distortion coefficient of the lens").asFloat64());
553 param.intrinsic.put("t1", bt.check("t1", Value(0.0), "Tangential distortion of the lens").asFloat64());
554 param.intrinsic.put("t2", bt.check("t2", Value(0.0), "Tangential distortion of the lens").asFloat64());
555 }
556 delete retM;
557
558 yCDebug(USBCAMERA) << "using the following device" << param.deviceId << "with the configuration:" << param.user_width << "x" << param.user_height << "; camModel is" << param.camModel;
559 return true;
560}
561
562int V4L_camera::getfd()
563{
564 return param.fd;
565}
566
567bool V4L_camera::threadInit()
568{
570
571 timeStart = timeNow = timeElapsed = yarp::os::Time::now();
572 frameCounter = 0;
573 return true;
574}
575
576void V4L_camera::run()
577{
578 if (full_FrameRead()) {
579 frameCounter++;
580 } else {
581 yCError(USBCAMERA) << "Failed acquiring new frame";
582 }
583
584 timeNow = yarp::os::Time::now();
585 if ((timeElapsed = timeNow - timeStart) > 1.0f) {
586 yCInfo(USBCAMERA, "frames acquired %d in %f sec", frameCounter, timeElapsed);
587 frameCounter = 0;
588 timeStart = timeNow;
589 }
590}
591
592void V4L_camera::threadRelease()
593{
595}
596
597
601bool V4L_camera::deviceInit()
602{
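 // Initialize the video device: query its capabilities, reset cropping to the
 // default rectangle, negotiate source/destination formats through libv4lconvert,
 // apply them with VIDIOC_S_FMT / VIDIOC_S_PARM, allocate the intermediate image
 // buffers and finally set up the buffers for the selected I/O method.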
603 struct v4l2_capability cap;
604 struct v4l2_cropcap cropcap;
605 struct v4l2_crop crop;
606 struct v4l2_streamparm frameint;
607 configured = false;
608
609 if (-1 == xioctl(param.fd, VIDIOC_QUERYCAP, &cap)) {
610 if (EINVAL == errno) {
611 yCError(USBCAMERA, "%s is not a V4L2 device", param.deviceId.c_str());
612 }
613 return false;
614 }
615
616 if (verbose) {
617 list_cap_v4l2(param.fd);
618 }
619
620 if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
621 yCError(USBCAMERA, "%s is not a video capture device", param.deviceId.c_str());
622 return false;
623 }
624
625 yCInfo(USBCAMERA, "%s supports V4L2_CAP_VIDEO_CAPTURE", param.deviceId.c_str());
626
627 switch (param.io) {
628 case IO_METHOD_READ:
629 if (!(cap.capabilities & V4L2_CAP_READWRITE)) {
630 yCError(USBCAMERA, "%s does not support read i/o", param.deviceId.c_str());
631 return false;
632 }
633 break;
634
635 case IO_METHOD_MMAP:
636 case IO_METHOD_USERPTR:
637 if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
638 yCError(USBCAMERA, "%s does not support streaming i/o", param.deviceId.c_str());
639 return false;
640 }
641 break;
642
643 default:
644 yCError(USBCAMERA, "Unknown io method for device %s", param.deviceId.c_str());
645 return false;
646 break;
647 }
648
649 CLEAR(cropcap);
650 cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
651
652 if (0 == xioctl(param.fd, VIDIOC_CROPCAP, &cropcap)) {
653 crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
654 crop.c = cropcap.defrect; /* reset to default */
655
656 /* Reset cropping to default if possible.
657 * Don't care about errors
658 */
659 xioctl(param.fd, VIDIOC_S_CROP, &crop);
660 }
661
662 CLEAR(param.src_fmt);
663 CLEAR(param.dst_fmt);
664
665 _v4lconvert_data = v4lconvert_create(param.fd);
666 if (_v4lconvert_data == nullptr) {
667 yCError(USBCAMERA) << "Failed to initialize v4lconvert. Conversion to required format may not work";
668 }
669
670 /*
671 * dst_fmt is the image format the user requires.
672 * With try_format, V4L does a handshake with the camera and the best match from
673 * the available formats provided by the camera is selected.
674 * src_fmt will contain the source format, i.e. the configuration to be sent to the
675 * camera to optimize the conversion which will be done afterwards.
676 *
677 * VERY IMPORTANT NOTE:
678 *
679 * In case no match is found for the user input provided in dst_fmt, then dst_fmt
680 * itself may be changed to provide the best possible conversion similar to the user
681 * input. In particular, pixel format conversion together with rescaling may not
682 * be achievable. In this case only the pixel format conversion will be done
683 * and we need to take care of the rescaling ourselves.
684 */
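 // Illustrative example (values are hypothetical): if the user asks for a
 // 640x480 RGB image but the camera only streams 1280x720 YUYV, try_format may
 // return src_fmt = 1280x720 YUYV and dst_fmt = 1280x720 RGB24. libv4lconvert
 // then handles only the pixel-format conversion, while the crop/resize branch
 // below brings the frame to the requested 640x480 geometry with OpenCV.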
685
686 param.dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
687 param.dst_fmt.fmt.pix.width = param.user_width;
688 param.dst_fmt.fmt.pix.height = param.user_height;
689 param.dst_fmt.fmt.pix.field = V4L2_FIELD_NONE;
690 param.dst_fmt.fmt.pix.pixelformat = param.pixelType;
691
692 if (v4lconvert_try_format(_v4lconvert_data, &(param.dst_fmt), &(param.src_fmt)) != 0) {
693 yCError(USBCAMERA, "v4lconvert_try_format -> Error is: %s", v4lconvert_get_error_message(_v4lconvert_data));
694 return false;
695 }
696
697 // Check if dst_fmt has been changed by the v4lconvert_try_format
698 if (param.dst_fmt.fmt.pix.width != param.user_width ||
699 param.dst_fmt.fmt.pix.height != param.user_height ||
700 param.dst_fmt.fmt.pix.pixelformat != param.pixelType) {
701 yCWarning(USBCAMERA) << "Conversion from the HW supported configuration into the user requested format will require an additional step.\n"
702 << "Performance issues may arise.";
703
704 param.addictionalResize = true;
705
706 // Compute offsets for cropping the image in case the source image and the one
707 // required by the user have different aspect ratios, e.g. 16:9 vs 4:3
708 double inputFF = (double)param.dst_fmt.fmt.pix.width / (double)param.dst_fmt.fmt.pix.height;
709 double outputFF = (double)param.user_width / (double)param.user_height;
710
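 // Worked example (hypothetical numbers, single camera): with dst_fmt 1280x720
 // (inputFF = 1.78) and a user request of 640x480 (outputFF = 1.33), the branch
 // below keeps all 720 rows and crops the width symmetrically:
 // resizeOffset_x = (1280 - 720 * 1.33) / 2 = 160, resizeWidth = 1280 - 320 = 960.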
711 if (outputFF < inputFF) {
712 // Use all vertical pixels, crop lateral pixels to get the central portion of the image
713 param.resizeOffset_y = 0;
714 param.resizeHeight = param.dst_fmt.fmt.pix.height;
715
716 if (!param.dual) {
717 param.resizeOffset_x = (param.dst_fmt.fmt.pix.width - (param.dst_fmt.fmt.pix.height * outputFF)) / 2;
718 param.resizeWidth = param.dst_fmt.fmt.pix.width - param.resizeOffset_x * 2;
719 } else {
720 param.resizeOffset_x = (param.dst_fmt.fmt.pix.width - (param.dst_fmt.fmt.pix.height * outputFF)) / 4; // "/4" is "/2" 2 times because there are 2 images
721 param.resizeWidth = param.dst_fmt.fmt.pix.width / 2 - param.resizeOffset_x * 2;
722 }
723 } else {
724 // Use all horizontal pixels, crop top/bottom pixels to get the central portion of the image
725 param.resizeOffset_x = 0;
726
727 if (!param.dual) {
728 param.resizeWidth = param.dst_fmt.fmt.pix.width;
729 param.resizeOffset_y = (param.dst_fmt.fmt.pix.height - (param.dst_fmt.fmt.pix.width / outputFF)) / 2;
730 param.resizeHeight = param.dst_fmt.fmt.pix.height - param.resizeOffset_y * 2;
731 } else {
732 param.resizeWidth = param.dst_fmt.fmt.pix.width / 2;
733 param.resizeOffset_y = (param.dst_fmt.fmt.pix.height - (param.dst_fmt.fmt.pix.width / outputFF)) / 2;
734 param.resizeHeight = param.dst_fmt.fmt.pix.height - param.resizeOffset_y * 2;
735 }
736 }
737 } else {
738 param.addictionalResize = false;
739 param.resizeOffset_x = 0;
740 param.resizeWidth = param.user_width / 2;
741 param.resizeOffset_y = 0;
742 param.resizeHeight = param.user_height;
743 }
744
745 if (-1 == xioctl(param.fd, VIDIOC_S_FMT, &param.src_fmt)) {
746 yCError(USBCAMERA) << "xioctl error VIDIOC_S_FMT" << strerror(errno);
747 return false;
748 }
749
750 /* If the user has set the fps to -1, don't try to set the frame interval */
751 if (param.fps != -1) {
752 CLEAR(frameint);
753
754 /* Attempt to set the frame interval. */
755 frameint.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
756 frameint.parm.capture.timeperframe.numerator = 1;
757 frameint.parm.capture.timeperframe.denominator = param.fps;
758 if (-1 == xioctl(param.fd, VIDIOC_S_PARM, &frameint)) {
759 yCError(USBCAMERA, "Unable to set frame interval.");
760 }
761 }
762
763 param.src_image_size = param.src_fmt.fmt.pix.sizeimage;
764 param.src_image = new unsigned char[param.src_image_size];
765
766 param.dst_image_size_rgb = param.dst_fmt.fmt.pix.width * param.dst_fmt.fmt.pix.height * 3;
767 param.dst_image_rgb = new unsigned char[param.dst_image_size_rgb];
768
769 // raw image is for non-standard type only, for example leopard_python
770 if (param.camModel == LEOPARD_PYTHON) {
771 /* This camera sends 10-bit Bayer data packed into 2 bytes per pixel,
772 * therefore the total size of the image is 2 times the number of pixels.
773 */
774 param.raw_image_size = param.src_fmt.fmt.pix.width * param.src_fmt.fmt.pix.height * 2;
775 param.raw_image = new unsigned char[param.raw_image_size];
776 param.read_image = param.raw_image; // store the image read in the raw_image buffer
777 } else // This buffer should not be used for STANDARD_UVC cameras
778 {
779 param.read_image = param.src_image; // store the image read in the src_image buffer
780 param.raw_image_size = 0;
781 param.raw_image = YARP_NULLPTR;
782 }
783
784 switch (param.io) {
785 case IO_METHOD_READ:
786 readInit(param.src_fmt.fmt.pix.sizeimage);
787 break;
788
789 case IO_METHOD_MMAP:
790 mmapInit();
791 break;
792
793 case IO_METHOD_USERPTR:
794 userptrInit(param.src_fmt.fmt.pix.sizeimage);
795 break;
796 }
797
798 if (verbose) {
800 }
801 configured = true;
802
803 return true;
804}
805
806bool V4L_camera::deviceUninit()
807{
808 unsigned int i;
809 bool ret = true;
810 configured = false;
811
812 switch (param.io) {
813 case IO_METHOD_READ:
814 free(param.buffers[0].start);
815 break;
816
817 case IO_METHOD_MMAP:
818 for (i = 0; i < param.n_buffers; ++i) {
819 if (-1 == v4l2_munmap(param.buffers[i].start, param.buffers[i].length)) {
820 ret = false;
821 }
822 }
823
824 CLEAR(param.req);
825 // memset(param.req, 0, sizeof(struct v4l2_requestbuffers));
826 param.req.count = 0;
827 param.req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
828 param.req.memory = V4L2_MEMORY_MMAP;
829 if (xioctl(param.fd, VIDIOC_REQBUFS, &param.req) < 0) {
830 yCError(USBCAMERA, "VIDIOC_REQBUFS - Failed to delete buffers: %s (errno %d)", strerror(errno), errno);
831 return false;
832 }
833
834 break;
835
836 case IO_METHOD_USERPTR:
837 for (i = 0; i < param.n_buffers; ++i) {
838 free(param.buffers[i].start);
839 }
840 break;
841 }
842
843 if (param.buffers != nullptr) {
844 free(param.buffers);
845 }
846
847 if (param.raw_image != YARP_NULLPTR) {
848 delete[] param.raw_image;
849 param.raw_image = YARP_NULLPTR;
850 }
851
852 if (param.src_image != YARP_NULLPTR) {
853 delete[] param.src_image;
854 param.src_image = YARP_NULLPTR;
855 }
856
857 if (param.dst_image_rgb != YARP_NULLPTR) {
858 delete[] param.dst_image_rgb;
859 param.dst_image_rgb = YARP_NULLPTR;
860 }
861
862 if (_v4lconvert_data != YARP_NULLPTR) {
863 v4lconvert_destroy(_v4lconvert_data);
864 _v4lconvert_data = YARP_NULLPTR;
865 }
866
867 return ret;
868}
869
873bool V4L_camera::close()
874{
876
877 stop(); // stop yarp thread acquiring images
878
879 if (param.fd != -1) {
880 captureStop();
881 deviceUninit();
882
883 if (-1 == v4l2_close(param.fd)) {
884 yCError(USBCAMERA) << "Error closing V4l2 device";
885 }
886 return false;
887 }
888 param.fd = -1;
889 return true;
890}
891
892ReturnValue V4L_camera::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image)
893{
894 image.resize(width(), height());
895
896 bool res = false;
897 mutex.wait();
898 if (configured) {
899 imagePreProcess();
900 imageProcess();
901
902 if (!param.addictionalResize) {
903 memcpy(image.getRawImage(), param.dst_image_rgb, param.dst_image_size_rgb);
904 } else {
905 memcpy(image.getRawImage(), param.outMat.data, param.outMat.total() * 3);
906 }
907 mutex.post();
908 res = true;
909 } else {
910 yCError(USBCAMERA) << "unable to get the buffer, device uninitialized";
911 mutex.post();
912 res = false;
913 }
914 if (res) return ReturnValue_ok;
915 return ReturnValue::return_code::return_value_error_method_failed;
916}
917
918ReturnValue V4L_camera::getImage(yarp::sig::ImageOf<yarp::sig::PixelMono>& image)
919{
920 image.resize(width(), height());
921
922 bool res = false;
923 mutex.wait();
924 if (configured) {
925 imagePreProcess();
926 memcpy(image.getRawImage(), param.src_image, param.src_image_size);
927 res = true;
928 } else {
929 yCError(USBCAMERA) << "unable to get the buffer, device uninitialized";
930 res = false;
931 }
932 mutex.post();
933 if (res) return ReturnValue_ok;
934 return ReturnValue::return_code::return_value_error_method_failed;
935}
936
941int V4L_camera::height() const
942{
943 /*
944 * return user setting because at the end of the day, this is what
945 * the image must look like
946 */
947 return param.user_height;
948}
949
954int V4L_camera::width() const
955{
956 /*
957 * return user setting because at the end of the day, this is what
958 * the image must look like
959 */
960 return param.user_width;
961}
962
971int V4L_camera::xioctl(int fd, int request, void* argp)
972{
973 int r;
974
975 do {
976 r = v4l2_ioctl(fd, request, argp);
977 } while (-1 == r && EINTR == errno);
978
979 return r;
980}
981
982
984
985
988
989void V4L_camera::enumerate_menu()
990{
991 yCInfo(USBCAMERA, "Menu items:");
992
993 memset(&querymenu, 0, sizeof(querymenu));
994 querymenu.id = queryctrl.id;
995
996 for (querymenu.index = (__u32)queryctrl.minimum; querymenu.index <= (__u32)queryctrl.maximum; querymenu.index++) {
997 if (0 == ioctl(param.fd, VIDIOC_QUERYMENU, &querymenu)) {
998 yCInfo(USBCAMERA, " %s", querymenu.name);
999 } else {
1000 yCError(USBCAMERA, "VIDIOC_QUERYMENU: %d, %s", errno, strerror(errno));
1001 return;
1002 }
1003 }
1004}
1005
1006
1007bool V4L_camera::enumerate_controls()
1008{
1009 memset(&queryctrl, 0, sizeof(queryctrl));
1010
1012 if (0 == ioctl(param.fd, VIDIOC_QUERYCTRL, &queryctrl)) {
1013 if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
1014 continue;
1015 }
1016
1017 yCInfo(USBCAMERA, "Control %s (id %d)", queryctrl.name, queryctrl.id);
1018
1019 if (queryctrl.type == V4L2_CTRL_TYPE_MENU) {
1020 enumerate_menu();
1021 }
1022 } else {
1023 if (errno == EINVAL) {
1024 continue;
1025 }
1026
1027 yCError(USBCAMERA, "VIDIOC_QUERYCTRL: %d, %s", errno, strerror(errno));
1028 return false;
1029 }
1030 }
1031
1032 for (queryctrl.id = V4L2_CID_PRIVATE_BASE;; queryctrl.id++) {
1033 if (0 == ioctl(param.fd, VIDIOC_QUERYCTRL, &queryctrl)) {
1034 if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
1035 continue;
1036 }
1037
1038 yCInfo(USBCAMERA, "Control %s", queryctrl.name);
1039
1040 if (queryctrl.type == V4L2_CTRL_TYPE_MENU) {
1041 enumerate_menu();
1042 }
1043 } else {
1044 if (errno == EINVAL) {
1045 break;
1046 }
1047
1048 yCError(USBCAMERA, "VIDIOC_QUERYCTRL: %d, %s", errno, strerror(errno));
1049 return false;
1050 }
1051 }
1052 return true;
1053}
1054
1058bool V4L_camera::full_FrameRead()
1059{
1060 bool got_it = false;
1061 void* image_ret = nullptr;
1062 unsigned int count;
1063 unsigned int numberOfTimeouts;
1064
1065 fd_set fds;
1066 struct timeval tv;
1067 int r;
1068
1069 numberOfTimeouts = 0;
1070 count = 10; //trials
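 // Wait for a frame with select() (1 second timeout per attempt) and retry up
 // to 'count' times; the loop stops as soon as frameRead() dequeues an image.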
1071
1072
1073 for (unsigned int i = 0; i < count; i++) {
1074 FD_ZERO(&fds);
1075 FD_SET(param.fd, &fds);
1076
1077 /* Timeout. */
1078 tv.tv_sec = 1;
1079 tv.tv_usec = 0;
1080
1081 r = select(param.fd + 1, &fds, nullptr, nullptr, &tv);
1082
1083 if (r < 0) {
1084 if (EINTR == errno) {
1085 continue;
1086 }
1087
1088 return image_ret != nullptr;
1089 }
1090 if (0 == r) {
1091 numberOfTimeouts++;
1092 {
1093 yCWarning(USBCAMERA, "timeout while reading image [%d/%d]", numberOfTimeouts, count);
1094 got_it = false;
1095 }
1096 } else if ((r > 0) && (FD_ISSET(param.fd, &fds))) {
1097 if (frameRead()) {
1098 //yCTrace(USBCAMERA, "got an image");
1099 got_it = true;
1100 break;
1101 }
1102 yCWarning(USBCAMERA, "trial %d failed", i);
1103
1104 } else {
1105 yCWarning(USBCAMERA, "select woke up for something else");
1106 }
1107
1108 /* EAGAIN - continue select loop. */
1109 }
1110 return got_it;
1111}
1112
1116bool V4L_camera::frameRead()
1117{
1118 unsigned int i;
1119 struct v4l2_buffer buf;
1120 mutex.wait();
1121
1122 switch (param.io) {
1123 case IO_METHOD_READ:
1124 if (-1 == v4l2_read(param.fd, param.buffers[0].start, param.buffers[0].length)) {
1125 mutex.post();
1126 return false;
1127 }
1128
1129 timeStamp.update(toEpochOffset + buf.timestamp.tv_sec + buf.timestamp.tv_usec / 1000000.0);
1130 // imageProcess(param.buffers[0].start);
1131 break;
1132
1133
1134 case IO_METHOD_MMAP:
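 // MMAP streaming: dequeue a filled buffer (VIDIOC_DQBUF), copy its content
 // into read_image, convert the driver timestamp to epoch time, then give the
 // buffer back to the driver queue (VIDIOC_QBUF).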
1135 CLEAR(buf);
1136
1137 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1138 buf.memory = V4L2_MEMORY_MMAP;
1139
1140 if (-1 == xioctl(param.fd, VIDIOC_DQBUF, &buf)) {
1141 yCError(USBCAMERA, "usbCamera VIDIOC_DQBUF");
1142 mutex.post();
1143 return false;
1144 }
1145
1146 if (!(buf.index < param.n_buffers)) {
1147 mutex.post();
1148 return false;
1149 }
1150
1151 memcpy(param.read_image, param.buffers[buf.index].start, param.buffers[0].length);
1152 // imageProcess(param.raw_image);
1153 timeStamp.update(toEpochOffset + buf.timestamp.tv_sec + buf.timestamp.tv_usec / 1000000.0);
1154
1155 if (-1 == xioctl(param.fd, VIDIOC_QBUF, &buf)) {
1156 yCError(USBCAMERA, "VIDIOC_QBUF");
1157 mutex.post();
1158 return false;
1159 }
1160
1161 break;
1162
1163 case IO_METHOD_USERPTR:
1164 CLEAR(buf);
1165
1166 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1167 buf.memory = V4L2_MEMORY_USERPTR;
1168
1169 if (-1 == xioctl(param.fd, VIDIOC_DQBUF, &buf)) {
1170 yCError(USBCAMERA, "VIDIOC_DQBUF");
1171 mutex.post();
1172 return false;
1173 }
1174
1175 for (i = 0; i < param.n_buffers; ++i) {
1176 if (buf.m.userptr == (unsigned long)param.buffers[i].start && buf.length == param.buffers[i].length) {
1177 break;
1178 }
1179 }
1180
1181 if (!(i < param.n_buffers)) {
1182 mutex.post();
1183 return false;
1184 }
1185
1186 memcpy(param.read_image, param.buffers[buf.index].start, param.buffers[0].length);
1187 timeStamp.update(toEpochOffset + buf.timestamp.tv_sec + buf.timestamp.tv_usec / 1000000.0);
1188
1189
1190 if (-1 == xioctl(param.fd, VIDIOC_QBUF, &buf)) {
1191 yCError(USBCAMERA, "VIDIOC_QBUF");
1192 }
1193 break;
1194
1195 default:
1196 yCError(USBCAMERA, "frameRead no read method configured");
1197 }
1198 mutex.post();
1199 return true;
1200}
1201
1202/*
1203 * This function is intended to perform custom code to adapt
1204 * non standard pixel types to a standard one, in order to
1205 * use standard conversion libraries afterward.
1206 */
1207void V4L_camera::imagePreProcess()
1208{
1209 switch (param.camModel) {
1210 case LEOPARD_PYTHON:
1211 {
1212 // Here we are resizing the byte information from 10 to 8 bits.
1213 // Width and Height are not modified by this operation.
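 // Each 16-bit word carries one 10-bit sample; shifting right by 'bit_shift'
 // keeps the 8 most significant bits (e.g. with bit_shift = 2, 0x03FF >> 2 = 0xFF).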
1214 const uint _pixelNum = param.src_fmt.fmt.pix.width * param.src_fmt.fmt.pix.height;
1215
1216 uint16_t* raw_p = (uint16_t*)param.raw_image;
1217 for (uint i = 0; i < _pixelNum; i++) {
1218 param.src_image[i] = (unsigned char)(raw_p[i] >> bit_shift);
1219 }
1220
1221 // Set the correct pixel type for the v4l_convert to work on.
1222 param.src_fmt.fmt.pix.bytesperline = param.src_fmt.fmt.pix.width;
1223 param.src_fmt.fmt.pix.pixelformat = pixel_fmt_leo;
1224 break;
1225 }
1226 case STANDARD_UVC:
1227 default:
1228 // Nothing to do here
1229 break;
1230 }
1231}
1232
1236void V4L_camera::imageProcess()
1237{
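 // Conversion pipeline: libv4lconvert turns src_image (camera format) into an
 // RGB frame in dst_image_rgb; if an additional resize is needed, OpenCV crops
 // the central region and rescales it into param.outMat (dual cameras are
 // processed as two halves and re-joined with hconcat); finally the image is
 // optionally flipped horizontally.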
1238 static bool initted = false;
1239 static int err = 0;
1240
1241 timeStart = yarp::os::Time::now();
1242
1243 // imagePreProcess() should already be called before entering here!!
1244 // src_fmt and dst_fmt must be already fixed up if needed!!
1245
1246 // Convert from src type to RGB
1247 if (v4lconvert_convert((v4lconvert_data*)_v4lconvert_data,
1248 &param.src_fmt,
1249 &param.dst_fmt,
1250 param.src_image,
1251 param.src_image_size,
1252 param.dst_image_rgb,
1253 param.dst_image_size_rgb)
1254 < 0) {
1255 if ((err % 20) == 0) {
1256 yCError(USBCAMERA, "error converting \n\t Error message is: %s", v4lconvert_get_error_message(_v4lconvert_data));
1257 err = 0;
1258 }
1259 err++;
1260 return;
1261 }
1262
1263 if (param.addictionalResize) {
1264 if (!param.dual) {
1265 cv::Mat img(cv::Size(param.dst_fmt.fmt.pix.width, param.dst_fmt.fmt.pix.height), CV_8UC3, param.dst_image_rgb);
1266 cv::Rect crop(param.resizeOffset_x, param.resizeOffset_y, param.resizeWidth, param.resizeHeight);
1267 cv::resize(img(crop), param.outMat, cvSize(param.user_width, param.user_height), 0, 0, cv::INTER_CUBIC);
1268 } else {
1269 // Load whole image in a cv::Mat
1270 cv::Mat img(cv::Size(param.dst_fmt.fmt.pix.width, param.dst_fmt.fmt.pix.height), CV_8UC3, param.dst_image_rgb);
1271 cv::Mat img_right;
1272 cv::Rect crop(param.resizeOffset_x, param.resizeOffset_y, param.resizeWidth, param.resizeHeight);
1273
1274 cv::resize(img(crop), param.outMat, cvSize(param.user_width / 2, param.user_height), 0, 0, cv::INTER_CUBIC);
1275 cv::Rect crop2(param.resizeWidth + param.resizeOffset_x * 2, param.resizeOffset_y, param.resizeWidth, param.resizeHeight);
1276 cv::resize(img(crop2), img_right, cvSize(param.user_width / 2, param.user_height), 0, 0, cv::INTER_CUBIC);
1277 cv::hconcat(param.outMat, img_right, param.outMat);
1278 }
1279 if (param.flip) {
1280 cv::flip(param.outMat, param.outMat, 1);
1281 }
1282 } else {
1283 if (param.flip) {
1284 cv::Mat img(cv::Size(param.dst_fmt.fmt.pix.width, param.dst_fmt.fmt.pix.height), CV_8UC3, param.dst_image_rgb);
1285 param.outMat = img;
1286 cv::flip(param.outMat, param.outMat, 1);
1287 }
1288 }
1289
1290 timeElapsed = yarp::os::Time::now() - timeStart;
1291 myCounter++;
1292 timeTot += timeElapsed;
1293
1294 if ((myCounter % 60) == 0) {
1295 if (!initted) {
1296 timeTot = 0;
1297 myCounter = 0;
1298 initted = true;
1299 }
1300 }
1301}
1302
1306void V4L_camera::captureStop()
1307{
1308 int ret = 0;
1309 int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1310 switch (param.io) {
1311 case IO_METHOD_READ:
1312 //do nothing
1313 break;
1314
1315 case IO_METHOD_MMAP:
1316 default:
1317 ret = xioctl(param.fd, VIDIOC_STREAMOFF, &type);
1318 if (ret < 0) {
1319 if (errno != 9) { /* errno = 9 (EBADF) means the capture was already stopped */
1320 yCError(USBCAMERA, "VIDIOC_STREAMOFF - Unable to stop capture: %d, %s", errno, strerror(errno));
1321 }
1322 }
1323 break;
1324 }
1325}
1326
1330void V4L_camera::captureStart()
1331{
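 // Queue every buffer with VIDIOC_QBUF (MMAP and USERPTR methods) and then
 // start the capture stream with VIDIOC_STREAMON; IO_METHOD_READ needs no setup.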
1332 unsigned int i;
1333 enum v4l2_buf_type type;
1334
1335 switch (param.io) {
1336 case IO_METHOD_READ:
1337 /* Nothing to do. */
1338 break;
1339
1340 case IO_METHOD_MMAP:
1341 for (i = 0; i < param.n_buffers; ++i) {
1342 struct v4l2_buffer buf;
1343 CLEAR(buf);
1344
1345 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1346 buf.memory = V4L2_MEMORY_MMAP;
1347 buf.index = i;
1348
1349 if (-1 == xioctl(param.fd, VIDIOC_QBUF, &buf)) {
1350 yCError(USBCAMERA, "VIDIOC_QBUF");
1351 }
1352 }
1353
1354 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1355
1356 if (-1 == xioctl(param.fd, VIDIOC_STREAMON, &type)) {
1357 yCError(USBCAMERA, "VIDIOC_STREAMON");
1358 }
1359
1360 break;
1361
1362 case IO_METHOD_USERPTR:
1363 for (i = 0; i < param.n_buffers; ++i) {
1364 struct v4l2_buffer buf;
1365
1366 CLEAR(buf);
1367
1368 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1369 buf.memory = V4L2_MEMORY_USERPTR;
1370 buf.index = i;
1371 buf.m.userptr = (unsigned long)param.buffers[i].start;
1372 buf.length = param.buffers[i].length;
1373
1374 if (-1 == xioctl(param.fd, VIDIOC_QBUF, &buf)) {
1375 yCError(USBCAMERA, "VIDIOC_QBUF");
1376 }
1377 }
1378
1379 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1380
1381 if (-1 == xioctl(param.fd, VIDIOC_STREAMON, &type)) {
1382 yCError(USBCAMERA, "VIDIOC_STREAMON");
1383 }
1384
1385 break;
1386 }
1387}
1388
1389
1390bool V4L_camera::readInit(unsigned int buffer_size)
1391{
1392 param.buffers = (struct buffer*)calloc(1, sizeof(*(param.buffers)));
1393
1394 if (param.buffers == nullptr) {
1395 yCError(USBCAMERA, "cannot allocate buffer, out of memory");
1396 return false;
1397 }
1398
1399 param.buffers[0].length = buffer_size;
1400 param.buffers[0].start = malloc(buffer_size);
1401
1402 if (param.buffers[0].start == nullptr) {
1403 yCError(USBCAMERA, "cannot allocate buffer, out of memory");
1404 return false;
1405 }
1406 return true;
1407}
1408
1409bool V4L_camera::mmapInit()
1410{
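 // Memory-mapped streaming setup: request a set of kernel buffers with
 // VIDIOC_REQBUFS, query each one with VIDIOC_QUERYBUF and map it into the
 // process address space with v4l2_mmap().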
1411 CLEAR(param.req);
1412
1413 param.n_buffers = VIDIOC_REQBUFS_COUNT;
1414 param.req.count = param.n_buffers;
1415 param.req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1416 param.req.memory = V4L2_MEMORY_MMAP;
1417
1418 if (-1 == xioctl(param.fd, VIDIOC_REQBUFS, &param.req)) {
1419 if (EINVAL == errno) {
1420 yCError(USBCAMERA, "%s does not support memory mapping", param.deviceId.c_str());
1421 return false;
1422 }
1423 yCError(USBCAMERA, "Error on device %s requesting memory mapping (VIDIOC_REQBUFS)", param.deviceId.c_str());
1424 return false;
1425 }
1426
1427 if (param.req.count < 1) {
1428 yCError(USBCAMERA, "Insufficient buffer memory on %s", param.deviceId.c_str());
1429 return false;
1430 }
1431
1432 if (param.req.count == 1) {
1433 yCError(USBCAMERA, "Only 1 buffer was available, you may encounter performance issue acquiring images from device %s", param.deviceId.c_str());
1434 }
1435
1436 param.buffers = (struct buffer*)calloc(param.req.count, sizeof(*(param.buffers)));
1437
1438 if (param.buffers == nullptr) {
1439 yCError(USBCAMERA, "Out of memory");
1440 return false;
1441 }
1442
1443 struct v4l2_buffer buf;
1444
1445 for (param.n_buffers = 0; param.n_buffers < param.req.count; param.n_buffers++) {
1446 CLEAR(buf);
1447
1448 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1449 buf.memory = V4L2_MEMORY_MMAP;
1450 buf.index = param.n_buffers;
1451
1452 if (-1 == xioctl(param.fd, VIDIOC_QUERYBUF, &buf)) {
1453 yCError(USBCAMERA, "VIDIOC_QUERYBUF");
1454 }
1455
1456 param.buffers[param.n_buffers].length = buf.length;
1457 param.buffers[param.n_buffers].start = v4l2_mmap(nullptr, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, param.fd, buf.m.offset);
1458
1459 if (MAP_FAILED == param.buffers[param.n_buffers].start) {
1460 yCError(USBCAMERA, "mmap");
1461 }
1462 }
1463 return true;
1464}
1465
1466bool V4L_camera::userptrInit(unsigned int buffer_size)
1467{
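 // User-pointer streaming: the driver captures directly into buffers allocated
 // by the application, which must be page aligned; buffer_size is rounded up to
 // a multiple of the page size below.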
1468 unsigned int page_size;
1469
1470 page_size = getpagesize();
1471 buffer_size = (buffer_size + page_size - 1) & ~(page_size - 1);
1472
1473 CLEAR(param.req);
1474
1475 param.req.count = VIDIOC_REQBUFS_COUNT;
1476 param.req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1477 param.req.memory = V4L2_MEMORY_USERPTR;
1478
1479 if (-1 == xioctl(param.fd, VIDIOC_REQBUFS, &param.req)) {
1480 if (EINVAL == errno) {
1481 yCError(USBCAMERA, "%s does not support user pointer i/o", param.deviceId.c_str());
1482 return false;
1483 }
1484 yCError(USBCAMERA, "Error requesting VIDIOC_REQBUFS for device %s", param.deviceId.c_str());
1485 return false;
1486 }
1487
1488 param.buffers = (struct buffer*)calloc(4, sizeof(*(param.buffers)));
1489
1490 if (param.buffers == nullptr) {
1491 yCError(USBCAMERA, "cannot allocate buffer, out of memory");
1492 return false;
1493 }
1494
1495 for (param.n_buffers = 0; param.n_buffers < 4; ++param.n_buffers) {
1496 param.buffers[param.n_buffers].length = buffer_size;
1497 param.buffers[param.n_buffers].start = memalign(/* boundary */ page_size, buffer_size);
1498
1499 if (param.buffers[param.n_buffers].start == nullptr) {
1500 yCError(USBCAMERA, "cannot allocate buffer, out of memory");
1501 return false;
1502 }
1503 }
1504 return true;
1505}
1506
1507bool V4L_camera::set_V4L2_control(uint32_t id, double value, bool verbatim)
1508{
1509 if (value < 0) {
1510 return false;
1511 }
1512
1514 struct v4l2_control control;
1515
1516 memset(&queryctrl, 0, sizeof(queryctrl));
1517 queryctrl.id = id;
1518
1519 if (-1 == ioctl(param.fd, VIDIOC_QUERYCTRL, &queryctrl)) {
1520 if (errno != EINVAL) {
1521 yCError(USBCAMERA, "VIDIOC_QUERYCTRL: %d, %s", errno, strerror(errno));
1522 } else {
1523 yCError(USBCAMERA, "Cannot set control <%s> (id 0x%0X): it is not supported", queryctrl.name, queryctrl.id);
1524 }
1525 return false;
1526 }
1527
1528 if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
1529 yCError(USBCAMERA, "Control %s is disabled", queryctrl.name);
1530 return false;
1531 }
1532 memset(&control, 0, sizeof(control));
1533 control.id = id;
1534 if (verbatim) {
1535 control.value = value;
1536 } else {
1537 if (param.camModel == LEOPARD_PYTHON) {
1538 if ((V4L2_CID_EXPOSURE == id) || (V4L2_CID_EXPOSURE_ABSOLUTE == id) || (V4L2_CID_EXPOSURE_AUTO == id)) {
1539 queryctrl.maximum = 8000;
1540 queryctrl.minimum = 0;
1541 }
1542 }
1543 control.value = (int32_t)(value * (queryctrl.maximum - queryctrl.minimum) + queryctrl.minimum);
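 // The input is a normalized value in [0.0, 1.0] mapped linearly onto the raw
 // control range; e.g. value = 0.5 with a range of [-64, 64] gives control.value = 0.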
1544 }
1545 if (-1 == ioctl(param.fd, VIDIOC_S_CTRL, &control)) {
1546 yCError(USBCAMERA, "VIDIOC_S_CTRL: %d, %s", errno, strerror(errno));
1547 if (errno == ERANGE) {
1548 yCError(USBCAMERA, "Normalized input value %f ( equivalent to raw value of %d) was out of range for control %s: Min and Max are: %d - %d", value, control.value, queryctrl.name, queryctrl.minimum, queryctrl.maximum);
1549 }
1550 return false;
1551 }
1552 if (verbose) {
1553 yCInfo(USBCAMERA, "set control %s to %d done!", queryctrl.name, control.value);
1554 }
1555
1556 return true;
1557}
1558
1559bool V4L_camera::check_V4L2_control(uint32_t id)
1560{
1561 // yCTrace(USBCAMERA);
1563 struct v4l2_control control;
1564
1565 memset(&control, 0, sizeof(control));
1566 memset(&queryctrl, 0, sizeof(queryctrl));
1567
1568 control.id = id;
1569 queryctrl.id = id;
1570
1571 if (-1 == ioctl(param.fd, VIDIOC_QUERYCTRL, &queryctrl)) {
1572 if (errno != EINVAL) {
1573 yCError(USBCAMERA, "VIDIOC_QUERYCTRL: %d, %s", errno, strerror(errno));
1574 }
1575 return false;
1576 }
1577 return true;
1578}
1579
1580double V4L_camera::get_V4L2_control(uint32_t id, bool verbatim)
1581{
1583 struct v4l2_control control;
1584
1585 memset(&control, 0, sizeof(control));
1586 memset(&queryctrl, 0, sizeof(queryctrl));
1587
1588 control.id = id;
1589 queryctrl.id = id;
1590
1591 if (-1 == ioctl(param.fd, VIDIOC_QUERYCTRL, &queryctrl)) {
1592 if (errno != EINVAL) {
1593 yCError(USBCAMERA, "VIDIOC_QUERYCTRL: %d, %s", errno, strerror(errno));
1594 }
1595
1596 return -1.0;
1597 }
1598
1599 if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
1600 yCError(USBCAMERA, "Control %s is disabled", queryctrl.name);
1601 } else {
1602 if (-1 == ioctl(param.fd, VIDIOC_G_CTRL, &control)) {
1603 yCError(USBCAMERA, "VIDIOC_G_CTRL: %d, %s", errno, strerror(errno));
1604 return -1.0;
1605 }
1606 }
1607 if (verbatim) {
1608 return control.value;
1609 }
1610
1611 if (param.camModel == LEOPARD_PYTHON) {
1612 if ((V4L2_CID_EXPOSURE == id) || (V4L2_CID_EXPOSURE_ABSOLUTE == id) || (V4L2_CID_EXPOSURE_AUTO == id)) {
1613 queryctrl.maximum = 8000;
1614 queryctrl.minimum = 0;
1615 }
1616 }
1617 return (double)(control.value - queryctrl.minimum) / (queryctrl.maximum - queryctrl.minimum);
1618}
1619
1626
1627ReturnValue V4L_camera::hasFeature(cameraFeature_id_t feature, bool& _hasFeature)
1628{
1629 bool tmpMan(false);
1630 bool tmpAuto(false);
1631 bool tmpOnce(false);
1632
1633 switch (feature) {
1634 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1635 tmpMan = check_V4L2_control(V4L2_CID_RED_BALANCE) && check_V4L2_control(V4L2_CID_BLUE_BALANCE);
1636 tmpOnce = check_V4L2_control(V4L2_CID_DO_WHITE_BALANCE);
1637 tmpAuto = check_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE);
1638 break;
1639
1640 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1641 tmpMan = check_V4L2_control(V4L2_CID_EXPOSURE) || check_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE);
1642 tmpAuto = check_V4L2_control(V4L2_CID_EXPOSURE_AUTO);
1643 break;
1644
1645 default:
1646 tmpMan = check_V4L2_control(convertYARP_to_V4L(feature));
1647 break;
1648 }
1649
1650 _hasFeature = tmpMan || tmpAuto || tmpOnce;
1651 return ReturnValue_ok;
1652}
1653
1654ReturnValue V4L_camera::setFeature(cameraFeature_id_t feature, double value)
1655{
1656 bool ret = false;
1657 switch (feature) {
1658 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1659 if (use_exposure_absolute) {
1660 ret = set_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE, value);
1661 } else {
1662 ret = set_V4L2_control(V4L2_CID_EXPOSURE, value);
1663 }
1664 break;
1665
1666 default:
1667 ret = set_V4L2_control(convertYARP_to_V4L(feature), value);
1668 break;
1669 }
1670 if (ret) return ReturnValue_ok;
1671 return ReturnValue::return_code::return_value_error_method_failed;
1672}
1673
1674ReturnValue V4L_camera::getFeature(cameraFeature_id_t feature, double& value)
1675{
1676 double tmp = 0.0;
1677 switch (feature) {
1678 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1679 if (use_exposure_absolute) {
1680 tmp = get_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE);
1681 } else {
1682 tmp = get_V4L2_control(V4L2_CID_EXPOSURE);
1683 }
1684 break;
1685
1686 default:
1687 tmp = get_V4L2_control(convertYARP_to_V4L(feature));
1688 break;
1689 }
1690
1691 if (tmp == -1) {
1692 return ReturnValue::return_code::return_value_error_method_failed;
1693 }
1694
1695 value = tmp;
1696 return ReturnValue_ok;
1697}
1698
1699ReturnValue V4L_camera::setFeature(cameraFeature_id_t feature, double value1, double value2)
1700{
1701 if (feature == cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE) {
1702 bool ret = true;
1703 ret &= set_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE, false);
1705 ret &= set_V4L2_control(V4L2_CID_RED_BALANCE, value1);
1706 ret &= set_V4L2_control(V4L2_CID_BLUE_BALANCE, value2);
1707 if (ret) return ReturnValue_ok;
1708 else return ReturnValue::return_code::return_value_error_method_failed;
1709 }
1710 return ReturnValue::return_code::return_value_error_method_failed;
1711}
1712
1713ReturnValue V4L_camera::getFeature(cameraFeature_id_t feature, double& value1, double& value2)
1714{
1715 if (feature == cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE) {
1716 value1 = get_V4L2_control(V4L2_CID_RED_BALANCE);
1717 value2 = get_V4L2_control(V4L2_CID_BLUE_BALANCE);
1718 bool b = !((value1 == -1) || (value2 == -1));
1719 if (b) return ReturnValue_ok;
1720 else return ReturnValue::return_code::return_value_error_method_failed;
1721 }
1722 return ReturnValue::return_code::return_value_error_method_failed;
1723}
1724
1725ReturnValue V4L_camera::hasOnOff(cameraFeature_id_t feature, bool& _hasOnOff)
1726{
1727 bool _hasAuto;
1728 // I can't find any meaning in setting a feature to off in V4L ... what is it supposed to do?
1729 switch (feature) {
1730 // The following do have a way to set them auto/manual
1731 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1732 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1733 if (hasAuto(feature, _hasAuto)) {
1734 _hasOnOff = true;
1735 } else {
1736 _hasOnOff = false;
1737 }
1738 break;
1739
1740 // try it out
1741 default:
1742 hasAuto(feature, _hasAuto);
1743 if (_hasAuto) {
1744 _hasOnOff = true;
1745 } else {
1746 _hasOnOff = false;
1747 }
1748 break;
1749 }
1750 return ReturnValue_ok;
1751}
1752
1753ReturnValue V4L_camera::setActive(cameraFeature_id_t feature, bool onoff)
1754{
1755 // I can't find any meaning in setting a feature to off in V4L ... what is it supposed to do?
1756 bool tmp;
1757 switch (feature) {
1758 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1759 tmp = set_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE, onoff);
1760 if (tmp) {
1761 isActive_vector[(int)feature] = onoff;
1762 }
1763 break;
1764
1765 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1766 if (onoff) {
1767 set_V4L2_control(V4L2_LOCK_EXPOSURE, false);
1768
1769 tmp = check_V4L2_control(V4L2_CID_EXPOSURE_AUTO);
1770 if (tmp) {
1771 tmp = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_AUTO);
1772 } else {
1773 tmp = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL);
1774 }
1775
1776 if (tmp) {
1777 isActive_vector[(int)feature] = onoff;
1778 }
1779 } else {
1780 bool man = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL);
1781 if (!man) {
1782 man = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_SHUTTER_PRIORITY, true);
1783 if (!man) {
1784 yCError(USBCAMERA) << "Cannot set manual exposure";
1785 }
1786 }
1787 set_V4L2_control(V4L2_LOCK_EXPOSURE, true);
1788 isActive_vector[(int)feature] = onoff;
1789 }
1790 break;
1791
1792 default: // what to do in each case?
1793 if (onoff) {
1794 isActive_vector[(int)feature] = true;
1795 return ReturnValue_ok;
1796 }
1797 isActive_vector[(int)feature] = false;
1798 return ReturnValue::return_code::return_value_error_method_failed;
1799 }
1800 return ReturnValue_ok;
1801}
1802
1803ReturnValue V4L_camera::getActive(cameraFeature_id_t feature, bool& _isActive)
1804{
1805 switch (feature) {
1806 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1807 {
1808 double tmp = get_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE);
1809 if (tmp == 1) {
1810 _isActive = true;
1811 } else {
1812 _isActive = false;
1813 }
1814 break;
1815 }
1816
1817 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1818 {
1819 bool _hasMan = check_V4L2_control(V4L2_CID_EXPOSURE); // check manual version (normal and absolute)
1820 bool _hasMan2 = check_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE);
1821 double _hasAuto = get_V4L2_control(V4L2_CID_EXPOSURE_AUTO, true); // check auto version
1822
1824 break;
1825 }
1826
1827 default:
1828 _isActive = true;
1829 break;
1830 }
1831
1832 return ReturnValue_ok;
1833}
1834
1835ReturnValue V4L_camera::hasAuto(cameraFeature_id_t feature, bool& _hasAuto)
1836{
1837 switch (feature) {
1838 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1839 _hasAuto = check_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE);
1840 break;
1841
1842 case cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS:
1843 _hasAuto = check_V4L2_control(V4L2_CID_AUTOBRIGHTNESS);
1844 break;
1845
1846 case cameraFeature_id_t::YARP_FEATURE_GAIN:
1847 _hasAuto = check_V4L2_control(V4L2_CID_AUTOGAIN);
1848 break;
1849
1850 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1851 _hasAuto = check_V4L2_control(V4L2_CID_EXPOSURE_AUTO);
1852 break;
1853
1854 case cameraFeature_id_t::YARP_FEATURE_HUE:
1855 _hasAuto = check_V4L2_control(V4L2_CID_HUE_AUTO);
1856 break;
1857
1858 default:
1859 _hasAuto = false;
1860 break;
1861 }
1862 return ReturnValue_ok;
1863}
1864
1865ReturnValue V4L_camera::hasManual(cameraFeature_id_t feature, bool& _hasManual)
1866{
1867 if (feature == cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE) {
1868 _hasManual = check_V4L2_control(V4L2_CID_RED_BALANCE) && check_V4L2_control(V4L2_CID_BLUE_BALANCE);
1869 return ReturnValue_ok;
1870 }
1871
1872 if (feature == cameraFeature_id_t::YARP_FEATURE_EXPOSURE) {
1873 _hasManual = check_V4L2_control(V4L2_CID_EXPOSURE) || check_V4L2_control(V4L2_CID_EXPOSURE_ABSOLUTE);
1874 return ReturnValue_ok;
1875 }
1876 return hasFeature(feature, _hasManual);
1877}
1878
1879ReturnValue V4L_camera::hasOnePush(cameraFeature_id_t feature, bool& _hasOnePush)
1880{
1881 // I'm not able to map a 'onePush' request onto the V4L API
1882 switch (feature) {
1883 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1884 _hasOnePush = check_V4L2_control((int)V4L2_CID_DO_WHITE_BALANCE);
1885 return ReturnValue_ok;
1886
1887 default:
1888 _hasOnePush = false;
1889 break;
1890 }
1891 return ReturnValue_ok;
1892}
1893
1894ReturnValue V4L_camera::setMode(cameraFeature_id_t feature, FeatureMode mode)
1895{
1896 bool ret = false;
1897 switch (feature) {
1898 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1899 if (mode == FeatureMode::MODE_AUTO) {
1900 ret = set_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE, true);
1901 } else {
1902 ret = set_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE, false);
1903 }
1904 break;
1905
1906 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1907 bool _tmpAuto;
1908 hasAuto(cameraFeature_id_t::YARP_FEATURE_EXPOSURE, _tmpAuto);
1909
1910 if (_tmpAuto) {
1911 if (mode == FeatureMode::MODE_AUTO) {
1912 ret = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, true);
1913 } else {
1914 ret = set_V4L2_control(V4L2_CID_EXPOSURE_AUTO, false);
1915 }
1916 } else {
1917 ret = mode != FeatureMode::MODE_AUTO;
1918 }
1919 break;
1920
1921 case cameraFeature_id_t::YARP_FEATURE_GAIN:
1922 if (mode == FeatureMode::MODE_AUTO) {
1923 yCInfo(USBCAMERA) << "GAIN: set mode auto";
1924 ret = set_V4L2_control(V4L2_CID_AUTOGAIN, true);
1925 } else {
1926 yCInfo(USBCAMERA) << "GAIN: set mode manual";
1927 ret = set_V4L2_control(V4L2_CID_AUTOGAIN, false);
1928 }
1929 break;
1930
1931 case cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS:
1932 {
1933 bool _tmpAuto;
1934 hasAuto(cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS, _tmpAuto);
1935
1936 if (_tmpAuto) {
1937 if (mode == FeatureMode::MODE_AUTO) {
1938 ret = set_V4L2_control(V4L2_CID_AUTOBRIGHTNESS, true);
1939 } else {
1940 ret = set_V4L2_control(V4L2_CID_AUTOBRIGHTNESS, false);
1941 }
1942 } else {
1943 ret = mode != FeatureMode::MODE_AUTO;
1944 }
1945 break;
1946 }
1947
1948 case cameraFeature_id_t::YARP_FEATURE_HUE:
1949 if (mode == FeatureMode::MODE_AUTO) {
1950 ret = set_V4L2_control(V4L2_CID_HUE_AUTO, true);
1951 } else {
1952 ret = set_V4L2_control(V4L2_CID_HUE_AUTO, false);
1953 }
1954 break;
1955
1956 default:
1957 yCError(USBCAMERA) << "Feature " << (int)feature << " does not support auto mode";
1958 break;
1959 }
1960
1961 if (ret) return ReturnValue_ok;
1962 return ReturnValue::return_code::return_value_error_method_failed;
1963}
1964
1965ReturnValue V4L_camera::getMode(cameraFeature_id_t feature, FeatureMode& mode)
1966{
1967 bool _tmpAuto;
1968 switch (feature) {
1969 case cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE:
1970 {
1971 double ret = get_V4L2_control(V4L2_CID_AUTO_WHITE_BALANCE);
1972 mode = toFeatureMode(ret != 0.0);
1973 break;
1974 }
1975
1976 case cameraFeature_id_t::YARP_FEATURE_EXPOSURE:
1977 {
1978 double ret = get_V4L2_control(V4L2_CID_EXPOSURE_AUTO);
1979 if (ret == -1.0) {
1980 mode = FeatureMode::MODE_MANUAL;
1981 break;
1982 }
1983
1984 if (ret == V4L2_EXPOSURE_MANUAL) {
1985 mode = FeatureMode::MODE_MANUAL;
1986 } else {
1987 mode = FeatureMode::MODE_AUTO;
1988 }
1989 break;
1990 }
1991
1992 case cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS:
1993 hasAuto(cameraFeature_id_t::YARP_FEATURE_BRIGHTNESS, _tmpAuto);
1995 if (!_tmpAuto) {
1996 mode = FeatureMode::MODE_MANUAL;
1997 } else {
1998 double ret = get_V4L2_control(V4L2_CID_AUTOBRIGHTNESS);
1999 mode = toFeatureMode(ret != 0.0);
2000 }
2001 break;
2002
2003 case cameraFeature_id_t::YARP_FEATURE_GAIN:
2004 hasAuto(cameraFeature_id_t::YARP_FEATURE_GAIN, _tmpAuto);
2006 if (!_tmpAuto) {
2007 mode = FeatureMode::MODE_MANUAL;
2008 } else {
2009 double ret = get_V4L2_control(V4L2_CID_AUTOGAIN);
2010 mode = toFeatureMode(ret != 0.0);
2011 }
2012 break;
2013
2014 case cameraFeature_id_t::YARP_FEATURE_HUE:
2015 hasAuto(cameraFeature_id_t::YARP_FEATURE_HUE, _tmpAuto);
2017 if (!_tmpAuto) {
2018 mode = FeatureMode::MODE_MANUAL;
2019 } else {
2020 double ret = get_V4L2_control(V4L2_CID_HUE_AUTO);
2021 mode = toFeatureMode(ret != 0.0);
2022 }
2023 break;
2024
2025 default:
2026 mode = FeatureMode::MODE_MANUAL;
2027 break;
2028 }
2029 return ReturnValue_ok;
2030}
2031
2032ReturnValue V4L_camera::setOnePush(cameraFeature_id_t feature)
2033{
2034 // I'm not able to map a 'onePush' request onto the V4L API
2035 if (feature == cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE) {
2036 if (set_V4L2_control(V4L2_CID_DO_WHITE_BALANCE, true))
2037 {
2038 return ReturnValue_ok;
2039 }
2040 else
2041 {
2042 return ReturnValue::return_code::return_value_error_method_failed;
2043 }
2044
2045 }
2046 return ReturnValue::return_code::return_value_error_method_failed;
2047}
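For reference, the setMode/getMode/setOnePush overrides implemented above are what a client reaches through yarp::dev::IFrameGrabberControls. The sketch below is a minimal, hedged usage example, not taken from this file: the device name "usbCamera", the "d" option for selecting the video node, and the IFrameGrabberControls header path are assumptions, and return values are ignored for brevity.

#include <yarp/dev/IFrameGrabberControls.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/os/Network.h>
#include <yarp/os/Property.h>

#include <cstdio>

int main()
{
    yarp::os::Network yarp; // customary YARP initialization; a local device does not need the name server

    yarp::os::Property cfg;
    cfg.put("device", "usbCamera"); // assumed plugin name for this driver
    cfg.put("d", "/dev/video0");    // assumed option selecting the video node

    yarp::dev::PolyDriver driver;
    if (!driver.open(cfg)) {
        std::printf("cannot open the camera device\n");
        return 1;
    }

    yarp::dev::IFrameGrabberControls* ctrl = nullptr;
    driver.view(ctrl);
    if (ctrl != nullptr) {
        // Request automatic exposure, then read the mode back.
        ctrl->setMode(yarp::dev::cameraFeature_id_t::YARP_FEATURE_EXPOSURE,
                      yarp::dev::FeatureMode::MODE_AUTO);

        auto mode = yarp::dev::FeatureMode::MODE_MANUAL;
        ctrl->getMode(yarp::dev::cameraFeature_id_t::YARP_FEATURE_EXPOSURE, mode);
        std::printf("exposure mode: %s\n",
                    mode == yarp::dev::FeatureMode::MODE_AUTO ? "auto" : "manual");

        // One-push white balance: the only onePush mapping provided by setOnePush().
        ctrl->setOnePush(yarp::dev::cameraFeature_id_t::YARP_FEATURE_WHITE_BALANCE);
    }

    driver.close();
    return 0;
}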