Debugging Synchronized Four-Camera Capture on the NVIDIA Xavier Platform

free-jdx 2021-03-16 17:25:52
1. Introduction

Platform: Xavier

Camera: Leopard Imaging LI-XAVIER-KIT-IMX477CS-Q kit with four IMX477-based cameras (Leopard Imaging is an official NVIDIA camera partner).

Goal:
Implement a simple application that captures images from all four cameras synchronously.

2. Application Code
#include <stdio.h>
#include <vector>

#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>

#define SENSOR_MODE                     0

#define FIVE_SECONDS_IN_NANOSECONDS     5000000000L

int main(int argc, char** argv) {
    // Initialize camera provider
    Argus::UniqueObj<Argus::CameraProvider> cameraProvider(Argus::CameraProvider::create());
    Argus::ICameraProvider *iCameraProvider = Argus::interface_cast<Argus::ICameraProvider>(cameraProvider);
    if(!iCameraProvider) {
        fprintf(stderr, "Cannot get core camera provider interface\n");
        return -1;
    }
    printf("\n\nArgus Version: %s\n", iCameraProvider->getVersion().c_str());

    // Get camera devices
    std::vector<Argus::CameraDevice *> cameraDevices;
    iCameraProvider->getCameraDevices(&cameraDevices);
    if(cameraDevices.size() < 1) {
        fprintf(stderr, "No cameras found\n");
        return -2;
    }
    printf("Camera count: %u\n", (unsigned) cameraDevices.size());

    // Create capture session
    Argus::Status status;
    Argus::UniqueObj<Argus::CaptureSession> captureSession(iCameraProvider->createCaptureSession(cameraDevices, &status));
    Argus::ICaptureSession *iCaptureSession = Argus::interface_cast<Argus::ICaptureSession>(captureSession);
    if(!iCaptureSession) {
        fprintf(stderr, "Failed to create capture session\n");
        return -3;
    }

    // Get sensor mode
    Argus::ICameraProperties *iCameraProperties = Argus::interface_cast<Argus::ICameraProperties>(cameraDevices[0]);
    if(!iCameraProperties) {
        fprintf(stderr, "Failed to get camera properties\n");
        return -3;
    }
    std::vector<Argus::SensorMode *> sensormodes;
    iCameraProperties->getAllSensorModes(&sensormodes);
    Argus::ISensorMode *iSensorMode = Argus::interface_cast<Argus::ISensorMode>(sensormodes[SENSOR_MODE]);
    if(!iSensorMode) {
        fprintf(stderr, "Failed to get sensor mode\n");
        return -3;
    }

    // Create output stream
    Argus::UniqueObj<Argus::OutputStreamSettings> streamSettings(iCaptureSession->createOutputStreamSettings(Argus::STREAM_TYPE_EGL));
    Argus::IEGLOutputStreamSettings *iEglStreamSettings = Argus::interface_cast<Argus::IEGLOutputStreamSettings>(streamSettings);
    if(!iEglStreamSettings) {
        fprintf(stderr, "Failed to create EGL stream settings\n");
        return -4;
    }
    iEglStreamSettings->setPixelFormat(Argus::PIXEL_FMT_YCbCr_420_888);
    iEglStreamSettings->setResolution(iSensorMode->getResolution());
    iEglStreamSettings->setMetadataEnable(true);
    Argus::IOutputStreamSettings *iStreamSettings = Argus::interface_cast<Argus::IOutputStreamSettings>(streamSettings);
    if(!iStreamSettings) {
        fprintf(stderr, "Failed to get output stream settings interface\n");
        return -4;
    }
    std::vector<Argus::UniqueObj<Argus::OutputStream> *> outputStreams;
    std::vector<EGLStream::IFrameConsumer *> iFrameConsumers;
    for(int i = 0; i < (int) cameraDevices.size(); i++) {
        // Bind this output stream to the i-th camera device before creating it
        iStreamSettings->setCameraDevice(cameraDevices[i]);
        Argus::UniqueObj<Argus::OutputStream> *os = new Argus::UniqueObj<Argus::OutputStream>;
        os->reset(iCaptureSession->createOutputStream(streamSettings.get()));
        if(!os->get()) {
            fprintf(stderr, "Failed to create output stream #%d\n", i);
            return -4;
        }
        outputStreams.push_back(os);

        Argus::UniqueObj<EGLStream::FrameConsumer> *cons = new Argus::UniqueObj<EGLStream::FrameConsumer>;
        cons->reset(EGLStream::FrameConsumer::create(outputStreams[i]->get()));
        EGLStream::IFrameConsumer *iFrameConsumer = Argus::interface_cast<EGLStream::IFrameConsumer>(*cons);
        if(!iFrameConsumer) {
            fprintf(stderr, "Failed to create consumer #%d\n", i);
            return -5;
        }
        iFrameConsumers.push_back(iFrameConsumer);
    }

    Argus::UniqueObj<Argus::Request> request(iCaptureSession->createRequest());
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(request);
    if(!iRequest) {
        fprintf(stderr, "Failed to create request\n");
        return -5;
    }
    for(int i = 0; i < (int) outputStreams.size(); i++) {
        status = iRequest->enableOutputStream(outputStreams[i]->get());
        if(status != Argus::STATUS_OK) {
            fprintf(stderr, "Failed to enable output stream #%d\n", i);
            return -5;
        }
    }

    uint32_t requestId = iCaptureSession->capture(request.get());
    if(!requestId) {
        fprintf(stderr, "Failed to submit capture request\n");
        return -6;
    }

    for(int i = 0; i < (int) iFrameConsumers.size(); i++) {
        Argus::UniqueObj<EGLStream::Frame> frame(iFrameConsumers[i]->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS, &status));
        EGLStream::IFrame *iFrame = Argus::interface_cast<EGLStream::IFrame>(frame);
        if(!iFrame) {
            fprintf(stderr, "Failed to get IFrame interface #%d\n", i);
            return -6;
        }
        EGLStream::Image *image = iFrame->getImage();
        if(!image) {
            fprintf(stderr, "Failed to get image #%d\n", i);
            return -6;
        }
        EGLStream::IImageJPEG *iImageJPEG = Argus::interface_cast<EGLStream::IImageJPEG>(image);
        if(!iImageJPEG) {
            fprintf(stderr, "Failed to get IImageJPEG interface #%d\n", i);
            return -6;
        }
        char filename[255];
        snprintf(filename, sizeof(filename), "argus_syncshot_%d.jpg", i);
        status = iImageJPEG->writeJPEG(filename);
        if(status != Argus::STATUS_OK) {
            fprintf(stderr, "Failed to write JPEG #%d\n", i);
            return -6;
        }
    }

    printf("\n\nDONE!\n");
    return 0;
}
3. Problems Encountered During Debugging

(1) There is a delay of roughly 120 ms between the capture times of two cameras.
(2) The colors captured from the two cameras (at the same moment) differ noticeably. Error log:

NvCaptureStatusErrorDecode Stream 2.0 failed: sof_ts 0 eof_ts 6035105617568 frame 0 error 2 data 0x000000a2
NvCaptureStatusErrorDecode Capture-Error: CSIMUX_FRAME (0x00000002)
CsimuxFrameError_Regular : 0x000000a2
Stream ID [ 2: 0]: 2
VPR state from fuse block [ 3]: 0
Frame end (FE) [ 5]: 1
A frame end has been found on a regular mode stream.
FS_FAULT [ 7]: 1
A FS packet was found for a virtual channel that was already in frame.An errored FE packet was injected before FS was allowed through.
Binary VC number [3:2] [27:26]: 0
To get full binary VC number, user need to concatenate VC[3:2] and VC[1:0] together.
SCF: Error InvalidState: Capture error with status 2 (channel 0) (in src/services/capture/NvCaptureViCsiHw.cpp, function waitCsiFrameEnd(), line 880)
(Argus) Objects still active during exit: [CameraProvider (0x5591075f00): refs: 1, cref: 0]

(3) When all four cameras are used, all four output streams return the same image, captured from the first camera added, along with the following error:

ClipHelper
allowIspClipping: true
maxIspDownscale: 4.0:1 4096
maxIspOutWidth: 6144,4096
ispIn: (4056 x 3040)
PRU enabled: false, interleaved input: (0 x 0)
postProcessingSize: (4056 x 3040)
postIspClip: (0.00,0.00, 1.00,1.00)
ispOut[0]: (4056 x 3040)
ispClip[0]: (0.00,0.00, 1.00,1.00)
ispOut[1]: (0 x 0)
ispClip[1]: (0.00,0.00, 1.00,1.00)
out[0] 4056x3040 req (0.00,0.00, 1.00,1.00) final (0.00,0.00, 1.00,1.00) isp from isp[0]
StageGroup 0x7f28000d00 parent=(nil) 4056x3040 (1 exposure) obufMask=f finalMask=0
stages[0] = 35 SensorCaptureStage(in = 12, outA= 6, outB = 12, outThumb = 12, outMeta = 7, outStats = 12) routed
StageGroup 0x7f280018d0 parent=0x7f28000d00 4056x3040 (1 exposure) obufMask=f finalMask=f
stages[0] = 27 MemoryToISPCaptureStage(in = 6, outA= 0, outB = 12, outThumb = 4, outMeta = 12, outStats = 5) routed
m_bufStates[0] = 0 attached output done readOrder=0 writeOrder=2 group=0x7f280018d0 fbs=isp0
4056x3040 BL U8_V8_ER 420SP
m_bufStates[1] = 1 attached output done readOrder=0 writeOrder=2 group=0x7f28000d00 fbs=none
4056x3040 BL U8_V8_ER 420SP
m_bufStates[2] = 2 attached output done readOrder=0 writeOrder=2 group=0x7f28000d00 fbs=none
4056x3040 BL U8_V8_ER 420SP
m_bufStates[3] = 3 attached output done readOrder=0 writeOrder=2 group=0x7f28000d00 fbs=none
4056x3040 BL U8_V8_ER 420SP
m_bufStates[4] = 4 attached readOrder=0 writeOrder=2 group=0x7f280018d0 AF fbs=none
640x480 Pitch U8_V8_ER 420SP
m_bufStates[5] = 5 readOrder=0 writeOrder=2 group=0x7f280018d0 fbs=none
524288x1 Pitch NonColor8
m_bufStates[6] = 6 readOrder=1 writeOrder=1 group=0x7f28000d00 fbs=none
4056x3040 Pitch BayerS16RGGB
m_bufStates[7] = 7 readOrder=0 writeOrder=1 group=0x7f28000d00 fbs=none
4056x1 Pitch NonColor8
GraphHelper blit pixel count=73981440 != ClipHelper blit pixel count=0
(Argus) Objects still active during exit: [CameraProvider (0x5578a15f00): refs: 1, cref: 0]

(4) Several questions to raise with NVIDIA support:

a) Is a time difference of roughly 100 ms between cameras the best this synchronization approach can achieve?
(We are also waiting to hear back from Leopard Imaging on whether they support any hardware-based synchronization.)

b) According to the syncSensor sample code, auto-correction applies the same correction to all sensors, computed from the first sensor added. Why, then, do two images taken by adjacent cameras differ in hue?

c) What do the error messages above mean?

d) Are four cameras supported in a single capture session?
If not, what is the best approach here, especially with respect to auto-correction?

e) Once I start capturing video, can the ISP handle live video from four cameras at full resolution (12 MP) at 30 frames per second?

NVIDIA's replies:

a. The timestamps need to be synchronized.
b. Yes, they should be identical; the device tree may be causing incorrect ISP settings.
c. I think those messages already reveal the relevant information.
d. Synchronizing the sensors requires hardware support in the design; otherwise the frames will not be synchronized.
e. Yes, 4 x 4K @ 30 fps is supported.
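
To put numbers on reply a), the per-frame sensor timestamp can be read from the Argus capture metadata attached to each acquired frame (metadata is already enabled via setMetadataEnable(true) in the code above). The helper below is only a sketch: it assumes the EGLStream::IArgusCaptureMetadata interface (header EGLStream/ArgusCaptureMetadata.h) and Argus::ICaptureMetadata::getSensorTimestamp() are available in this Argus release, and the printSensorTimestamp name is mine, not part of the original program.

#include <EGLStream/ArgusCaptureMetadata.h>

// Sketch only: print the sensor timestamp carried in a frame's capture metadata,
// so the skew between cameras within the same capture can be compared.
static void printSensorTimestamp(EGLStream::Frame *frame, int index) {
    EGLStream::IArgusCaptureMetadata *iArgusMd =
        Argus::interface_cast<EGLStream::IArgusCaptureMetadata>(frame);
    if(!iArgusMd)
        return;
    Argus::ICaptureMetadata *iMetadata =
        Argus::interface_cast<Argus::ICaptureMetadata>(iArgusMd->getMetadata());
    if(!iMetadata)
        return;
    // Timestamps (in nanoseconds) should be nearly identical across cameras
    // of the same capture if the sensors are genuinely synchronized.
    printf("camera #%d sensor timestamp: %llu ns\n",
           index, (unsigned long long) iMetadata->getSensorTimestamp());
}

Calling printSensorTimestamp(frame.get(), i) inside the acquire loop of the program above makes the inter-camera offset directly visible.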
4. Getting syncSensor to Work with Four Cameras

The problem now is:

If the cameraDevices vector in my code is restricted to just two cameras, capture works correctly; but when the code runs as-is, all four output files end up containing the image from camera #0, i.e. the same camera's image on every stream, which is clearly wrong.
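
For reference, the two-camera workaround mentioned above is just a matter of truncating the device list before the capture session is created; a minimal sketch:

// Workaround sketch: keep only the first two enumerated cameras.
// (Placed right after getCameraDevices() in the program above.)
if(cameraDevices.size() > 2)
    cameraDevices.resize(2);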

(1) Try reducing the resolution

All four IMX477 cameras are currently running at 4K; try dropping the resolution to 1080p,
and disable preview rendering to narrow down the problem.

Even reduced to 692 x 520, the problem persists: all four files contain the image from camera #0 (the resolution change is sketched below).
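
The resolution change was made by replacing the sensor-mode resolution on the EGL stream settings with a fixed output size; a minimal sketch of that one-line change (692 x 520 is just the test value used here):

// Instead of iEglStreamSettings->setResolution(iSensorMode->getResolution()),
// request a fixed, smaller output size for every stream.
iEglStreamSettings->setResolution(Argus::Size2D<uint32_t>(692, 520));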

Debug log:

OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module0
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module1
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module2
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module3
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
OFParserGetVirtualDevice: NVIDIA Camera virtual enumerator not found in proc device-tree
---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again
CAM: serial no file already exists, skips storing againClipHelper
allowIspClipping: true
maxIspDownscale: 4.0:1 4096
maxIspOutWidth: 6144,4096
ispIn: (4056 x 3040)
PRU enabled: false, interleaved input: (0 x 0)
postProcessingSize: (1014 x 760)
postIspClip: (0.00,0.00, 1.00,1.00)
ispOut[0]: (1014 x 760)
ispClip[0]: (0.00,0.00, 1.00,1.00)
ispOut[1]: (0 x 0)
ispClip[1]: (0.00,0.00, 1.00,1.00)
out[0] 692x520 req (0.00,0.00, 1.00,1.00) final (0.00,0.00, 1.00,1.00) isp from isp[0]
StageGroup 0x7f48000d00 parent=(nil) 4056x3040 (1 exposure) obufMask=f finalMask=0
stages[0] = 35 SensorCaptureStage(in = 12, outA= 7, outB = 12, outThumb = 12, outMeta = 8, outStats = 12) routed
StageGroup 0x7f480018d0 parent=0x7f48000d00 1014x760 (1 exposure) obufMask=f finalMask=f
stages[0] = 27 MemoryToISPCaptureStage(in = 7, outA= 5, outB = 12, outThumb = 4, outMeta = 12, outStats = 6) routed
m_bufStates[0] = 0 attached output done readOrder=0 writeOrder=2 group=0x7f48000d00 fbs=none
692x520 BL U8_V8_ER 420SP
m_bufStates[1] = 1 attached output done readOrder=0 writeOrder=2 group=0x7f48000d00 fbs=none
692x520 BL U8_V8_ER 420SP
m_bufStates[2] = 2 attached output done readOrder=0 writeOrder=2 group=0x7f48000d00 fbs=none
692x520 BL U8_V8_ER 420SP
m_bufStates[3] = 3 attached output done readOrder=0 writeOrder=2 group=0x7f48000d00 fbs=none
692x520 BL U8_V8_ER 420SP
m_bufStates[4] = 4 attached readOrder=0 writeOrder=2 group=0x7f480018d0 AF fbs=none
640x480 Pitch U8_V8_ER 420SP
m_bufStates[5] = 5 attached readOrder=0 writeOrder=2 group=0x7f480018d0 fbs=isp0
1014x760 BL U8_V8_ER 420SP
m_bufStates[6] = 6 readOrder=0 writeOrder=2 group=0x7f480018d0 fbs=none
524288x1 Pitch NonColor8
m_bufStates[7] = 7 readOrder=1 writeOrder=1 group=0x7f48000d00 fbs=none
4056x3040 Pitch BayerS16RGGB
m_bufStates[8] = 8 readOrder=0 writeOrder=1 group=0x7f48000d00 fbs=none
4056x1 Pitch NonColor8
GraphHelper blit pixel count=4521920 != ClipHelper blit pixel count=1130480
(Argus) Objects still active during exit: [CameraProvider (0x558f16cf00): refs: 1, cref: 0]
Argus Version: 0.97.3 (single-process)
Camera count: 4
DONE!

(2) Check the SDK version

cat /etc/nv_tegra_release

Linux agx 4.9.140 #1 SMP PREEMPT Sat Jan 16 23:56:01 WIB 2021 aarch64 aarch64 aarch64 GNU/Linux
(only change to the stock kernel is Leopard Imaging’s camera driver and DTB)
R32 (release), REVISION: 4.4, GCID: 23942405, BOARD: t186ref, EABI: aarch64, DATE: Fri Oct 16 19:37:08 UTC 2020
Argus Version: 0.97.3 (single-process)

After discussion with NVIDIA support:
the current release only supports three cameras in a single session, not four.

(3) Limit the session to three cameras

With three cameras, 3840x2160 and 692x520 do not work, but 4056 x 3040 does. Before settling on an output size, it helps to print the resolutions each sensor mode actually reports, as sketched below.
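
A minimal sketch using the sensormodes vector already queried in the program above:

// Print every sensor mode reported by camera 0, so the output size can be
// matched exactly to a mode that is known to work (here 4056x3040).
for(size_t m = 0; m < sensormodes.size(); m++) {
    Argus::ISensorMode *mode = Argus::interface_cast<Argus::ISensorMode>(sensormodes[m]);
    if(!mode)
        continue;
    Argus::Size2D<uint32_t> res = mode->getResolution();
    printf("sensor mode %zu: %u x %u\n", m, res.width(), res.height());
}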
This is the output when it fails:

OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module0
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module1
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module2
OFParserListModules: module list: /proc/device-tree/tegra-camera-platform/modules/module3
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
NvPclHwGetModuleList: WARNING: Could not map module to ISP config string
NvPclHwGetModuleList: No module data found
OFParserGetVirtualDevice: NVIDIA Camera virtual enumerator not found in proc device-tree
---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again---- imager: Found override file [/var/nvidia/nvcam/settings/camera_overrides.isp]. ----
CAM: serial no file already exists, skips storing again
CAM: serial no file already exists, skips storing again
SCF: Error NotSupported: Output buffer format not supported: 692x520 BL U8_V8_ER 420SP (in src/components/GraphHelper.cpp, function findBlitSource(), line 1449)
SCF: Error NotSupported: (propagating from src/components/GraphHelper.cpp, function addScalingBlit(), line 2025)
SCF: Error NotSupported: (propagating from src/components/GraphHelper.cpp, function addOutputScalingBlits(), line 1984)
SCF: Error NotSupported: (propagating from src/components/CaptureSetupEngineImpl.cpp, function genInstructionsCoordinatedCamera(), line 1626)
SCF: Error NotSupported: (propagating from src/components/CaptureSetupEngineImpl.cpp, function doGetInstructions(), line 2211)
SCF: Error NotSupported: (propagating from src/components/CaptureSetupEngine.cpp, function getInstructionList(), line 300)
SCF: Error NotSupported: (propagating from src/components/CaptureSetupEngine.cpp, function setupCC(), line 214)
SCF: Error NotSupported: (propagating from src/api/Session.cpp, function capture(), line 815)
(Argus) Error NotSupported: (propagating from src/api/ScfCaptureThread.cpp, function run(), line 109)
Failed to get IFrame interface #0
Argus Version: 0.97.3 (single-process)
Camera count: 3
(Argus) Objects still active during exit: [CameraProvider (0x558a399f00): refs: 1, cref: 0]

(4) Update the shared libraries

After repeated discussion, NVIDIA support provided updated shared libraries
to replace the ones under /usr/lib/aarch64-xxxx/tegra/.

With this change in place, a single session runs all four cameras correctly (a sketch of continuous streaming with the working session follows below).
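
With all four cameras running in one session, continuous streaming (the scenario behind question e above) can be tried by replacing the single capture() call with a repeating request. A minimal sketch, assuming ICaptureSession::repeat(), stopRepeat() and waitForIdle() behave as in the standard Argus samples:

// Instead of a single iCaptureSession->capture(request.get()), submit the same
// multi-stream request repeatedly so all four cameras stream continuously.
Argus::Status st = iCaptureSession->repeat(request.get());
if(st != Argus::STATUS_OK) {
    fprintf(stderr, "Failed to start repeating capture\n");
    return -6;
}
// ... acquire and process frames from each FrameConsumer in a loop ...
// Stop streaming and drain the pipeline before tearing the session down.
iCaptureSession->stopRepeat();
iCaptureSession->waitForIdle();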

5. Adding Camera Hardware Synchronization

With support from Leopard Imaging, the front end is configured so that the four IMX477 sensors are triggered synchronously;
this achieves hardware synchronization of the four cameras.
