该方法只启动usb摄像头

# Start the USB camera (device 0) and show a live preview window.
import cv2
import numpy
import matplotlib.pyplot as plot


class Camera:
    # Shared capture handle for USB camera 0, opened once at class-definition time.
    cap = cv2.VideoCapture(0)

    @staticmethod
    def getCamera():
        """Grab one frame.

        Returns:
            (ret, frame): ret is False when the read failed; frame is a BGR ndarray.
        """
        ret, frame = Camera.cap.read()
        return ret, frame

    @staticmethod
    def getCap():
        """Return the underlying cv2.VideoCapture object."""
        return Camera.cap


def main():
    """Preview loop: show frames until 'q' is pressed, then release the camera."""
    camera = Camera()
    while True:
        ret, frame = camera.getCamera()
        # Fix: stop cleanly if the grab fails (e.g. camera unplugged) instead of
        # passing an invalid frame to imshow.
        if not ret:
            break
        cv2.imshow("capture", frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    camera.cap.release()
    # Fix: close the preview window on exit (was commented out).
    cv2.destroyAllWindows()


if __name__ == '__main__':
    main()

C++ start onboard camera

#include <stdio.h>
#include <opencv2/opencv.hpp>

using namespace cv;
using namespace std;

// Open the Jetson onboard CSI camera through a GStreamer pipeline
// (nvcamerasrc, 1280x720 @ 24 fps, flipped, converted to BGR) and display
// frames until any key is pressed. Returns -1 if the camera cannot be opened.
int main(int argc, char** argv)
{
	VideoCapture cap("nvcamerasrc ! video/x-raw(memory:NVMM), width=(int)1280, height=(int)720,format=(string)I420, framerate=(fraction)24/1 ! nvvidconv flip-method=2 ! video/x-raw, format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink");
	if (!cap.isOpened())
	{
		cout << "Failed to open camera." << endl;
		return -1;	// restored literal (scrape dropped the digit)
	}
	for (;;)
	{
		Mat frame;
		cap >> frame;
		// Fix: guard against an empty grab (stream ended) before imshow.
		if (frame.empty())
			break;
		imshow("original", frame);
		//waitKey(1);
		if (waitKey(1) >= 0)	// any key press stops the preview
			break;
	}
	return 0;
}

C++ start usb camera

/*
 Author:Jack-Cui
 Blog:http://blog.csdn.net/c406495762
 Time:25 May 2017
*/
#include <unistd.h>
#include <error.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
#include <iomanip>
#include <string>

using namespace std;

// Zero a struct before handing it to an ioctl.
#define CLEAR(x) memset(&(x), 0, sizeof(x))

// Capture resolution requested from the camera (pixels).
#define IMAGEWIDTH 3264
#define IMAGEHEIGHT 2448 class V4L2Capture {
public:
V4L2Capture(char *devName, int width, int height);
virtual ~V4L2Capture(); int openDevice();
int closeDevice();
int initDevice();
int startCapture();
int stopCapture();
int freeBuffers();
int getFrame(void **,size_t *);
int backFrame();
static void test(); private:
int initBuffers(); struct cam_buffer
{
void* start;
unsigned int length;
};
char *devName;
int capW;
int capH;
int fd_cam;
cam_buffer *buffers;
unsigned int n_buffers;
int frameIndex;
}; V4L2Capture::V4L2Capture(char *devName, int width, int height) {
// TODO Auto-generated constructor stub
this->devName = devName;
this->fd_cam = -;
this->buffers = NULL;
this->n_buffers = ;
this->frameIndex = -;
this->capW=width;
this->capH=height;
} V4L2Capture::~V4L2Capture() {
// TODO Auto-generated destructor stub
} int V4L2Capture::openDevice() {
/*设备的打开*/
printf("video dev : %s\n", devName);
fd_cam = open(devName, O_RDWR);
if (fd_cam < ) {
perror("Can't open video device");
}
return ;
} int V4L2Capture::closeDevice() {
if (fd_cam > ) {
int ret = ;
if ((ret = close(fd_cam)) < ) {
perror("Can't close video device");
}
return ;
} else {
return -;
}
} int V4L2Capture::initDevice() {
int ret;
struct v4l2_capability cam_cap; //显示设备信息
struct v4l2_cropcap cam_cropcap; //设置摄像头的捕捉能力
struct v4l2_fmtdesc cam_fmtdesc; //查询所有支持的格式:VIDIOC_ENUM_FMT
struct v4l2_crop cam_crop; //图像的缩放
struct v4l2_format cam_format; //设置摄像头的视频制式、帧格式等 /* 使用IOCTL命令VIDIOC_QUERYCAP,获取摄像头的基本信息*/
ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
if (ret < ) {
perror("Can't get device information: VIDIOCGCAP");
}
printf(
"Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
cam_cap.driver, cam_cap.card, cam_cap.bus_info,
(cam_cap.version >> ) & 0XFF, (cam_cap.version >> ) & 0XFF,
cam_cap.version & 0XFF); /* 使用IOCTL命令VIDIOC_ENUM_FMT,获取摄像头所有支持的格式*/
cam_fmtdesc.index = ;
cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Support format:\n");
while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -) {
printf("\t%d.%s\n", cam_fmtdesc.index + , cam_fmtdesc.description);
cam_fmtdesc.index++;
} /* 使用IOCTL命令VIDIOC_CROPCAP,获取摄像头的捕捉能力*/
cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if ( == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
cam_cropcap.defrect.left, cam_cropcap.defrect.top,
cam_cropcap.defrect.width, cam_cropcap.defrect.height);
/* 使用IOCTL命令VIDIOC_S_CROP,获取摄像头的窗口取景参数*/
cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
cam_crop.c = cam_cropcap.defrect; //默认取景窗口大小
if (- == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
//printf("Can't set crop para\n");
}
} else {
printf("Can't set cropcap para\n");
} /* 使用IOCTL命令VIDIOC_S_FMT,设置摄像头帧信息*/
cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
cam_format.fmt.pix.width = capW;
cam_format.fmt.pix.height = capH;
cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; //要和摄像头支持的类型对应
cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
if (ret < ) {
perror("Can't set frame information");
}
/* 使用IOCTL命令VIDIOC_G_FMT,获取摄像头帧信息*/
cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
if (ret < ) {
perror("Can't get frame information");
}
printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
cam_format.fmt.pix.width, cam_format.fmt.pix.height);
ret = initBuffers();
if (ret < ) {
perror("Buffers init error");
//exit(-1);
}
return ;
} int V4L2Capture::initBuffers() {
int ret;
/* 使用IOCTL命令VIDIOC_REQBUFS,申请帧缓冲*/
struct v4l2_requestbuffers req;
CLEAR(req);
req.count = ;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
if (ret < ) {
perror("Request frame buffers failed");
}
if (req.count < ) {
perror("Request frame buffers while insufficient buffer memory");
}
buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
if (!buffers) {
perror("Out of memory");
}
for (n_buffers = ; n_buffers < req.count; n_buffers++) {
struct v4l2_buffer buf;
CLEAR(buf);
// 查询序号为n_buffers 的缓冲区,得到其起始物理地址和大小
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
if (ret < ) {
printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
return -;
}
buffers[n_buffers].length = buf.length;
//printf("buf.length= %d\n",buf.length);
// 映射内存
buffers[n_buffers].start = mmap(
NULL, // start anywhere
buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start) {
printf("mmap buffer%d failed\n", n_buffers);
return -;
}
}
return ;
} int V4L2Capture::startCapture() {
unsigned int i;
for (i = ; i < n_buffers; i++) {
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (- == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
printf("VIDIOC_QBUF buffer%d failed\n", i);
return -;
}
}
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (- == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
printf("VIDIOC_STREAMON error");
return -;
}
return ;
} int V4L2Capture::stopCapture() {
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (- == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
printf("VIDIOC_STREAMOFF error\n");
return -;
}
return ;
} int V4L2Capture::freeBuffers() {
unsigned int i;
for (i = ; i < n_buffers; ++i) {
if (- == munmap(buffers[i].start, buffers[i].length)) {
printf("munmap buffer%d failed\n", i);
return -;
}
}
free(buffers);
return ;
} int V4L2Capture::getFrame(void **frame_buf, size_t* len) {
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
if (- == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
printf("VIDIOC_DQBUF error\n");
return -;
}
*frame_buf = buffers[queue_buf.index].start;
*len = buffers[queue_buf.index].length;
frameIndex = queue_buf.index;
return ;
} int V4L2Capture::backFrame() {
if (frameIndex != -) {
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
queue_buf.index = frameIndex;
if (- == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
printf("VIDIOC_QBUF error\n");
return -;
}
return ;
}
return -;
} void V4L2Capture::test() {
unsigned char *yuv422frame = NULL;
unsigned long yuvframeSize = ; string videoDev="/dev/video0";
V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
, );
vcap->openDevice();
vcap->initDevice();
vcap->startCapture();
vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize); vcap->backFrame();
vcap->freeBuffers();
vcap->closeDevice();
} void VideoPlayer() {
unsigned char *yuv422frame = NULL;
unsigned long yuvframeSize = ; string videoDev = "/dev/video0";
V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()), , );
vcap->openDevice();
vcap->initDevice();
vcap->startCapture(); cvNamedWindow("Capture",CV_WINDOW_AUTOSIZE);
IplImage* img;
CvMat cvmat;
double t;
while(){
t = (double)cvGetTickCount();
vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);
cvmat = cvMat(IMAGEHEIGHT,IMAGEWIDTH,CV_8UC3,(void*)yuv422frame); //CV_8UC3 //解码
img = cvDecodeImage(&cvmat,);
if(!img){
printf("DecodeImage error!\n");
} cvShowImage("Capture",img);
cvReleaseImage(&img); vcap->backFrame();
if((cvWaitKey()&) == ){
exit();
}
t = (double)cvGetTickCount() - t;
printf("Used time is %g ms\n",( t / (cvGetTickFrequency()*)));
}
vcap->stopCapture();
vcap->freeBuffers();
vcap->closeDevice(); } int main() {
VideoPlayer();
return ;
}

Jetson TX1使用usb camera采集图像 (2)的更多相关文章

  1. Jetson TX1使用usb camera采集图像 (1)

    使用python实现 https://jkjung-avt.github.io/tx2-camera-with-python/ How to Capture and Display Camera Vi ...

  2. Camera 采集图像的方法

    使用 Camera 采集图像, 实现步骤如下: 需要权限: android.permission.CAMERA android.permission.WRITE_EXTERNAL_STORAGE // ...

  3. 【Xilinx-Petalinux学习】-06-OpenCV通过USB摄像头采集图像。

    占位, 实现USB摄像头的图像采集与保存

  4. 基于英伟达Jetson TX1的GPU处理平台

    基于英伟达Jetson TX1 GPU的HDMI图像输入的深度学习套件 [309] 本平台基于英伟达的Jetson TX1视觉计算的全功能开发板,配合本公司研发的HDMI输入图像采集板:Jetson ...

  5. camera按键采集图像及waitKey的用法(转)

    源: camera按键采集图像及waitKey的用法

  6. camera按键采集图像及waitKey的用法

    前言 项目需要通过摄像头采集图像并保存,主要是用于后续的摄像头标定.实现过程其实很简单,需要注意一些细节. 系统环境 系统版本:ubuntu16.04:opencv版本:opencv2.4.13:编程 ...

  7. [转]Jetson TX1 开发教程(1)配置与刷机

    开箱 Jetson TX1是英伟达公司新出的GPU开发板,拥有世界上先进的嵌入式视觉计算系统,提供高性能.新技术和极佳的开发平台.在进行配置和刷机工作之前,先来一张全家福: 可以看到,Jetson T ...

  8. 【并行计算-CUDA开发】 NVIDIA Jetson TX1

    概述 NVIDIA Jetson TX1是计算机视觉系统的SoM(system-on-module)解决方案.它组合了最新的NVIDIAMaxwell GPU架构,其具有ARM Cortex-A57 ...

  9. ffmpeg从USB摄像头采集一张原始图片(转)

    本文讲解使用ffmpeg从USB摄像头中采集一帧数据并写入文件保存,测试平台使用全志A20平台,其他平台修改交叉工具链即可移植.开发环境使用eclipse+CDT.交叉工具链使用arm-Linux-g ...

随机推荐

  1. 从PRISM开始学WPF(六)MVVM(二)Command-更新至Prism7.1

    命令绑定(Command) [7.1updated]这一节除了基础app部分,并没有什么变化 什么是Command? 先看下微软官方的说明: Commanding is an input mechan ...

  2. jQuery(三)、属性、CSS

    jQuery设置了很多为标签进行属性的操作,比如添加.删除. 一 .属性 1 attr(name | properties | [key, value | fn]) 设置或返回被选择的属性值. 参数: ...

  3. java中求质数(素数)的问题

    这篇笔记讲讲关于java中质数的问题. 一.什么是质数(素数)? 定义:质数又称素数.一个大于1的自然数,除了1和它自身外,不能被其他自然数整除的数叫做质数:否则称为合数.它可以有无限个数. 二.ja ...

  4. java--基本数据类型的转换(自动转换)

    概念:Java中,经常可以遇到类型转换的场景,从变量的定义到复制.数值变量的计算到方法的参数传递.基类与派生类间的造型等,随处可见类型转换的身影.Java中的类型转换在Java编码中具有重要的作用.首 ...

  5. PHP一些常用的正则表达式分享给大家

    一.校验数字的表达式 1 数字:^[0-9]*$2 n位的数字:^\d{n}$3 至少n位的数字:^\d{n,}$4 m-n位的数字:^\d{m,n}$5 零和非零开头的数字:^(0|[1-9][0- ...

  6. 第十三课 CSS外观及样式的应用 css学习3

    一.1.color: 文本颜色 预定义文本颜色值,如red,blue等 十六进制的颜色值 #fff白色 建议常用的表示方法 RGB代码,如红色可以表示为rgb(255,0,0)或rgb(100%,%0 ...

  7. 利用IDisposable接口构建包含非托管资源对象

    托管资源与非托管资源 在.net中,对象使用的资源分为两种:托管资源与非托管资源.托管资源由CLR进行管理,不需要开发人员去人工进行控制,.NET中托管资源主要指"对象在堆中的内存" ...

  8. android常犯错误记录(三)

    java.lang.IndexOutOfBoundsException: Inconsistency detected. Invalid view holder adapter positionVie ...

  9. java.lang.IllegalArgumentException: Called attach on a child which is not detached: ViewHolder

    转载请标明出处,维权必究:https://www.cnblogs.com/tangZH/p/10116298.html 在项目过程中出现了上述错误. 会出现这样的错误是在我使用: notifyItem ...

  10. android中使用afinal一行源码显示网络图片

    下面代码是关于android中使用afinal一行显示网络图片的代码. public class DemoActivity extends FinalActivity { @Override publ ...