该方法只启动usb摄像头

import cv2
import numpy
import matplotlib.pyplot as plot


class Camera:
    """Thin wrapper around a single USB camera opened via OpenCV.

    The capture handle is a class attribute, so every caller shares the
    same device (index 0).
    """

    # Shared capture handle for /dev/video0 (device index 0).
    cap = cv2.VideoCapture(0)

    @staticmethod
    def getCamera():
        """Grab one frame; returns (ret, frame) exactly as cap.read() does.

        ret is False when the read failed, in which case frame is None.
        """
        ret, frame = Camera.cap.read()
        return ret, frame

    @staticmethod
    def getCap():
        """Expose the underlying cv2.VideoCapture object."""
        return Camera.cap


def main():
    """Show the camera stream in a window until 'q' is pressed."""
    camera = Camera()
    while (1):
        ret, frame = camera.getCamera()
        cv2.imshow("capture", frame)
        # waitKey(1): pump the GUI event loop; low byte of the key code
        # is compared against 'q' to quit.
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    camera.cap.release()
    # cv2.destroyAllWindows()


if __name__ == '__main__':
    main()

C++ start onboard camera

#include <stdio.h>
#include <opencv2/opencv.hpp>

using namespace cv;
using namespace std;

// Open the Jetson onboard camera through a GStreamer pipeline and show
// frames until any key is pressed.  The stripped numeric literals from
// the original paste have been restored (-1 error return, 1 ms waitKey,
// 0 success return).
int main(int argc, char** argv)
{
    // GStreamer pipeline: onboard CSI camera -> NVMM I420 720p24 ->
    // flipped, converted to BGR, delivered to OpenCV via appsink.
    VideoCapture cap("nvcamerasrc ! video/x-raw(memory:NVMM), width=(int)1280, height=(int)720,format=(string)I420, framerate=(fraction)24/1 ! nvvidconv flip-method=2 ! video/x-raw, format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink");
    if (!cap.isOpened())
    {
        cout << "Failed to open camera." << endl;
        return -1;  // restored literal: conventional failure exit code
    }
    for (;;)
    {
        Mat frame;
        cap >> frame;
        imshow("original", frame);
        //waitKey(1);
        // restored literal: wait 1 ms per frame; waitKey returns >= 0
        // when any key was pressed, which ends the loop.
        if (waitKey(1) >= 0)
            break;
    }
    return 0;  // restored literal: success
}

C++ start usb camera

/*
Author:Jack-Cui
Blog:http://blog.csdn.net/c406495762
Time:25 May 2017
*/
/*
 Author:Jack-Cui
 Blog:http://blog.csdn.net/c406495762
 Time:25 May 2017
*/
#include <unistd.h>
#include <error.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
#include <iomanip>
#include <string>

using namespace std;

#define CLEAR(x) memset(&(x), 0, sizeof(x))

#define IMAGEWIDTH 3264
#define IMAGEHEIGHT 2448

// Minimal V4L2 (Video4Linux2) MJPEG capture helper: opens a /dev/videoX
// device, negotiates the frame format, memory-maps the driver's frame
// buffers, and hands raw MJPEG frames to the caller.
// All methods return 0 on success and -1 on failure (restored from the
// garbled paste, which had lost every numeric literal).
class V4L2Capture {
public:
    V4L2Capture(char *devName, int width, int height);
    virtual ~V4L2Capture();

    int openDevice();
    int closeDevice();
    int initDevice();
    int startCapture();
    int stopCapture();
    int freeBuffers();
    // Returns a pointer into a mmap'ed driver buffer plus its length;
    // the frame must be returned with backFrame() before the next call.
    int getFrame(void **, size_t *);
    int backFrame();
    static void test();

private:
    int initBuffers();

    // One mmap'ed driver frame buffer.
    struct cam_buffer {
        void* start;
        unsigned int length;
    };
    char *devName;          // device path, e.g. "/dev/video0"
    int capW;               // requested capture width
    int capH;               // requested capture height
    int fd_cam;             // device file descriptor, -1 when closed
    cam_buffer *buffers;    // array of mmap'ed buffers
    unsigned int n_buffers; // number of buffers actually mapped
    int frameIndex;         // index of the buffer currently dequeued, -1 if none
};

V4L2Capture::V4L2Capture(char *devName, int width, int height) {
    // TODO Auto-generated constructor stub
    this->devName = devName;
    this->fd_cam = -1;      // not opened yet
    this->buffers = NULL;
    this->n_buffers = 0;
    this->frameIndex = -1;  // no frame dequeued yet
    this->capW = width;
    this->capH = height;
}

V4L2Capture::~V4L2Capture() {
    // TODO Auto-generated destructor stub
}

int V4L2Capture::openDevice() {
    /* Open the capture device. */
    printf("video dev : %s\n", devName);
    fd_cam = open(devName, O_RDWR);
    if (fd_cam < 0) {
        perror("Can't open video device");
    }
    return 0;
}

int V4L2Capture::closeDevice() {
    if (fd_cam > 0) {
        int ret = 0;
        if ((ret = close(fd_cam)) < 0) {
            perror("Can't close video device");
        }
        return 0;
    } else {
        return -1;
    }
}

int V4L2Capture::initDevice() {
    int ret;
    struct v4l2_capability cam_cap;     // device capability info
    struct v4l2_cropcap cam_cropcap;    // cropping capability
    struct v4l2_fmtdesc cam_fmtdesc;    // enumerate supported formats: VIDIOC_ENUM_FMT
    struct v4l2_crop cam_crop;          // image crop/scale window
    struct v4l2_format cam_format;      // video standard / frame format

    /* VIDIOC_QUERYCAP: query basic device information. */
    ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
    if (ret < 0) {
        perror("Can't get device information: VIDIOCGCAP");
    }
    // version is packed as major.minor.patch in three bytes.
    printf(
            "Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
            cam_cap.driver, cam_cap.card, cam_cap.bus_info,
            (cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,
            cam_cap.version & 0XFF);

    /* VIDIOC_ENUM_FMT: list every pixel format the camera supports. */
    cam_fmtdesc.index = 0;
    cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Support format:\n");
    while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
        printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
        cam_fmtdesc.index++;
    }

    /* VIDIOC_CROPCAP: query the cropping capability. */
    cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
        printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
                cam_cropcap.defrect.left, cam_cropcap.defrect.top,
                cam_cropcap.defrect.width, cam_cropcap.defrect.height);
        /* VIDIOC_S_CROP: set the capture window to the default rect. */
        cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_crop.c = cam_cropcap.defrect; // default capture window
        if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
            //printf("Can't set crop para\n");
        }
    } else {
        printf("Can't set cropcap para\n");
    }

    /* VIDIOC_S_FMT: set the frame format. */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    cam_format.fmt.pix.width = capW;
    cam_format.fmt.pix.height = capH;
    cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // must match a format the camera supports
    cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
    ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
    if (ret < 0) {
        perror("Can't set frame information");
    }

    /* VIDIOC_G_FMT: read back the format the driver actually chose. */
    cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
    if (ret < 0) {
        perror("Can't get frame information");
    }
    printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
            cam_format.fmt.pix.width, cam_format.fmt.pix.height);
    ret = initBuffers();
    if (ret < 0) {
        perror("Buffers init error");
        //exit(-1);
    }
    return 0;
}

int V4L2Capture::initBuffers() {
    int ret;
    /* VIDIOC_REQBUFS: ask the driver for frame buffers. */
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4; // conventional buffer count for mmap streaming
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
    if (ret < 0) {
        perror("Request frame buffers failed");
    }
    if (req.count < 1) {
        perror("Request frame buffers while insufficient buffer memory");
    }
    buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
    if (!buffers) {
        perror("Out of memory");
    }
    for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        // VIDIOC_QUERYBUF: get buffer n_buffers' offset and size.
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
        if (ret < 0) {
            printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
            return -1;
        }
        buffers[n_buffers].length = buf.length;
        //printf("buf.length= %d\n",buf.length);
        // Map the driver buffer into our address space.
        buffers[n_buffers].start = mmap(
                NULL, // start anywhere
                buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
                buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start) {
            printf("mmap buffer%d failed\n", n_buffers);
            return -1;
        }
    }
    return 0;
}

int V4L2Capture::startCapture() {
    unsigned int i;
    // Queue every buffer, then turn streaming on.
    for (i = 0; i < n_buffers; i++) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
            printf("VIDIOC_QBUF buffer%d failed\n", i);
            return -1;
        }
    }
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
        printf("VIDIOC_STREAMON error");
        return -1;
    }
    return 0;
}

int V4L2Capture::stopCapture() {
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
        printf("VIDIOC_STREAMOFF error\n");
        return -1;
    }
    return 0;
}

int V4L2Capture::freeBuffers() {
    unsigned int i;
    for (i = 0; i < n_buffers; ++i) {
        if (-1 == munmap(buffers[i].start, buffers[i].length)) {
            printf("munmap buffer%d failed\n", i);
            return -1;
        }
    }
    free(buffers);
    return 0;
}

int V4L2Capture::getFrame(void **frame_buf, size_t* len) {
    // VIDIOC_DQBUF: dequeue one filled buffer; the caller borrows it
    // until backFrame() re-queues it.
    struct v4l2_buffer queue_buf;
    CLEAR(queue_buf);
    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
        printf("VIDIOC_DQBUF error\n");
        return -1;
    }
    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    frameIndex = queue_buf.index;
    return 0;
}

int V4L2Capture::backFrame() {
    // Re-queue the buffer previously handed out by getFrame().
    if (frameIndex != -1) {
        struct v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = frameIndex;
        if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
            printf("VIDIOC_QBUF error\n");
            return -1;
        }
        return 0;
    }
    return -1;
}

void V4L2Capture::test() {
    // Smoke test: grab a single frame and tear everything down.
    unsigned char *yuv422frame = NULL;
    unsigned long yuvframeSize = 0;

    string videoDev = "/dev/video0";
    // NOTE(review): the capture size literals were lost in the paste;
    // 1920x1080 matches the author's published version — confirm for
    // your camera.
    V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
            1920, 1080);
    vcap->openDevice();
    vcap->initDevice();
    vcap->startCapture();
    vcap->getFrame((void **) &yuv422frame, (size_t *) &yuvframeSize);
    vcap->backFrame();
    vcap->freeBuffers();
    vcap->closeDevice();
}

void VideoPlayer() {
    unsigned char *yuv422frame = NULL;
    unsigned long yuvframeSize = 0;

    string videoDev = "/dev/video0";
    // See the note in test(): restored capture size — confirm for your camera.
    V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()), 1920, 1080);
    vcap->openDevice();
    vcap->initDevice();
    vcap->startCapture();

    cvNamedWindow("Capture", CV_WINDOW_AUTOSIZE);
    IplImage* img;
    CvMat cvmat;
    double t;
    while (1) {
        t = (double) cvGetTickCount();
        vcap->getFrame((void **) &yuv422frame, (size_t *) &yuvframeSize);
        // Wrap the raw MJPEG bytes in a CvMat header, then decode.
        cvmat = cvMat(IMAGEHEIGHT, IMAGEWIDTH, CV_8UC3, (void*) yuv422frame); //CV_8UC3
        img = cvDecodeImage(&cvmat, 1); // 1 = force 3-channel color
        if (!img) {
            printf("DecodeImage error!\n");
        }
        cvShowImage("Capture", img);
        cvReleaseImage(&img);
        vcap->backFrame();
        // ESC (27) quits; mask to the low byte of the key code.
        if ((cvWaitKey(1) & 255) == 27) {
            exit(0);
        }
        t = (double) cvGetTickCount() - t;
        // cvGetTickFrequency() is ticks per microsecond; *1000 -> ms.
        printf("Used time is %g ms\n", (t / (cvGetTickFrequency() * 1000)));
    }
    vcap->stopCapture();
    vcap->freeBuffers();
    vcap->closeDevice();
}

int main() {
    VideoPlayer();
    return 0;
}

最新文章

  1. 为Asp.net WebApi 添加跨域支持
  2. 3G产品升级相关知识
  3. BZOJ4152The Captain[DIjkstra]
  4. html5移动端制作知识点总结
  5. 12. Android框架和工具之 StringUtils(字符串操作)
  6. 利用autoit自动关闭指定标题窗口
  7. Historical节点
  8. gprecoverseg导致的元数据库问题致使数据库无法启动以及修复
  9. Dubbo框架应用之(一)--服务体系
  10. PostgreSQL学习笔记(一)-安装PostgreSQL
  11. 第九课 表单及表单控件 html5学习4
  12. 1-hadoop、mr
  13. 学习笔记之Python最简编码规范
  14. scp sparkuser@spark02:/home/sparkuser/.ssh
  15. Python库moviepy
  16. ctrl + alt + o 快速删除掉没有使用的 import
  17. springboot从入门到精通(二)
  18. Oracle基础 03 回滚表空间 undo
  19. [HNOI2004]宠物收养场 BZOJ1208 splay tree
  20. es6 - filter for-chrome

热门文章

  1. 105 - kube-scheduler源码分析 - predicate算法注册
  2. 如何大幅提升web前端性能之看tengine在大公司架构实践
  3. 如何快速高效简洁的打开软件 干净利索的windows快捷程序启动器
  4. .NetCore WebAPI采坑之路(持续更新)
  5. Springboot 系列(五)Spring Boot web 开发之静态资源和模版引擎
  6. Centos7+LVS-NAT+apache实验
  7. Spring MVC的文件上传和下载
  8. 微信小程序 canvas 文字自动换行
  9. nginx预防常见攻击
  10. oracle相关函数