Linux 下 V4L2 的使用例子（Example of using the V4L2 capture API on Linux）

#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

/* Uncomment to build for Android (routes LOG through LOGV).
 * Fixed: the commented-out name previously read "Android_ENV", which would
 * never match the ANDROID_ENV guard below if uncommented. */
//#define ANDROID_ENV
#ifdef ANDROID_ENV
#define LOG LOGV
#else
#define LOG printf
#endif

#define CAMERA_DEVICE "/dev/video0"   /* capture device node */
#define CAPTURE_FILE "frame.jpg"      /* output file for the captured frame */

#define VIDEO_WIDTH 640               /* requested frame width in pixels */
#define VIDEO_HEIGHT 480              /* requested frame height in pixels */
#define VIDEO_FORMAT V4L2_PIX_FMT_YUYV /* packed YUV 4:2:2 */
#define BUFFER_COUNT 4                /* number of mmap'ed driver buffers */

/* One memory-mapped capture buffer shared with the V4L2 driver. */
struct fimc_buffer {
int length; /* buffer size in bytes, as reported by VIDIOC_QUERYBUF */
void *start; /* user-space address returned by mmap() */
} framebuf[BUFFER_COUNT];


int main()
{
int i, ret;

// Open Device
int fd;
fd = open(CAMERA_DEVICE, O_RDWR, 0);
if (fd < 0) {
LOG("Open %s failed\n", CAMERA_DEVICE);
return -1;
}

// Query Capability
struct v4l2_capability cap;
ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
if (ret < 0) {
LOG("VIDIOC_QUERYCAP failed (%d)\n", ret);
return ret;
}

// Print capability infomations
LOG("Capability Informations:\n");
LOG(" driver: %s\n", cap.driver);
LOG(" card: %s\n", cap.card);
LOG(" bus_info: %s\n", cap.bus_info);
LOG(" version: %08X\n", cap.version);
LOG(" capabilities: %08X\n", cap.capabilities);

// Set Stream Format
struct v4l2_format fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = VIDEO_WIDTH;
fmt.fmt.pix.height = VIDEO_HEIGHT;
fmt.fmt.pix.pixelformat = VIDEO_FORMAT;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
ret = ioctl(fd, VIDIOC_S_FMT, &fmt);
if (ret < 0) {
LOG("VIDIOC_S_FMT failed (%d)\n", ret);
return ret;
}

// Get Stream Format
ret = ioctl(fd, VIDIOC_G_FMT, &fmt);
if (ret < 0) {
LOG("VIDIOC_G_FMT failed (%d)\n", ret);
return ret;
}

// Print Stream Format
LOG("Stream Format Informations:\n");
LOG(" type: %d\n", fmt.type);
LOG(" width: %d\n", fmt.fmt.pix.width);
LOG(" height: %d\n", fmt.fmt.pix.height);
char fmtstr[8];
memset(fmtstr, 0, 8);
memcpy(fmtstr, &fmt.fmt.pix.pixelformat, 4);
LOG(" pixelformat: %s\n", fmtstr);
LOG(" field: %d\n", fmt.fmt.pix.field);
LOG(" bytesperline: %d\n", fmt.fmt.pix.bytesperline);
LOG(" sizeimage: %d\n", fmt.fmt.pix.sizeimage);
LOG(" colorspace: %d\n", fmt.fmt.pix.colorspace);
LOG(" priv: %d\n", fmt.fmt.pix.priv);
LOG(" raw_date: %s\n", fmt.fmt.raw_data);

// Request buffers
struct v4l2_requestbuffers reqbuf;
reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbuf.memory = V4L2_MEMORY_MMAP;
reqbuf.count = BUFFER_COUNT;
ret = ioctl(fd , VIDIOC_REQBUFS, &reqbuf);
if(ret < 0) {
LOG("VIDIOC_REQBUFS failed (%d)\n", ret);
return ret;
}

// Queen buffers
struct v4l2_buffer buf;
for(i=0; i // Query buffer
buf.index = i;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(fd , VIDIOC_QUERYBUF, &buf);
if(ret < 0) {
LOG("VIDIOC_QUERYBUF (%d) failed (%d)\n", i, ret);
return ret;
}

// mmap buffer
framebuf[i].length = buf.length;
framebuf[i].start = (char *) mmap(0, buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
if (framebuf[i].start == MAP_FAILED) {
LOG("mmap (%d) failed: %s\n", i, strerror(errno));
return -1;
}

// Queen buffer
ret = ioctl(fd , VIDIOC_QBUF, &buf);
if (ret < 0) {
LOG("VIDIOC_QBUF (%d) failed (%d)\n", i, ret);
return -1;
}

LOG("Frame buffer %d: address=0x%x, length=%d\n", i, (unsigned int)framebuf[i].start, framebuf[i].length);
}

// Stream On
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
LOG("VIDIOC_STREAMON failed (%d)\n", ret);
return ret;
}

// Get frame
ret = ioctl(fd, VIDIOC_DQBUF, &buf);
if (ret < 0) {
LOG("VIDIOC_DQBUF failed (%d)\n", ret);
return ret;
}

// Process the frame
FILE *fp = fopen(CAPTURE_FILE, "wb");
if (fp < 0) {
LOG("open frame data file failed\n");
return -1;
}
fwrite(framebuf[buf.index].start, 1, buf.length, fp);
fclose(fp);
LOG("Capture one frame saved in %s\n", CAPTURE_FILE);

// Re-queen buffer
ret = ioctl(fd, VIDIOC_QBUF, &buf);
if (ret < 0) {
LOG("VIDIOC_QBUF failed (%d)\n", ret);
return ret;
}

// Release the resource
for (i=0; i munmap(framebuf[i].start, framebuf[i].length);
}

close(fd);
LOG("Camera test Done.\n");
return 0;
}

相关内容（原文此处为相关文章链接，已省略）