使用飞凌 OK153-S 开发板外接一个 USB 摄像头,获取摄像头的 YUYV 数据。

插上 USB 摄像头后,系统中对应的设备节点为 /dev/video1。

代码如下(v4l2_yuyv_capture.c):
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev2.h>
// Capture configuration (adjust to match what the camera supports)
#define DEVICE_PATH "/dev/video1" // camera device node
#define WIDTH 640 // frame width in pixels
#define HEIGHT 480 // frame height in pixels
#define BUFFER_COUNT 4 // number of mmap buffers (2-4 recommended)
// Per-buffer bookkeeping: user-space address and length of one mmap'ed kernel buffer
typedef struct {
void *start;
size_t length;
} BufferInfo;
BufferInfo *buffers = NULL; // array of BUFFER_COUNT entries, allocated in request_buffers()
// 错误处理辅助函数
/* Report the last system error prefixed by MSG, then terminate with failure. */
static void err_exit(const char *msg) {
    fprintf(stderr, "%s: %s\n", msg, strerror(errno));
    exit(EXIT_FAILURE);
}
// 打开视频设备
/*
 * Open the V4L2 device node for read/write in non-blocking mode.
 * Returns the file descriptor; exits the process on failure.
 */
static int open_video_device(const char *dev_path) {
    /* O_NONBLOCK: VIDIOC_DQBUF returns EAGAIN instead of blocking. */
    int fd = open(dev_path, O_RDWR | O_NONBLOCK, 0);

    if (fd < 0)
        err_exit("Failed to open video device");
    return fd;
}
// 查询设备是否支持视频捕获和YUYV格式
static void check_device_capability(int fd) {
struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
err_exit("Failed to query device capability");
}
// 检查是否支持视频捕获(摄像头属于视频输入设备)
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
fprintf(stderr, "Device does not support video capture\n");
exit(EXIT_FAILURE);
}
// 检查是否支持内存映射方式(高效获取帧数据)
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
fprintf(stderr, "Device does not support streaming I/O\n");
exit(EXIT_FAILURE);
}
}
// 设置视频格式为YUYV
static void set_video_format(int fd) {
struct v4l2_format fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = WIDTH;
fmt.fmt.pix.height = HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // 设置为YUYV格式
fmt.fmt.pix.field = V4L2_FIELD_NONE; // 无场(逐行扫描)
if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
err_exit("Failed to set video format");
}
// 验证实际设置的格式(摄像头可能会调整分辨率)
if (fmt.fmt.pix.width != WIDTH || fmt.fmt.pix.height != HEIGHT) {
printf("Warning: Device adjusted resolution to %dx%d\n",
fmt.fmt.pix.width, fmt.fmt.pix.height);
}
if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
fprintf(stderr, "Device does not support YUYV format\n");
exit(EXIT_FAILURE);
}
}
// 请求并分配内核缓冲区
static void request_buffers(int fd) {
struct v4l2_requestbuffers req;
memset(&req, 0, sizeof(req));
req.count = BUFFER_COUNT;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP; // 使用内存映射方式
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
err_exit("Failed to request buffers");
}
if (req.count != BUFFER_COUNT) {
fprintf(stderr, "Device only allocated %d buffers (requested %d)\n",
req.count, BUFFER_COUNT);
exit(EXIT_FAILURE);
}
// 分配缓冲区数组内存
buffers = (BufferInfo *)malloc(req.count * sizeof(BufferInfo));
if (!buffers) {
err_exit("Failed to allocate buffer info array");
}
}
// 将内核缓冲区映射到用户空间
static void mmap_buffers(int fd) {
for (int i = 0; i < BUFFER_COUNT; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
// 查询缓冲区信息
if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
err_exit("Failed to query buffer");
}
// 内存映射
buffers[i].length = buf.length;
buffers[i].start = mmap(NULL, buf.length,
PROT_READ | PROT_WRITE, // 可读可写
MAP_SHARED, // 共享映射(内核/用户空间共享)
fd, buf.m.offset);
if (buffers[i].start == MAP_FAILED) {
err_exit("Failed to mmap buffer");
}
// 将缓冲区放入输入队列(准备捕获数据)
if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
err_exit("Failed to queue buffer");
}
}
}
// 启动视频流捕获
static void start_stream(int fd) {
enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMON, &buf_type) == -1) {
err_exit("Failed to start stream");
}
}
// 获取一帧YUYV格式数据并保存到文件
static void capture_one_frame(int fd) {
struct v4l2_buffer buf;
fd_set fds;
struct timeval tv;
int ret;
// 等待缓冲区就绪(非阻塞模式下需要轮询或select)
FD_ZERO(&fds);
FD_SET(fd, &fds);
tv.tv_sec = 5; // 超时时间5秒
tv.tv_usec = 0;
ret = select(fd + 1, &fds, NULL, NULL, &tv);
if (ret == -1) {
err_exit("Failed to select");
} else if (ret == 0) {
fprintf(stderr, "Select timeout (no frame captured)\n");
exit(EXIT_FAILURE);
}
// 从输出队列取出就绪的缓冲区
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
// 忽略临时的EAGAIN错误(缓冲区尚未就绪)
if (errno != EAGAIN) {
err_exit("Failed to dequeue buffer");
}
return;
}
// 验证帧数据大小(YUYV格式应为 宽×高×2)
printf("Captured frame: index=%d, length=%zu, expected=%d\n",
buf.index, buf.bytesused, WIDTH * HEIGHT * 2);
// 将YUYV数据保存到文件(后缀可设为.yuv,方便后续工具查看)
FILE *fp = fopen("capture_yuyv.yuv", "wb");
if (!fp) {
err_exit("Failed to open output file");
}
fwrite(buffers[buf.index].start, 1, buf.bytesused, fp);
fclose(fp);
printf("YUYV frame saved to capture_yuyv.yuv\n");
// 将缓冲区重新放入输入队列,继续捕获后续帧(如需连续捕获)
if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
err_exit("Failed to requeue buffer");
}
}
// 停止视频流捕获
static void stop_stream(int fd) {
enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_STREAMOFF, &buf_type) == -1) {
err_exit("Failed to stop stream");
}
}
// 解除内存映射并释放资源
static void cleanup_resources(int fd) {
// 解除映射
for (int i = 0; i < BUFFER_COUNT; i++) {
if (munmap(buffers[i].start, buffers[i].length) == -1) {
err_exit("Failed to unmap buffer");
}
}
// 释放缓冲区数组
free(buffers);
buffers = NULL;
// 关闭设备
close(fd);
}
/* Full single-frame capture pipeline: open -> configure -> stream -> save. */
int main(void) {
    int fd = open_video_device(DEVICE_PATH);

    /* Probe and configure the device. */
    check_device_capability(fd);
    set_video_format(fd);
    request_buffers(fd);
    mmap_buffers(fd);

    /* Capture exactly one frame. */
    start_stream(fd);
    capture_one_frame(fd);
    stop_stream(fd);

    cleanup_resources(fd);
    return EXIT_SUCCESS;
}
虚拟机中编译:/opt/arm-buildroot-linux-gnueabihf_sdk-buildroot/bin/arm-linux-gnueabihf-gcc v4l2_yuyv_capture.c -o v4l2_yuyv_capture
拷贝v4l2_yuyv_capture到OK153上运行

然后用YUYV软件查看获取的yuyv数据帧

这个摄像头镜头有点花,手头没有别的摄像头了,勉强可以使用。
OK,搞定了







