使用开发板摄像头接口,连接OV5640

按如下修改board.dts重新编译kernel

--- a/device/config/chips/t113/configs/zqboard/linux-5.4/board.dts
+++ b/device/config/chips/t113/configs/zqboard/linux-5.4/board.dts
@@ -507,7 +507,7 @@

        csi_top = <378000000>;
        csi_isp = <327000000>;
-       status = "disable"; /* enable twi and ver1 spi-nand max freq 30M */
+       status = "okay"; /* enable twi and ver1 spi-nand max freq 30M */

        actuator0: actuator@5809450 {
                device_type = "actuator0";
@@ -620,7 +620,7 @@
         pinctrl-1 = <&uart3_pins_b>;
         status = "okay";
 };
-
+#if 0
 &uart2 {
        pinctrl-names = "default", "sleep";
        pinctrl-0 = <&uart2_pins_a>;
@@ -641,8 +641,10 @@
         pinctrl-1 = <&uart5_pins_b>;
         status = "okay";
 };
+#endif

 &soc {
+#if 0
         leds {
                 compatible = "gpio-leds";
                 gpio_pe0 {
@@ -669,6 +671,7 @@
                        default-state = "off";
                 };
         };
+#endif

        card0_boot_para@2 {
                /*

使用usb摄像头

直接插入usb摄像头即可

检查摄像头是否可用

ls /dev/video* #检查是否有设备节点产生

Camera拍照

运行:camerademo 运行后会在/tmp目录下产生照片,可拷贝到PC查看

Camera推流预览

## 960x480 分辨率 MJPEG格式摄像头(不加 -y 时 input_uvc 按 MJPEG 读取)
mjpg_streamer -i "input_uvc.so -r 960x480 -d /dev/video0 -f 15"  -o  "output_http.so -w /www/webcam"

## 640x480 分辨率 YUYV格式摄像头
mjpg_streamer -i "input_uvc.so -y -r 640x480 -d /dev/video0 -f 10"  -o  "output_http.so -w /www/webcam"

浏览器地址查看:192.168.100.100:8080

Camera输出到屏幕显示

按照摄像头不同输出方式,有如下代码:

YUV摄像头显示到屏幕

uvcCamera2lcd.c

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <ctype.h>
#include <errno.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <poll.h>
#include <math.h>
#include <wchar.h>
#include <time.h>
#include <stdbool.h>

#define CAM_WIDTH       640
#define CAM_HEIGHT      480

/* Device node paths; assigned in main(). */
static char *dev_video;
static char *dev_fb0;

/* Frame buffers allocated in main(): raw YUYV input and converted output. */
static char *yuv_buffer;
static char *rgb_buffer;

/* Packs one 8-bit value into all three RGB565 fields (grey level).
 * NOTE(review): unused in this file, and u8/u16 are not defined here —
 * confirm the typedefs exist before using this macro. */
#define YUVToRGB(Y)                                                            \
        ((u16)((((u8)(Y) >> 3) << 11) | (((u8)(Y) >> 2) << 5) | ((u8)(Y) >> 3)))
struct v4l2_buffer video_buffer;
/* Global state shared by the init/capture/display helpers. */
int lcd_fd;
int video_fd;
static unsigned char *lcd_mem_p = NULL; /* mmap'ed base address of the LCD framebuffer */
struct fb_var_screeninfo vinfo;
struct fb_fix_screeninfo finfo;

char *video_buff_buff[4]; /* mmap'ed addresses of the capture buffers */
int video_height = 0;
int video_width = 0;
unsigned char *lcd_display_buff; /* scratch buffer sized in lcd_init() */

/* Report the failing call by name with errno details, then terminate. */
static void errno_exit(const char *s)
{
        int err = errno;

        fprintf(stderr, "%s error %d, %s\n", s, err, strerror(err));
        exit(EXIT_FAILURE);
}

/* ioctl wrapper that transparently retries when a signal interrupts
 * the call (EINTR); returns the final ioctl result. */
static int xioctl(int fh, int request, void *arg)
{
        for (;;) {
                int ret = ioctl(fh, request, arg);

                if (ret != -1 || errno != EINTR)
                        return ret;
        }
}

static int video_init(void)
{
        struct v4l2_capability cap;
        ioctl(video_fd, VIDIOC_QUERYCAP, &cap);

        struct v4l2_fmtdesc dis_fmtdesc;
        dis_fmtdesc.index = 0;
        dis_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        printf("-----------------------支持格式---------------------\n");
        while (ioctl(video_fd, VIDIOC_ENUM_FMT, &dis_fmtdesc) != -1) {
                printf("\t%d.%s\n", dis_fmtdesc.index + 1,
                       dis_fmtdesc.description);
                dis_fmtdesc.index++;
        }
        struct v4l2_format video_format;
        video_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        video_format.fmt.pix.width = CAM_WIDTH;
        video_format.fmt.pix.height = CAM_HEIGHT;
        video_format.fmt.pix.pixelformat =
                V4L2_PIX_FMT_YUYV; //使用JPEG格式帧,用于静态图像采集

        ioctl(video_fd, VIDIOC_S_FMT, &video_format);

        printf("当前摄像头支持的分辨率:%dx%d\n", video_format.fmt.pix.width,
               video_format.fmt.pix.height);
        if (video_format.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
                printf("当前摄像头不支持YUYV格式输出.\n");
                video_height = video_format.fmt.pix.height;
                video_width = video_format.fmt.pix.width;
                //return -3;
        } else {
                video_height = video_format.fmt.pix.height;
                video_width = video_format.fmt.pix.width;
                printf("当前摄像头支持YUYV格式输出.width %d height %d\n",
                        video_width, video_height);
        }
        /*3. 申请缓冲区*/
        struct v4l2_requestbuffers video_requestbuffers;
        memset(&video_requestbuffers, 0, sizeof(struct v4l2_requestbuffers));
        video_requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        video_requestbuffers.count = 4;
        video_requestbuffers.memory = V4L2_MEMORY_MMAP;
        if (ioctl(video_fd, VIDIOC_REQBUFS, &video_requestbuffers))
                return -4;
        printf("成功申请的缓冲区数量:%d\n", video_requestbuffers.count);
        /*4. 得到每个缓冲区的地址: 将申请的缓冲区映射到进程空间*/
        struct v4l2_buffer video_buffer;
        memset(&video_buffer, 0, sizeof(struct v4l2_buffer));
        int i;
        for (i = 0; i < video_requestbuffers.count; i++) {
                video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                video_buffer.index = i;
                video_buffer.memory = V4L2_MEMORY_MMAP;
                if (ioctl(video_fd, VIDIOC_QUERYBUF, &video_buffer))
                        return -5;
                /*映射缓冲区的地址到进程空间*/
                video_buff_buff[i] =
                        mmap(NULL, video_buffer.length, PROT_READ | PROT_WRITE,
                             MAP_SHARED, video_fd, video_buffer.m.offset);
                printf("第%d个缓冲区地址:%#X\n", i, video_buff_buff[i]);
        }
        /*5. 将缓冲区放入到采集队列*/
        memset(&video_buffer, 0, sizeof(struct v4l2_buffer));
        for (i = 0; i < video_requestbuffers.count; i++) {
                video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                video_buffer.index = i;
                video_buffer.memory = V4L2_MEMORY_MMAP;
                if (ioctl(video_fd, VIDIOC_QBUF, &video_buffer)) {
                        printf("VIDIOC_QBUF error\n");
                        return -6;
                }
        }
        printf("启动摄像头采集\n");
        /*6. 启动摄像头采集*/
        int opt_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(video_fd, VIDIOC_STREAMON, &opt_type)) {
                printf("VIDIOC_STREAMON error\n");
                return -7;
        }

        return 0;
}
/*
 * Query the framebuffer geometry (lcd_fd must already be open) and mmap
 * the LCD memory into this process (global lcd_mem_p).
 * Returns 0 on success, a negative code on failure.
 */
int lcd_init(void)
{
        /* Variable screen info: resolution and depth. */
        if (ioctl(lcd_fd, FBIOGET_VSCREENINFO, &vinfo))
                return -2;
        printf("屏幕X:%d   屏幕Y:%d  像素位数:%d\n", vinfo.xres, vinfo.yres,
               vinfo.bits_per_pixel);

        /* Scratch buffer big enough for one full frame. */
        lcd_display_buff =
                malloc(vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / 8);
        if (lcd_display_buff == NULL)
                return -4; /* previously unchecked */

        /* Fixed screen info: physical buffer length and line stride. */
        if (ioctl(lcd_fd, FBIOGET_FSCREENINFO, &finfo))
                return -3;
        printf("smem_len=%d Byte,line_length=%d Byte\n", finfo.smem_len,
               finfo.line_length);

        /* Map the LCD controller's memory into this process. */
        lcd_mem_p = (unsigned char *)mmap(0, finfo.smem_len,
                                          PROT_READ | PROT_WRITE, MAP_SHARED,
                                          lcd_fd, 0);
        if (lcd_mem_p == MAP_FAILED)
                return -5; /* previously unchecked */
        printf("映射LCD屏物理地址到进程空间\n");
        return 0;
}

/* Close both device nodes, aborting with a diagnostic on failure, and
 * poison the descriptors so stale reuse is detectable. */
static void close_device(void)
{
        if (close(video_fd) == -1)
                errno_exit("close");
        video_fd = -1;

        if (close(lcd_fd) == -1)
                errno_exit("close");
        lcd_fd = -1;
}

/* Open one device node or exit with a diagnostic. */
static int open_or_die(const char *path, int flags)
{
        int fd = open(path, flags, 0);

        if (fd == -1) {
                fprintf(stderr, "Cannot open '%s': %d, %s\n", path, errno,
                        strerror(errno));
                exit(EXIT_FAILURE);
        }
        return fd;
}

/* Open the camera (non-blocking) and the LCD framebuffer nodes. */
static void open_device(void)
{
        video_fd = open_or_die(dev_video, O_RDWR /* required */ | O_NONBLOCK);
        lcd_fd = open_or_die(dev_fb0, O_RDWR);
}

/*
 * Convert packed YUYV (YUY2) data to 32-bit pixels in B, G, R, A byte
 * order with alpha forced to 0xff. Every 4-byte YUYV group holds two
 * pixels sharing one U/V pair; the integer coefficients are the usual
 * BT.601-style constants pre-scaled by 256, and every channel is
 * clamped to [0, 255].
 */
void yuv_to_rgb(unsigned char *yuv_buffer, unsigned char *rgb_buffer,
                int iWidth, int iHeight)
{
        unsigned char *out = rgb_buffer;
        unsigned char *in = yuv_buffer;
        int total = iWidth * iHeight;
        int second = 0; /* 0: first pixel of the pair, 1: second */
        int n;

        for (n = 0; n < total; n++) {
                int y = (second ? in[2] : in[0]) << 8;
                int u = in[1] - 128;
                int v = in[3] - 128;
                int r = (y + 359 * v) >> 8;
                int g = (y - 88 * u - 183 * v) >> 8;
                int b = (y + 454 * u) >> 8;

                out[0] = b > 255 ? 255 : (b < 0 ? 0 : b);
                out[1] = g > 255 ? 255 : (g < 0 ? 0 : g);
                out[2] = r > 255 ? 255 : (r < 0 ? 0 : r);
                out[3] = 0xff;
                out += 4;

                if (second) {
                        in += 4;
                        second = 0;
                } else {
                        second = 1;
                }
        }
}

/*
 * Blit a rectangle of 32-bit pixels onto the memory-mapped framebuffer.
 * Coordinates are in pixels; end_x/end_y are exclusive limits (i.e. the
 * source image is (end_x - start_x) x (end_y - start_y) pixels).
 * `color` points at tightly packed 4-byte pixels, one row after another.
 *
 * Fixes vs. the previous version:
 *  - the row base offset is now in bytes (x4), so start_y > 0 works;
 *  - the inner loop bound was inclusive (`<=`), copying one extra byte
 *    per row and thereby skewing the channels of every following row;
 *  - the outer loop no longer writes one extra row past the source;
 *  - the stray `*color--` no-op statement was removed.
 */
static void lcd_image(unsigned int start_x, unsigned int end_x,
                    unsigned int start_y, unsigned int end_y,
                    unsigned char* color)
{
    unsigned long i;
    unsigned int j;

    /* Byte offset of the first destination row (32 bpp => xres*4 per line). */
    i = (unsigned long)start_y * vinfo.xres * 4;

    for ( ; start_y < end_y; start_y++, i += (vinfo.xres * 4))
    {
        for (j = start_x * 4; j < end_x * 4; j++)
        {
            lcd_mem_p[i + j] = *color++;
        }
    }
}
/*
 * Entry point: open the camera and framebuffer, configure capture, then
 * loop forever: wait for a frame, convert YUYV -> BGRA, blit it to the
 * LCD and recycle the capture buffer.
 */
int main(int argc, char **argv)
{
        dev_video = "/dev/video0";
        dev_fb0 = "/dev/fb0";
        open_device();
        if (video_init() != 0) { /* previously unchecked */
                fprintf(stderr, "video_init failed\n");
                return -1;
        }
        if (lcd_init() != 0) { /* previously unchecked */
                fprintf(stderr, "lcd_init failed\n");
                return -1;
        }

        struct pollfd video_fds;
        video_fds.events = POLLIN;
        video_fds.fd = video_fd;

        memset(&video_buffer, 0, sizeof(struct v4l2_buffer));

        /* BGRA output of the conversion: 4 bytes per pixel at the
         * resolution the driver actually granted. (The old code wrote
         * the converted pixels into `yuv_buffer` and never used the
         * allocated `rgb_buffer`.) */
        rgb_buffer = malloc(video_width * video_height * 4);
        if (rgb_buffer == NULL) {
                fprintf(stderr, "out of memory\n");
                return -1;
        }

        while (1) {
                /* Block until the driver has a filled buffer for us. */
                if (poll(&video_fds, 1, -1) <= 0)
                        continue;
                /* Dequeue the filled buffer; its index tells us which
                 * mmap'ed region holds the frame. */
                video_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                video_buffer.memory = V4L2_MEMORY_MMAP;
                if (ioctl(video_fd, VIDIOC_DQBUF, &video_buffer))
                        continue; /* previously unchecked */

                /* Convert YUYV -> BGRA (width/height now passed in the
                 * correct order; the old call swapped them). */
                yuv_to_rgb((unsigned char *)video_buff_buff[video_buffer.index],
                           (unsigned char *)rgb_buffer,
                           video_width, video_height);

                /* Blit the frame at the top-left of the screen. */
                lcd_image(0, video_width, 0, video_height,
                          (unsigned char *)rgb_buffer);

                /* Give the buffer back to the capture queue. */
                ioctl(video_fd, VIDIOC_QBUF, &video_buffer);
        }

        /* Unreachable, kept for symmetry. */
        close(video_fd);

        return 0;
}

MJPEG摄像头显示到屏幕

mjpegCamera2lcd.c

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
#include <jpeglib.h>

#define CAMERA_DEV "/dev/video0"
#define FB_DEV "/dev/fb0"

// Shared state between the init, capture and display stages.
struct v4l2_buffer v4l2_buf;          // descriptor of the single mmap'ed capture buffer
unsigned char *camera_buffer = NULL;  // mmap'ed MJPEG frame data
unsigned char *fb_buffer = NULL;      // mmap'ed framebuffer memory
int camera_fd, fb_fd;                 // device file descriptors
struct fb_var_screeninfo fb_var_info; // framebuffer geometry and depth

// JPEG decode (MJPEG frame -> RGB24).
// Decodes one JPEG image from memory into a caller-provided RGB24 buffer
// of exactly width x height pixels. On a resolution mismatch the frame is
// skipped; the decoder state is now destroyed on that path too (the old
// code returned after jpeg_abort_decompress alone, leaking its memory).
void jpeg_to_rgb(uint8_t *jpeg_data, size_t jpeg_size, uint8_t *rgb_output, int width, int height) {
    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;

    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_decompress(&cinfo);
    jpeg_mem_src(&cinfo, jpeg_data, jpeg_size);
    jpeg_read_header(&cinfo, TRUE);

    // Force RGB output (the decoder's native space is typically YCbCr).
    cinfo.out_color_space = JCS_RGB;
    jpeg_start_decompress(&cinfo);

    // Skip frames whose decoded size differs from what the caller expects.
    if (cinfo.output_width != (unsigned)width ||
        cinfo.output_height != (unsigned)height) {
        fprintf(stderr, "JPEG resolution mismatch!\n");
        jpeg_abort_decompress(&cinfo);
        jpeg_destroy_decompress(&cinfo); // fix: release decoder memory
        return;
    }

    // Read the image one scanline at a time straight into the output.
    int row_stride = width * 3; // RGB24
    while (cinfo.output_scanline < (unsigned)height) {
        uint8_t *row_pointer = rgb_output + (cinfo.output_scanline * row_stride);
        jpeg_read_scanlines(&cinfo, &row_pointer, 1);
    }

    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);
}

// Expand tightly packed 24-bit RGB into 32-bit pixels, swapping the R and
// B channels (the output byte order is effectively B, G, R, A) and forcing
// alpha to 0xFF.
void rgb24_to_rgba32(uint8_t *rgb24, uint8_t *rgba32, int width, int height) {
    int pixels = width * height;
    uint8_t *src = rgb24;
    uint8_t *dst = rgba32;

    while (pixels-- > 0) {
        dst[0] = src[2]; // B <- R (swap)
        dst[1] = src[1]; // G unchanged
        dst[2] = src[0]; // R <- B (swap)
        dst[3] = 0xFF;   // alpha
        src += 3;
        dst += 4;
    }
}

// Camera init (MJPEG format).
// Opens CAMERA_DEV, negotiates MJPEG at width x height, mmaps one capture
// buffer and starts streaming. Returns 0 on success, -1 on failure.
// Fix: the fd (and the mapping) are now released on every error path —
// the old code leaked the open fd whenever any ioctl failed.
int init_camera(int width, int height) {
    struct v4l2_format fmt = {0};
    struct v4l2_requestbuffers req = {0};

    camera_fd = open(CAMERA_DEV, O_RDWR);
    if (camera_fd < 0) {
        perror("Failed to open camera");
        return -1;
    }

    // Request MJPEG frames at the desired resolution.
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;

    if (ioctl(camera_fd, VIDIOC_S_FMT, &fmt) < 0) {
        perror("Failed to set MJPEG format");
        goto fail_close;
    }

    // Request a single mmap-able kernel buffer.
    req.count = 1;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if (ioctl(camera_fd, VIDIOC_REQBUFS, &req) < 0) {
        perror("Failed to request buffers");
        goto fail_close;
    }

    // Look up the buffer's size/offset, then map it into this process.
    v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    v4l2_buf.memory = V4L2_MEMORY_MMAP;
    v4l2_buf.index = 0;

    if (ioctl(camera_fd, VIDIOC_QUERYBUF, &v4l2_buf) < 0) {
        perror("Failed to query buffer");
        goto fail_close;
    }

    camera_buffer = mmap(NULL, v4l2_buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera_fd, v4l2_buf.m.offset);
    if (camera_buffer == MAP_FAILED) {
        perror("Failed to mmap camera buffer");
        goto fail_close;
    }

    // Start streaming.
    if (ioctl(camera_fd, VIDIOC_STREAMON, &v4l2_buf.type) < 0) {
        perror("Failed to start stream");
        goto fail_unmap;
    }

    return 0;

fail_unmap:
    munmap(camera_buffer, v4l2_buf.length);
    camera_buffer = NULL;
fail_close:
    close(camera_fd);
    camera_fd = -1;
    return -1;
}

// Framebuffer init (32 bpp).
// Opens FB_DEV, verifies the display is 32 bits per pixel, and mmaps the
// framebuffer memory. Returns 0 on success, -1 on failure.
// Fix: the fd is now closed on every error path (previously leaked).
int init_framebuffer() {
    fb_fd = open(FB_DEV, O_RDWR);
    if (fb_fd < 0) {
        perror("Failed to open framebuffer");
        return -1;
    }

    if (ioctl(fb_fd, FBIOGET_VSCREENINFO, &fb_var_info) < 0) {
        perror("Failed to get framebuffer info");
        goto fail_close;
    }

    // This program only supports 32 bpp displays.
    if (fb_var_info.bits_per_pixel != 32) {
        fprintf(stderr, "Error: Framebuffer is not 32bpp (RGBA)\n");
        goto fail_close;
    }

    // NOTE(review): the mapping size assumes xres_virtual * 4 bytes per
    // line; drivers with padded lines report finfo.line_length instead —
    // confirm on the target hardware.
    fb_buffer = mmap(NULL, fb_var_info.yres_virtual * fb_var_info.xres_virtual * 4, PROT_READ | PROT_WRITE, MAP_SHARED, fb_fd, 0);
    if (fb_buffer == MAP_FAILED) {
        perror("Failed to mmap framebuffer");
        goto fail_close;
    }

    return 0;

fail_close:
    close(fb_fd);
    fb_fd = -1;
    return -1;
}

// One frame of the main loop: capture -> decode -> convert -> display.
// Grabs one MJPEG frame, decodes it to RGB24, expands it to 32-bit
// pixels, and blits it to the top-left corner of the framebuffer.
// Fixes: the temporary buffers are freed on every exit path (the
// QBUF/DQBUF error paths previously leaked them), malloc results are
// checked, and the blit is clipped to the framebuffer so an oversized
// camera frame cannot write past the mapping.
void capture_and_display(int cam_width, int cam_height, int fb_width, int fb_height) {
    uint8_t *rgb_data = malloc(cam_width * cam_height * 3);  // RGB24
    uint8_t *rgba_data = malloc(cam_width * cam_height * 4); // 32-bit pixels

    if (rgb_data == NULL || rgba_data == NULL) {
        fprintf(stderr, "Out of memory\n");
        goto out;
    }

    // Hand the buffer to the driver, then wait for it to be filled.
    if (ioctl(camera_fd, VIDIOC_QBUF, &v4l2_buf) < 0) {
        perror("Failed to enqueue buffer");
        goto out;
    }

    if (ioctl(camera_fd, VIDIOC_DQBUF, &v4l2_buf) < 0) {
        perror("Failed to dequeue buffer");
        goto out;
    }

    // Decode JPEG -> RGB24, then expand to 32-bit pixels.
    jpeg_to_rgb(camera_buffer, v4l2_buf.bytesused, rgb_data, cam_width, cam_height);
    rgb24_to_rgba32(rgb_data, rgba_data, cam_width, cam_height);

    // Blit at (0,0), clipped to the framebuffer dimensions.
    int start_x = 0, start_y = 0;
    int copy_w = cam_width < fb_width ? cam_width : fb_width;
    int copy_h = cam_height < fb_height ? cam_height : fb_height;

    for (int y = 0; y < copy_h; y++) {
        uint8_t *fb_line = fb_buffer + ((start_y + y) * fb_width + start_x) * 4;
        memcpy(fb_line, rgba_data + y * cam_width * 4, copy_w * 4);
    }

out:
    free(rgb_data);
    free(rgba_data);
}

// Program entry: bring up the MJPEG camera and the framebuffer, then
// stream decoded frames to the screen forever.
int main() {
    int cam_width = 960, cam_height = 480; // camera capture resolution
    int fb_width, fb_height;               // screen resolution, read after init

    if (init_camera(cam_width, cam_height) < 0) {
        fprintf(stderr, "Camera init failed\n");
        return -1;
    }

    if (init_framebuffer() < 0) {
        fprintf(stderr, "Framebuffer init failed\n");
        return -1;
    }

    fb_width = fb_var_info.xres;
    fb_height = fb_var_info.yres;

    printf("Displaying MJPEG camera (%dx%d) on RGBA screen (%dx%d)\n",
           cam_width, cam_height, fb_width, fb_height);

    // Capture/decode/display one frame per iteration, indefinitely.
    for (;;) {
        capture_and_display(cam_width, cam_height, fb_width, fb_height);
    }

    return 0; // unreachable
}

交叉编译脚本:build.sh

# Cross-compilation environment for the Tina Linux (T113) sunxi-musl
# toolchain. STAGING_DIR must be exported or the OpenWrt toolchain
# wrappers refuse to run.
export STAGING_DIR="/home/cy/Tina-Linux/out/t113-mq_r/staging_dir/target"
export tools="/home/cy/Tina-Linux/prebuilt/gcc/linux-x86/arm/toolchain-sunxi-musl/toolchain"
export CC_COMPILER=${tools}/bin/arm-openwrt-linux-gcc
# CXX_COMPILER is not used below; kept available for C++ sources.
export CXX_COMPILER=${tools}/bin/arm-openwrt-linux-g++

# Build the YUYV camera demo. (mjpegCamera2lcd.c additionally needs
# libjpeg in the staging dir: add -ljpeg when compiling it.)
${CC_COMPILER} uvcCamera2lcd.c -o uvcCamera2lcd

编译好后push到开发板运行即可!