opencv视频流
- Ai
- 2天前
- 30热度
- 0评论
环境准备
本文通过采集USB摄像头来示例说明
export LD_LIBRARY_PATH=/mnt/extsd/usr/lib:$LD_LIBRARY_PATH
#指定库的路径
cat /sys/devices/platform/soc/usbc0/usb_host
#查看USB host状态(cat 只读取该节点;如需激活host模式,通常需向相应节点写入配置——请以具体平台文档为准)
摄像头采集
摄像头相关的主要使用的是VideoCapture类。
打开摄像头
cv::VideoCapture cap; // Create the video capture object
cap.open(0); // Open the default camera (index 0, usually /dev/video0)
if (!cap.isOpened()) {
std::cerr << "无法打开视频设备" << std::endl;
return -1;
}
设置参数
cap.set(cv::CAP_PROP_FRAME_WIDTH, 640); // Request capture width (driver may pick the closest supported value)
cap.set(cv::CAP_PROP_FRAME_HEIGHT, 480); // Request capture height
cap.set(cv::CAP_PROP_FPS, 30); // Request frame rate
采集数据
cv::Mat frame;
while (true) {
cap >> frame; // Grab one frame from the camera
if (frame.empty()) {
std::cerr << "捕获帧失败" << std::endl;
continue;
}
// Process the frame here...
usleep(1000000 / frame_rate); // ~33 ms at 30 fps (NOTE(review): original comment said 30 ms; frame_rate must be defined by surrounding code)
}
推流显示
主要是打开/dev/fb0,然后写入数据。
获取显示参数
通过ioctl的方式,获取显示分辨率,像素位深(每个像素的占用内存大小),以便将采集到的摄像头数据调整为合适的尺寸进行显示。
struct framebuffer_info get_framebuffer_info(const char* framebuffer_device_path)
{
struct framebuffer_info info;
struct fb_var_screeninfo screen_info;
int fd = -1;
fd = open(framebuffer_device_path, O_RDWR);
if (fd >= 0) {
if (!ioctl(fd, FBIOGET_VSCREENINFO, &screen_info)) {
info.xres_virtual = screen_info.xres_virtual;
//虚拟水平的分辨率,包含不可见区域,即虚拟宽度
info.bits_per_pixel = screen_info.bits_per_pixel;
//每个像素的位深,即像素大小,如16/24/32等
info.xres = screen_info.xres;
//实际水平分辨率,即实际宽度
info.yres = screen_info.yres;
//垂直分辨率,即实际高度
}
}
return info;
};
尺寸调整
cv::resize(frame, frame, cv::Size(fb_info.xres, fb_info.yres));
从/dev/fb0获取的实际分辨率进行调整尺寸。
格式转换
写入前,先将采集的视频帧转换为屏幕显示支持的格式,如RGB565,RGBA等。
switch (framebuffer_depth) {
case 16:
// 16 bpp framebuffer: convert BGR to RGB565
cv::cvtColor(frame, framebuffer_compat, cv::COLOR_BGR2BGR565);
break;
case 32:
// 32 bpp framebuffer: append an alpha channel. The result is BGRA
// (OpenCV's default B,G,R order plus alpha), not RGBA as the original
// comment claimed.
std::vector<cv::Mat> split_bgr;
cv::split(frame, split_bgr);// Split BGR into 3 single-channel Mats
split_bgr.push_back(cv::Mat(frame_size, CV_8UC1, cv::Scalar(255)));
// Fully opaque (255) alpha plane appended as the 4th channel
cv::merge(split_bgr, framebuffer_compat);
// Merge the 3 color channels plus alpha into one 4-channel image
break;
}
写入显示
// Single bulk write of the whole converted frame.
// NOTE(review): assumes the Mat is continuous and the framebuffer line
// stride equals the image stride (xres == xres_virtual) — confirm on target.
ofs.seekp(0); // Seek to the start of the framebuffer first
ofs.write(reinterpret_cast<char*>(framebuffer_compat.ptr(0)),
framebuffer_compat.total() * framebuffer_compat.elemSize());
也可以一行一行地写(按行寻址,适用于屏幕虚拟行宽与图像行宽不一致的情况):
// Row-by-row write: seeks per line, so it also works when the framebuffer's
// virtual width differs from the visible width.
// NOTE(review): the "* 2" hard-codes 2 bytes/pixel (16 bpp RGB565) — adjust
// for 32 bpp targets.
for (int y = 0; y < frame_size.height; y++) {
ofs.seekp(y * framebuffer_width * 2);
ofs.write(reinterpret_cast<char*>(framebuffer_compat.ptr(y)), frame_size.width * 2);
}
示例程序
#include <fcntl.h>
#include <fstream>
#include <iostream>
#include <linux/fb.h>
#include <signal.h>
#include <stdint.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <opencv2/opencv.hpp>
// Global capture handle so the signal handler can release it on exit.
static cv::VideoCapture cap;
// Subset of fb_var_screeninfo fields this program needs.
struct framebuffer_info {
uint32_t bits_per_pixel; // pixel depth in bits (16/24/32)
uint32_t xres_virtual; // virtual width, including invisible area
uint32_t xres; // visible width
uint32_t yres; // visible height
};
struct framebuffer_info get_framebuffer_info(const char* framebuffer_device_path)
{
struct framebuffer_info info;
struct fb_var_screeninfo screen_info;
int fd = -1;
fd = open(framebuffer_device_path, O_RDWR);
if (fd >= 0) {
if (!ioctl(fd, FBIOGET_VSCREENINFO, &screen_info)) {
info.xres_virtual = screen_info.xres_virtual;
info.bits_per_pixel = screen_info.bits_per_pixel;
info.xres = screen_info.xres;
info.yres = screen_info.yres;
}
}
return info;
};
/* Signal handler: release the camera and exit.
 * Fixes the original's unterminated printf string literal (compile error).
 * NOTE(review): printf, cap.release() and exit() are not async-signal-safe;
 * a stricter design would set a volatile sig_atomic_t flag checked by the
 * main loop. */
static void terminate(int sig_no)
{
    printf("Got signal %d, exiting ...\n", sig_no);
    cap.release();
    exit(1);
}
int main(int, char**)
{
const int frame_width = 720;
const int frame_height = 1280;
const int frame_rate = 30;
framebuffer_info fb_info = get_framebuffer_info("/dev/fb0");
std::cout << "xres virtua" << fb_info.xres_virtual << std::endl;
std::cout << "bits per pixel" << fb_info.bits_per_pixel << std::endl;
cap.open(0);
if (!cap.isOpened()) {
std::cerr << "Could not open video device." << std::endl;
return 1;
}
std::cout << "Successfully opened video device." << std::endl;
std::cout << "Display resolution:" << fb_info.xres << "x" << fb_info.yres << std::endl;
cap.set(cv::CAP_PROP_FRAME_WIDTH, frame_width);
cap.set(cv::CAP_PROP_FRAME_HEIGHT, frame_height);
cap.set(cv::CAP_PROP_FPS, frame_rate);
std::ofstream ofs("/dev/fb0");
cv::Mat frame;
cv::Mat trams_temp_fream;
cv::Mat yuv_frame;
while (true) {
cap >> frame;
if (frame.depth() != CV_8U) {
std::cerr << "Not 8 bits per pixel and channel." << std::endl;
} else if (frame.channels() != 3) {
std::cerr << "Not 3 channels." << std::endl;
} else {
//cv::transpose(frame, frame);
//cv::flip(frame, frame, 0);
cv::resize(frame, frame, cv::Size(fb_info.xres, fb_info.yres));
int framebuffer_width = fb_info.xres_virtual;
int framebuffer_depth = fb_info.bits_per_pixel;
cv::Size2f frame_size = frame.size();
cv::Mat framebuffer_compat;
switch (framebuffer_depth) {
case 16:
cv::cvtColor(frame, framebuffer_compat, cv::COLOR_BGR2BGR565);
break;
case 32:
std::vector<cv::Mat> split_bgr;
cv::split(frame, split_bgr);
split_bgr.push_back(cv::Mat(frame_size, CV_8UC1, cv::Scalar(255)));
cv::merge(split_bgr, framebuffer_compat);
break;
}
ofs.seekp(0);
ofs.write(reinterpret_cast<char*>(framebuffer_compat.ptr(0)),
framebuffer_compat.total() * framebuffer_compat.elemSize());
usleep(1000000 / frame_rate);
}
}
}