The full source code can be downloaded from the link below:
http://download.csdn.net/download/qq269131024/10142165
The main program for video capture follows:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>     /* for uint8_t used below */
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <errno.h>
#include <sys/wait.h>
#include "camera.h"
#define MAXLINE 1024
#define DEBUG 1
void usage(char *command)
{
printf("usage :%s portnum filename\n", command);
exit(0);
}
/*********************************************************************
 * filename:  tcpserver.c
 * purpose:   iterative TCP server program
 * tidied by: zhoulifa (zhoulifa@163.com) 周立發(fā) (http://zhoulifa.bokee.com)
 *            Linux enthusiast, Linux evangelist, SOHO worker, developer, best at C
 * date time: 2006-07-04 22:00:00
 * Note:      anyone may copy this code and use these documents, including
 *            for commercial purposes, but please comply with the GPL
 * Thanks to: Google.com
 *********************************************************************/
int main(int argc,char **argv)
{
struct sockaddr_in server_addr;
struct sockaddr_in client_addr;
char buf[MAXLINE];
int server_sock_id;
int client_sock_id;
int recv_len;
int write_leng;
socklen_t client_addr_len;
FILE *fp;
int send_len;
// output file name
char * fileName="hello.txt";
// port number
unsigned int serverport = 8050;
// video frame size (width x height)
int width = 320;
int height = 240;
char *separateur;
unsigned char* rgb;
/*
 * Command-line argument parsing
 */
/************************************************************
 Command-line arguments are expected to look like this:
 ./servfox -g -d /dev/video0 -s 640x480 -w 7070 -f fileName
 ***********************************************************/
int i;
for (i = 1; i < argc; i++)
{
/* skip bad arguments */
if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-')
{
continue;
}
/****************************************************************
 -d selects the video capture device.
 ****************************************************************/
if (strcmp (argv[i], "-d") == 0)
{
if (i + 1 >= argc)
{
if(DEBUG) printf ("No parameter specified with -d, aborting./n");
exit (1);
}
// set the device parameter here
//videodevice = strdup (argv[i + 1]);
}
/**********************************************************************
 -g selects the read() grab method instead of the default mmap.
 *********************************************************************/
if (strcmp (argv[i], "-g") == 0)
{
/* Ask for read instead default mmap */
//grabmethod = 0;
}
/************************************************************
 -s sets the frame size, e.g. 640x480.
 *********************************************************/
if (strcmp (argv[i], "-s") == 0)
{
if (i + 1 >= argc)
{
if(DEBUG) printf ("No parameter specified with -s, aborting./n");
exit (1);
}
/* parse "widthxheight"; separateur ends up pointing at the 'x' */
width = strtoul (argv[i + 1], &separateur, 10);
if (*separateur != 'x')
{
if(DEBUG) printf ("Error in size use -s widthxheight /n");
exit (1);
}else {
++separateur;
height =strtoul (separateur, &separateur, 10);
if (*separateur != 0)
if(DEBUG) printf ("hmm.. dont like that!! trying this height /n");
if(DEBUG) printf (" size width: %d height: %d /n",
width, height);
}
}
/******************************************************
 -w sets the server port.
 *********************************************************/
if (strcmp (argv[i], "-w") == 0)
{
if (i + 1 >= argc)
{
if(DEBUG) printf ("No parameter specified with -w, aborting./n");
exit (1);
}
serverport = (unsigned short) atoi (argv[i + 1]);
if (serverport < 1024 ){
if(DEBUG) printf ("Port should be between 1024 to 65536 set default 7070 !./n");
serverport = 8050;
}
}
/************************************************************
 -f sets the output file name.
 *************************************************************/
if (strcmp (argv[i], "-f") == 0)
{
if (i + 1 >= argc)
{
if(DEBUG) printf ("No parameter specified with -f, aborting./n");
exit (1);
}
/* just keep a pointer to the argument; no copy is needed */
fileName = argv[i + 1];
}
/************************************************************
 -h prints this help message.
 ****************************************************************/
if (strcmp (argv[i], "-h") == 0)
{
printf ("usage: cdse [-h -d -g ] /n");
printf ("-h print this message /n");
printf ("-d /dev/videoX use videoX device/n");
printf ("-g use read method for grab instead mmap /n");
printf ("-s widthxheight use specified input size /n");
printf ("-w port server port /n");
exit (0);
}
}
/*
 * Main server code
 */
// printf("i am alive !");
// if ((fp = fopen(fileName, "w")) == NULL) {
// perror("Open file failed\n");
// exit(0);
// }
printf("i am alive 1!");
if ((server_sock_id = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) < 0) {
perror("Create socket failed\n");
exit(0);
}
printf("i am alive 2!");
/* fill in the server sockaddr_in struct, commented by guoqingbo */
memset(&server_addr, 0, sizeof(server_addr));
server_addr.sin_family = AF_INET;
server_addr.sin_port = htons(serverport);
server_addr.sin_addr.s_addr = INADDR_ANY;
if (bind(server_sock_id, (struct sockaddr *)&server_addr, sizeof(server_addr)) < 0 ) {
perror("Bind socket failed\n");
exit(0);
}
printf("i am alive 3!");
if (-1 == listen(server_sock_id, 20)) {
perror("Listen socket failed\n");
exit(0);
}
/* server accept loop, commented by guoqingbo */
while (1) {
client_addr_len = sizeof(client_addr);
/**
 #include <sys/types.h>
 #include <sys/socket.h>
 int accept(int sockfd, struct sockaddr *addr, socklen_t *addrlen);
 The accept() system call:
 It is used with connection-based socket types such as SOCK_STREAM and
 SOCK_SEQPACKET. It takes the first pending connection request from the
 listening socket's queue, creates a new socket for it and returns a file
 descriptor referring to that socket. The new socket is not in the
 listening state, and the original listening socket is unaffected.
 Parameters:
 sockfd  - a socket created with socket(), bound to a local address with
           bind() (normally the server's address) and listening via listen();
 addr    - a pointer to a struct sockaddr that is filled in with the peer
           socket address (normally the client's address); the exact format
           of the returned address depends on the socket's address family.
           If addr is NULL no address is returned, in which case addrlen is
           not used and should also be NULL.
           Note: here addr points to a local sockaddr_in structure that
           receives the incoming connection's address information.
 addrlen - a value-result argument: the caller must initialize it to the
           size of the structure pointed to by addr; on return it contains
           the actual size of the peer address.
           Note: here addrlen is a local variable set to
           sizeof(struct sockaddr_in).
 If no connections are pending and the socket is not marked non-blocking,
 accept() blocks the caller until a connection arrives; if the socket is
 non-blocking and the queue is empty, accept() fails with EAGAIN or
 EWOULDBLOCK. On success it returns a non-negative descriptor for the
 accepted socket; on error it returns -1 and sets errno accordingly.
 **/
client_sock_id = accept(server_sock_id, (struct sockaddr *)&client_addr, &client_addr_len);
if (-1 == client_sock_id) {
perror("Accept socket failed\n");
exit(0);
}
camera_t* camera = camera_open("/dev/video0", width, height);  /* camera_open(device, width, height) */
printf("open the camera success!\n");
camera_init(camera);
printf("camera init success!\n");
camera_start(camera);
printf("camera is starting!\n");
struct timeval timeout;
timeout.tv_sec = 1;
timeout.tv_usec = 0;
/* skip 5 frames for booting a cam */
for (i = 0; i < 5; i++) {
camera_frame(camera, timeout);
}
rgb = calloc(width * height * 3, sizeof (uint8_t));
while(1){
// printf("good morning!...");
camera_frame(camera, timeout);
// printf("we will capture a frame! yo\n");
yuyv2rgb888(camera->head.start,rgb,camera->width, camera->height);
// uint ii;
// uint8_t jj;
// for(ii=0;ii<height;ii++)
// for(jj=0;jj<width;jj++)
// {
// rgb[(ii*width+jj)*3+0]=jj;
// rgb[(ii*width+jj)*3+1]=jj;
// rgb[(ii*width+jj)*3+2]=jj;
// }
// printf("Sending...\n");
send_len = send(client_sock_id, rgb, width * height * 3*sizeof(uint8_t), 0);
if ( send_len < 0 ) {
perror("Send file failed\n");
exit(0);
}
// printf("I am going to sleepping good night!...\n");
// sleep(0.1);
}
free(rgb);
camera_stop(camera);
printf("now I'm so sorry to tell you that we'l close the camera!haha\n");
camera_finish(camera);
camera_close(camera);
printf("Remeber to close the door,When you leave\n");
// printf("i am alive 4!");
// bzero(buf, MAXLINE);
// while (recv_len = recv(client_sock_id, buf, MAXLINE, 0)) {
// /* receiver data part commented by guoqingbo*/
// if(recv_len < 0) {
// printf("Recieve Data From Server Failed!\n");
// break;
// }
// printf("#");
// write_leng = fwrite(buf, sizeof(char), recv_len, fp);
// if (write_leng < recv_len) {
// printf("Write file failed\n");
// break;
// }
// bzero(buf,MAXLINE);
// }
printf("\nFinish Recieve\n");
fclose(fp);
close(client_sock_id);
}
printf("i am alive always yo!");
close(server_sock_id);
return 0;
}
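The server above streams raw RGB888 frames over the accepted TCP connection with no framing header, so a client only has to read width * height * 3 bytes per frame. The original download does not include a client, so the following is just a minimal sketch under the assumption of the hard-coded 320x240 frame size; it connects, receives exactly one frame and writes it to frame.rgb, which can then be inspected with any tool that understands raw RGB data (for example ImageMagick: convert -size 320x240 -depth 8 rgb:frame.rgb frame.png).
/* client_sketch.c - illustrative only, not part of the original source.
 * Usage: ./client_sketch server_ip port   e.g. ./client_sketch 192.168.1.10 8050 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>

#define FRAME_W 320
#define FRAME_H 240
#define FRAME_BYTES (FRAME_W * FRAME_H * 3)   /* raw RGB888, matches the server's send() */

int main(int argc, char **argv)
{
    if (argc != 3) {
        fprintf(stderr, "usage: %s server_ip port\n", argv[0]);
        return 1;
    }
    int sock = socket(AF_INET, SOCK_STREAM, 0);
    if (sock < 0) { perror("socket"); return 1; }

    struct sockaddr_in srv;
    memset(&srv, 0, sizeof(srv));
    srv.sin_family = AF_INET;
    srv.sin_port = htons((unsigned short)atoi(argv[2]));
    if (inet_pton(AF_INET, argv[1], &srv.sin_addr) != 1) {
        fprintf(stderr, "bad address %s\n", argv[1]);
        return 1;
    }
    if (connect(sock, (struct sockaddr *)&srv, sizeof(srv)) < 0) {
        perror("connect");
        return 1;
    }

    /* TCP is a byte stream, so keep reading until one whole frame has arrived */
    unsigned char *frame = malloc(FRAME_BYTES);
    size_t got = 0;
    while (got < FRAME_BYTES) {
        ssize_t n = recv(sock, frame + got, FRAME_BYTES - got, 0);
        if (n <= 0) { perror("recv"); free(frame); close(sock); return 1; }
        got += (size_t)n;
    }

    /* dump the frame so it can be inspected offline */
    FILE *out = fopen("frame.rgb", "wb");
    if (out) { fwrite(frame, 1, FRAME_BYTES, out); fclose(out); }

    free(frame);
    close(sock);
    return 0;
}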
The library functions required by the program follow:
/*
* capturing from UVC cam
* requires: libjpeg-dev
* build: gcc -std=c99 capture.c -ljpeg -o capture
*/
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <sys/time.h>
#include <sys/types.h>
#include <unistd.h>
#define FALSE -1
#define TRUE 1
/* forward declaration: yuyv2rgb888() below calls this before it is defined */
int convert_yuyv_to_rgb_pixel(int y, int u, int v);
void quit(const char * msg)
{
fprintf(stderr, "[%s] %d: %s\n", msg, errno, strerror(errno));
exit(EXIT_FAILURE);
}
int xioctl(int fd, int request, void* arg)
{
int i;
for (i = 0; i < 100; i++) {
int r = ioctl(fd, request, arg);
if (r != -1 || errno != EINTR) return r;
}
return -1;
}
typedef struct {
uint8_t* start;
size_t length;
} buffer_t;
typedef struct {
int fd;
uint32_t width;
uint32_t height;
size_t buffer_count;
buffer_t* buffers;
buffer_t head;
} camera_t;
camera_t* camera_open(const char * device, uint32_t width, uint32_t height)
{
int fd = open(device, O_RDWR | O_NONBLOCK, 0);
if (fd == -1)
{
perror("open error:");
quit("open");
}
camera_t* camera = malloc(sizeof (camera_t));
camera->fd = fd;
camera->width = width;
camera->height = height;
camera->buffer_count = 0;
camera->buffers = NULL;
camera->head.length = 0;
camera->head.start = NULL;
return camera;
}
void camera_init(camera_t* camera) {
/**
 struct v4l2_capability
 {
 __u8  driver[16];    // driver name
 __u8  card[32];      // device name
 __u8  bus_info[32];  // location on the bus
 __u32 version;       // driver version
 __u32 capabilities;  // capability flags
 __u32 reserved[4];
 };
 **/
// the first ioctl queries the device capabilities
struct v4l2_capability cap;
if (xioctl(camera->fd, VIDIOC_QUERYCAP, &cap) == -1) quit("VIDIOC_QUERYCAP");
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) quit("no capture");
if (!(cap.capabilities & V4L2_CAP_STREAMING)) quit("no streaming");
// print the device information
// printf("DriverName:%s\nCard Name:%s\nBus info:%s\nDriverVersion:%u.%u.%u\n", cap.driver, cap.card, cap.bus_info, (cap.version >> 16) & 0xFF, (cap.version >> 8) & 0xFF, cap.version & 0xFF);
/**
 5. Frame formats:
 VIDIOC_ENUM_FMT  // enumerate all supported formats
 int ioctl(int fd, int request, struct v4l2_fmtdesc *argp);
 struct v4l2_fmtdesc
 {
 __u32 index;              // index of the format to query, set by the application
 enum v4l2_buf_type type;  // buffer (frame) type, set by the application
 __u32 flags;              // whether the format is compressed
 __u8  description[32];    // format name
 __u32 pixelformat;        // pixel format
 __u32 reserved[4];        // reserved
 };
 **/
struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof (fmtdesc));
fmtdesc.index = 0;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Supported formats:\n");
while(ioctl(camera->fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
{
printf("\t%d.%s\n", fmtdesc.index + 1, fmtdesc.description);
fmtdesc.index++;
}
/**
 6. Cropping capabilities: VIDIOC_CROPCAP
 struct v4l2_cropcap describes the capture capabilities of the camera.
 Before capturing, first set the type field of v4l2_cropcap, then use the
 VIDIOC_CROPCAP command to read the device's cropping parameters into the
 structure, including bounds (top-left corner, width and height of the
 largest capture rectangle) and defrect (top-left corner, width and height
 of the default capture rectangle).
 VIDIOC_CROPCAP
 int ioctl(int fd, int request, struct v4l2_cropcap *argp);
 struct v4l2_cropcap
 {
 enum v4l2_buf_type type;      // set by the application
 struct v4l2_rect   bounds;    // maximum bounds
 struct v4l2_rect   defrect;   // default rectangle
 struct v4l2_fract  pixelaspect;
 };
 Buffer types:
 V4L2_BUF_TYPE_VIDEO_CAPTURE        buffer type "capture", for video capture devices
 V4L2_BUF_TYPE_VIDEO_OUTPUT         buffer type "output", for video output devices
 V4L2_BUF_TYPE_VIDEO_OVERLAY        buffer type "overlay", for overlay devices
 V4L2_BUF_TYPE_VBI_CAPTURE          for VBI capture devices
 V4L2_BUF_TYPE_VBI_OUTPUT           for VBI output devices
 V4L2_BUF_TYPE_SLICED_VBI_CAPTURE   for sliced VBI capture devices
 V4L2_BUF_TYPE_SLICED_VBI_OUTPUT    for sliced VBI output devices
 V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY for video output overlay devices
 V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE for multi-planar video capture devices
 V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE  for multi-planar video output devices
 **/
struct v4l2_cropcap cropcap;
memset(&cropcap, 0, sizeof (cropcap));
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_CROPCAP, &cropcap) == 0) {
struct v4l2_crop crop;
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect;
if (xioctl(camera->fd, VIDIOC_S_CROP, &crop) == -1) {
// cropping not supported
}
}
/**
 Query or set the current format: VIDIOC_G_FMT, VIDIOC_S_FMT
 (these are also used to check whether a given format is supported).
 struct v4l2_format selects the video standard and frame format of the
 camera. Before setting it, fill in the relevant fields: type (stream
 type), fmt.pix.width, fmt.pix.height, fmt.pix.field (sampling, e.g.
 interlaced) and fmt.pix.pixelformat (sampling type, e.g. YUYV 4:2:2),
 then issue VIDIOC_S_FMT to set the capture format, as done below.
 **/
struct v4l2_format format;
memset(&format, 0, sizeof (format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = camera->width;
format.fmt.pix.height = camera->height;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
format.fmt.pix.field = V4L2_FIELD_NONE;
if (xioctl(camera->fd, VIDIOC_S_FMT, &format) == -1) quit("VIDIOC_S_FMT");
/**
 9. Requesting and managing buffers
 The application can exchange data with the device in three ways:
 direct read/write, memory mapping (mmap), and user pointers.
 Only memory mapping is used here.
 9.1 Request buffers from the device: VIDIOC_REQBUFS
 Related function:
 int ioctl(int fd, int request, struct v4l2_requestbuffers *argp);
 struct v4l2_requestbuffers
 {
 __u32 count;              // number of frame buffers requested
 enum v4l2_buf_type type;  // frame buffer type
 enum v4l2_memory memory;  // memory mapping vs. user pointer
 __u32 reserved[2];
 };
 **/
struct v4l2_requestbuffers req;
memset(&req, 0, sizeof (req));
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->fd, VIDIOC_REQBUFS, &req) == -1) quit("VIDIOC_REQBUFS");
camera->buffer_count = req.count;
camera->buffers = calloc(req.count, sizeof (buffer_t));
size_t buf_max = 0;
size_t i;
for (i = 0; i < camera->buffer_count; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof (buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(camera->fd, VIDIOC_QUERYBUF, &buf) == -1)
quit("VIDIOC_QUERYBUF");
/**
 #include <sys/mman.h>
 void *mmap(void *addr, size_t length, int prot, int flags, int fd, off_t offset);
 // addr    start address of the mapping, usually NULL to let the kernel choose
 // length  length of the memory region to map
 // prot    access protection: PROT_EXEC, PROT_READ, PROT_WRITE, PROT_NONE
 // flags   whether the mapping may be shared with other processes: MAP_SHARED, MAP_PRIVATE
 // fd, offset  identify the memory to be mapped
 Returns the mapped address on success, MAP_FAILED ((void *) -1) on failure.
 int munmap(void *addr, size_t length);  // tear the mapping down
 // addr is the mapped address, length the length of the mapping
 **/
if (buf.length > buf_max) buf_max = buf.length;
camera->buffers[i].length = buf.length;
camera->buffers[i].start =
mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
camera->fd, buf.m.offset);
printf("the image length is%d",buf.length);
if (camera->buffers[i].start == MAP_FAILED) quit("mmap");
}
camera->head.start = malloc(buf_max);
}
/**
 10. Once the buffers are set up, capturing can start.
 10.1 Start or stop the stream: VIDIOC_STREAMON, VIDIOC_STREAMOFF
 **/
void camera_start(camera_t* camera)
{
size_t i;
/**
 // argp is a pointer to the stream type, e.g. V4L2_BUF_TYPE_VIDEO_CAPTURE.
 10.2 Before starting, every frame buffer must be placed on the queue:
 VIDIOC_QBUF   // queue a buffer
 VIDIOC_DQBUF  // dequeue a buffer
 **/
for (i = 0; i < camera->buffer_count; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;  // which buffer slot is being queued
if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) quit("VIDIOC_QBUF");
}
// start the stream: VIDIOC_STREAMON
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMON, &type) == -1)
quit("VIDIOC_STREAMON");
}
void camera_stop(camera_t* camera)
{
// stop the stream: VIDIOC_STREAMOFF
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) == -1)
quit("VIDIOC_STREAMOFF");
}
void camera_finish(camera_t* camera)
{
size_t i;
for (i = 0; i < camera->buffer_count; i++) {
munmap(camera->buffers[i].start, camera->buffers[i].length);
}
free(camera->buffers);
camera->buffer_count = 0;
camera->buffers = NULL;
free(camera->head.start);
camera->head.length = 0;
camera->head.start = NULL;
}
void camera_close(camera_t* camera)
{
if (close(camera->fd) == -1) quit("close");
free(camera);
}
// grab one frame and copy it into camera->head
int camera_capture(camera_t* camera)
{
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->fd, VIDIOC_DQBUF, &buf) == -1) return FALSE;
memcpy(camera->head.start, camera->buffers[buf.index].start, buf.bytesused);
camera->head.length = buf.bytesused;
if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) return FALSE;
return TRUE;
}
int camera_frame(camera_t* camera, struct timeval timeout) {
/**
 select() matters in socket programming, although beginners tend to stick
 to blocking calls such as connect, accept, recv or recvfrom (blocking
 means the process or thread sits inside the call until the awaited event
 occurs and the function cannot return earlier). With select() a program
 can work in a non-blocking style: the call watches the file descriptors
 we care about for readability, writability or exceptions and always
 returns, either because an event occurred or because the timeout expired,
 so the caller keeps running instead of hanging on a single descriptor.
 Here it waits, with a timeout, until the camera fd has a frame ready.
 **/
fd_set fds;
FD_ZERO(&fds);
FD_SET(camera->fd, &fds);
int r = select(camera->fd + 1, &fds, 0, 0, &timeout);
if (r == -1) quit("select");
if (r == 0) return FALSE;
return camera_capture(camera);
}
int minmax(int min, int v, int max)
{
return (v < min) ? min : (max < v) ? max : v;
}
void yuyv2rgb(uint8_t* yuyv,uint8_t* rgb, uint32_t width, uint32_t height)
{
size_t i,j;
for (i = 0; i < height; i++) {
for (j = 0; j < width; j += 2) {
size_t index = i * width + j;
int y0 = yuyv[index * 2 + 0] << 8;
int u = yuyv[index * 2 + 1] - 128;
int y1 = yuyv[index * 2 + 2] << 8;
int v = yuyv[index * 2 + 3] - 128;
rgb[index * 3 + 0] = minmax(0, (y0 + 359 * v) >> 8, 255);
rgb[index * 3 + 1] = minmax(0, (y0 + 88 * v - 183 * u) >> 8, 255);
rgb[index * 3 + 2] = minmax(0, (y0 + 454 * u) >> 8, 255);
rgb[index * 3 + 3] = minmax(0, (y1 + 359 * v) >> 8, 255);
rgb[index * 3 + 4] = minmax(0, (y1 + 88 * v - 183 * u) >> 8, 255);
rgb[index * 3 + 5] = minmax(0, (y1 + 454 * u) >> 8, 255);
}
}
// return rgb;
}
void yuyv2rgb888(uint8_t* yuyv,uint8_t* rgb, uint32_t width, uint32_t height)
{
uint in, out = 0;
uint pixel_16;
uint8_t pixel_24[3];
uint pixel32;
int y0, u, y1, v;
for(in = 0; in < width * height * 2; in += 4) {
pixel_16 =
yuyv[in + 3] << 24 |
yuyv[in + 2] << 16 |
yuyv[in + 1] << 8 |
yuyv[in + 0];  // YUYV 4:2:2 packs 2 bytes per pixel; each pair of pixels shares one u (Cb) and one v (Cr), while RGB24 uses 3 bytes per pixel
y0 = (pixel_16 & 0x000000ff);
u = (pixel_16 & 0x0000ff00) >> 8;
y1 = (pixel_16 & 0x00ff0000) >> 16;
v = (pixel_16 & 0xff000000) >> 24;
pixel32 = convert_yuyv_to_rgb_pixel(y0, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];  // one RGB pixel
pixel32 = convert_yuyv_to_rgb_pixel(y1, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
}
// printf("\nthe rgb out is %d",out);
}
int convert_yuyv_to_rgb_pixel(int y, int u, int v)
{
uint pixel32 = 0;
uint8_t *pixel = (uint8_t *)&pixel32;
int r, g, b;
r = y + (1.370705 * (v-128));
g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
b = y + (1.732446 * (u-128));
if(r > 255) r = 255;
if(g > 255) g = 255;
if(b > 255) b = 255;
if(r < 0) r = 0;
if(g < 0) g = 0;
if(b < 0) b = 0;
pixel[0] = r * 220 / 256;
pixel[1] = g * 220 / 256;
pixel[2] = b * 220 / 256;
return pixel32;
}
// int main()
// {
// int i;
// camera_t* camera = camera_open("/dev/video0", 352, 288);
// printf("open the camera success!\n");
// camera_init(camera);
// printf("camera init success!\n");
// camera_start(camera);
// printf("camera is starting!\n");
// struct timeval timeout;
// timeout.tv_sec = 1;
// timeout.tv_usec = 0;
// /* skip 5 frames for booting a cam */
// for (i = 0; i < 5; i++) {
// camera_frame(camera, timeout);
// }
// camera_frame(camera, timeout);
// printf("we will capture a frame! yo\n");
// unsigned char* rgb =
// yuyv2rgb(camera->head.start, camera->width, camera->height);
// FILE* out = fopen("result.jpg", "w");
// fclose(out);
// free(rgb);
// camera_stop(camera);
// printf("now I'm so sorry to tell you that we'l close the camera!haha\n");
// camera_finish(camera);
// camera_close(camera);
// printf("Remeber to close the door,When you leave\n");
// return 0;
// }
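A note on building: the original post gives no build line for the server, but assuming the library above is saved as camera.c, its declarations (camera_open, camera_init, camera_start, camera_frame, camera_stop, camera_finish, camera_close, yuyv2rgb888 and the camera_t type) are collected in the camera.h that the main program includes, and the main program is saved as server.c, the pair should compile with something along these lines:
gcc -std=gnu99 -Wall server.c camera.c -o videoserver
./videoserver -s 320x240 -w 8050
The -ljpeg mentioned in the capture.c header comment is not needed here, since this version of the library never uses libjpeg.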