Hi, I am writing a program to capture from a V4L device. At 640x480 it gets about 12.5 frames per second, which seems to be the normal rate, but at 320x240 it still only does 12.5 fps, which is too slow. Why? I compared with fbtv in console mode: at 640x480 fbtv also does 12.5 fps, the same as my program, but at 320x240 it manages 24-25 fps. The fbtv source code is fairly complex and I don't have much time to read it right now. Can anyone give me a suggestion to improve the capture speed? I attach my code for reference. Thanks!
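For reference, here is a rough sketch (untested) of the double-buffered loop I think I may need instead, written against the helpers in the attached v4l.c/v4l.h, and assuming v4l_video_mmap_init()/v4l_mmap_init() have already been run and the mmap buffer exposes at least two frames. The idea is to queue the other frame with VIDIOCMCAPTURE before syncing on the one queued earlier, so the driver can capture the next frame while I am still processing the current one. The capture_loop() name is only for illustration.

#include "v4l.h"
/* pipelined capture sketch (untested) -- uses the helpers from the attached v4l.c */
int capture_loop(v4l_device* vd, int nframes)
{
    int i, cur = 0, nxt;
    unsigned char* img;
    if(v4l_grab_frame(vd, cur) < 0)          /* queue frame 0 */
        return -1;
    for(i = 0; i < nframes; i++)
    {
        nxt = cur ^ 1;
        if(v4l_grab_frame(vd, nxt) < 0)      /* queue the other frame first... */
            return -1;
        vd->frame_current = cur;             /* ...then wait for the one queued earlier */
        if(v4l_grab_sync(vd) < 0)
            return -1;
        img = vd->map + vd->mbuf.offsets[cur];
        /* process img here while the driver fills frame nxt */
        (void)img;
        cur = nxt;
    }
    vd->frame_current = cur;                 /* drain the last queued frame */
    return v4l_grab_sync(vd);
}

I am not sure this is actually what fbtv does, so corrections are welcome.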
/* v4l.c
* author: linuxer
* date: 2003.4.21
* this file define some function for operating v4l device
*/
#include <stdio.h>
#include <errno.h>
#include <stdlib.h>
#include <unistd.h>
#include <error.h>
#include <assert.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <time.h>
#include <linux/videodev.h>
#include <jpeglib.h>
#include <jerror.h>
#include <jconfig.h>
#include "v4l.h"
#define DEFAULT_DEVICE "/dev/video0"
#define FALSE 0
#define TRUE 1
#define DEFAULT_PALETTE VIDEO_PALETTE_RGB32
#define VIDEO_MAX_FRAME 32
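/* printable names for the video norm ids stored in video_channel.norm */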
static struct STRTAB norms_bttv[] = {
{ VIDEO_MODE_PAL, "PAL" },
{ VIDEO_MODE_NTSC, "NTSC" },
{ VIDEO_MODE_SECAM, "SECAM" },
{ 3, "PAL-NC" },
{ 4, "PAL-M" },
{ 5, "PAL-N" },
{ 6, "NTSC-JP" },
{ -1, NULL }
};
int v4l_open(char* device, v4l_device* vd)
{
if(!device)
device = DEFAULT_DEVICE;
if((vd->fd = open(device, O_RDWR)) < 0)
{
perror("v4l_open:");
return -1;
}
return 0;
}
int v4l_get_capability(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCGCAP, &(vd->capability)) < 0)
{
perror("v4l_get_capability:");
return -1;
}
return 0;
}
int v4l_get_picture(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCGPICT, &(vd->picture)) < 0)
{
perror("v4l_get_picture:");
return -1;
}
return 0;
}
int v4l_set_palette(v4l_device* vd, int palette)
{
vd->picture.palette = palette;
if(ioctl(vd->fd, VIDIOCSPICT, &(vd->picture)) < 0)
{
perror("v4l_set_palette0:");
return -1;
}
if(v4l_get_picture(vd))
{
perror("v4l_set_palette1:");
return -1;
}
return 0;
}
int v4l_get_channels(v4l_device* vd)
{
int i;
for(i=0; i<vd->capability.channels; i++)
{
vd->channel[i].channel = i;
if(ioctl(vd->fd, VIDIOCGCHAN, &(vd->channel[i])) < 0)
{
perror("v4l_get_channels:");
return -1;
}
}
return 0;
}
int v4l_set_channels(v4l_device* vd)
{
int i;
for(i=0; i<vd->capability.channels; i++)
{
vd->channel[i].channel = i;
if(ioctl(vd->fd, VIDIOCSCHAN, &(vd->channel[i])) < 0)
{
perror("v4l_get_channels:");
return -1;
}
}
return 0;
}
int v4l_get_audios(v4l_device* vd)
{
int i;
for(i=0; i<vd->capability.audios; i++)
{
vd->audio[i].audio = i;
if(ioctl(vd->fd, VIDIOCGAUDIO, &(vd->audio[i])) < 0)
{
perror("v4l_get_audio:");
return -1;
}
}
return 0;
}
int v4l_close(v4l_device* vd)
{
if(vd->map)
munmap(vd->map, vd->mbuf.size);
close(vd->fd);
return 0;
}
int v4l_get_mbuf(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCGMBUF, &(vd->mbuf)) < 0)
{
perror("v4l_get_mbuf:");
return -1;
}
return 0;
}
int v4l_set_buffer(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCSFBUF, &(vd->buffer)) < 0)
{
perror("v4l_set_buff:");
return -1;
}
return 0;
}
int v4l_get_buffer(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCGFBUF, &(vd->buffer)) < 0)
{
perror("v4l_get_buffer:");
return -1;
}
return 0;
}
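/* map the driver's capture buffers (described by VIDIOCGMBUF) into our address space */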
int v4l_mmap_init(v4l_device* vd)
{
if(v4l_get_mbuf(vd) < 0)
return -1;
if((vd->map = mmap(0, vd->mbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, vd->fd, 0)) == MAP_FAILED)
{
perror("v4l_mmap_init:mmap");
vd->map = NULL;
return -1;
}
return 0;
}
/*
 * c: 0: television
 *    1: composite1
 *    2: S-Video
 */
int v4l_switch_channel(v4l_device* vd, int c)
{
if(ioctl(vd->fd, VIDIOCSCHAN, &(vd->channel[c])) < 0)
{
perror("v4l_switch_channel:");
return -1;
}
return 0;
}
int v4l_set_norm(v4l_device* vd, int norm)
{
int i;
/* the norm is only stored here; it is applied to the hardware by the
 * VIDIOCSCHAN calls in v4l_set_channels()/v4l_switch_channel() */
for(i=0; i<vd->capability.channels; i++)
{
vd->channel[i].norm = norm;
}
if(v4l_get_capability(vd))
{
perror("v4l_set_norm 1:");
return -1;
}
if(v4l_get_picture(vd))
{
perror("v4l_set_norm 2:");
return -1;
}
return 0;
}
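/* fill in the video_mmap request used by VIDIOCMCAPTURE (size, palette)
 * and reset the two-frame bookkeeping */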
int v4l_video_mmap_init(v4l_device* vd, int width, int height)
{
vd->mmap.width = width;
vd->mmap.height = height;
vd->mmap.format = VIDEO_PALETTE_RGB24;
vd->frame_current = 0;
vd->frame_using[0] = FALSE;
vd->frame_using[1] = FALSE;
return 0;
}
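/* queue one frame for capture with VIDIOCMCAPTURE; the driver fills it
 * asynchronously and v4l_grab_sync() (VIDIOCSYNC) waits for it to complete */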
int v4l_grab_frame(v4l_device* vd, int frame)
{
if(vd->frame_using[frame])
{
fprintf(stderr, "v4l_grab_frame: frame %d is already used.\n", frame);
return -1;
}
vd->mmap.frame = frame;
if(ioctl(vd->fd, VIDIOCMCAPTURE, &(vd->mmap)) < 0)
{
perror("v4l_grab_frame:");
return -1;
}
vd->frame_using[frame] = TRUE;
vd->frame_current = frame;
return 0;
}
int v4l_grab_sync(v4l_device* vd)
{
if(ioctl(vd->fd, VIDIOCSYNC, &(vd->frame_current)) < 0)
{
perror("v4l_grab_sync:");
return -1;
}
vd->frame_using[vd->frame_current] = FALSE;
return 0;
}
/* -------------------------------------------------------*/
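/* double-buffer helpers: device_grab_frame() queues frame 0,
 * device_next_frame() flips to the other frame and queues it,
 * device_get_address() returns the start of the current frame
 * in the mmap'ed capture buffer */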
int device_grab_frame(v4l_device* vd)
{
vd->frame_current = 0;
if(v4l_grab_frame(vd, 0) < 0)
return -1;
return 0;
}
int device_next_frame(v4l_device* vd)
{
vd->frame_current ^=1;
if(v4l_grab_frame(vd, vd->frame_current) < 0)
return -1;
return 0;
}
unsigned char* device_get_address(v4l_device* vd)
{
return vd->map + vd->mbuf.offsets[vd->frame_current];
}
/*------------------------ code for test -----------------------------*/
int write_ppm24(unsigned char* img)
{
FILE* fp;
fp = fopen("test24.ppm","w");
if(fp == NULL)
return -1;
fprintf(fp,"P6\n%d %d\n255\n",PAL_NC_WIDTH, PAL_NC_HEIGHT);
fwrite(img, 3*PAL_NC_WIDTH, PAL_NC_HEIGHT, fp);
fclose(fp);
return 0;
}
int write_jpeg(char *filename, unsigned char* img, int quality, int gray)
{
FILE *fp;
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;
unsigned int i;
unsigned char *line;
int line_length;
if (NULL == (fp = fopen(filename,"w"))) {
fprintf(stderr,"grab: can't open %s: %s\n",filename,strerror(errno));
return -1;
}
cinfo.err = jpeg_std_error(&jerr);
jpeg_create_compress(&cinfo);
jpeg_stdio_dest(&cinfo, fp);
cinfo.image_width = PAL_NC_WIDTH;
cinfo.image_height = PAL_NC_HEIGHT;
cinfo.input_components = gray ? 1: 3;
cinfo.in_color_space = gray ? JCS_GRAYSCALE: JCS_RGB;
jpeg_set_defaults(&cinfo);
jpeg_set_quality(&cinfo, quality, TRUE);
jpeg_start_compress(&cinfo, TRUE);
line_length = gray ? PAL_NC_WIDTH : PAL_NC_WIDTH * 3;
for (i = 0, line = img; i < PAL_NC_HEIGHT;
i++, line += line_length)
jpeg_write_scanlines(&cinfo, &line, 1);
jpeg_finish_compress(&(cinfo));
jpeg_destroy_compress(&(cinfo));
fclose(fp);
return 0;
}
int device_init(char* dev, v4l_device* vd,int channel, int norm)
{
int i;
//v4l_device vd;
unsigned char * img;
if( dev == NULL)
dev = "/dev/video0";
if(v4l_open(dev, vd))
{
return -1;
}
else
{
v4l_close(vd);
}
if(v4l_open(dev, vd))
return -1;
if(v4l_get_capability(vd))
return -1;
if(v4l_get_channels(vd))
return -1;
if(v4l_set_norm(vd, norm))
return -1;
if(v4l_set_channels(vd))
return -1;
if(v4l_switch_channel(vd,channel))
return -1;
// start initializing the grab
if(v4l_get_picture(vd))
return -1;
if(v4l_video_mmap_init(vd, PAL_NC_WIDTH, PAL_NC_HEIGHT))
return -1;
if(v4l_set_palette(vd, VIDEO_PALETTE_RGB24))
return -1;
// if(v4l_get_buffer(vd))
// return -1;
// if(v4l_get_norm(&vd))
// return -1;
if(v4l_mmap_init(vd))
return -1;
printf("device_init() info:\n\t%s: initialization OK ...%s\n\t\t%d channels\n\t\t%d audios\n\n",
dev, vd->capability.name, vd->capability.channels, vd->capability.audios);
for(i=0; i<vd->capability.channels; i++)
{
printf("Channel %d: %s(%s)\n", i, vd->channel[i].name, norms_bttv[vd->channel[i].norm].str);
}
printf("v4l: mmap's address = %p\n", vd->map);
printf("v4l: mmap's buffer size = 0x%x\n", vd->mbuf.size);
printf("v4l: mmap's frames = %d(%d max)\n",vd->mbuf.frames, VIDEO_MAX_FRAME);
for(i=0; i<vd->mbuf.frames; i++)
{
printf("v4l: frames %d's offset = 0x%x\n", i ,vd->mbuf.offsets[i]);
}
printf("v4l: chanel switch to %d(%s)\n", channel, vd->channel, vd->channel[channel].name);
if(v4l_grab_frame(vd, 0))
return -1;
if(v4l_grab_sync(vd))
return -1;
img = device_get_address(vd);
// if(write_ppm24(img))
// return -1;
// if(write_jpeg("tess01.jpeg", img, 75, 0))
// return -1;
return 0;
}
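/* test driver: grab 2000 frames at PAL_NC_WIDTH x PAL_NC_HEIGHT and
 * report the elapsed time every 100 frames */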
int main(void)
{
unsigned int i = 0;
time_t t1, t2;
unsigned char* img;
v4l_device vd;
if(device_init("/dev/video0", &vd, 0, 3) == -1)
{
perror("device_init: failed...");
exit(1);
}
i=0;
t1 = time(NULL);
printf("t1 = %d\n",t1);
while(i<2000)
{
device_next_frame(&vd);
v4l_grab_sync(&vd);
img = device_get_address(&vd);
i++;
if(i%100 == 0)
fprintf(stderr, "grab %d frames in %d seconds\n", i, (time(NULL) - t1));
}
t2 = time(NULL);
printf("width*height: %dx%d grab %d frames, use %d seconds!\n", PAL_NC_WIDTH, PAL_NC_HEIGHT, i, (t2 - t1));
v4l_close(&vd);
exit(0);
}
/* v4l.h
* author: linuxer
* date: 2003.4.21
*/
#ifndef _V4L_H
#define _V4L_H
#include <pthread.h>
#include <sys/types.h>
#include <linux/videodev.h>
#define PAL_WIDTH 768
#define PAL_HEIGHT 576
#define NTSC_WIDTH 640
#define NTSC_HEIGHT 480
#define PAL_NC_WIDTH 320
#define PAL_NC_HEIGHT 240
//#define PAL_NC_WIDTH 640
//#define PAL_NC_HEIGHT 480
struct STRTAB {
long ref;
const char *str;
};
struct _v4l_device {
int fd;
struct video_capability capability;
struct video_window window;
struct video_channel channel[8];
struct video_picture picture;
struct video_tuner tuner;
struct video_audio audio[8];
struct video_buffer buffer;
struct video_mmap mmap;
struct video_mbuf mbuf;
unsigned char* map;
pthread_mutex_t mutex;
int frame_current;
int frame_using[2];
int overlay;
};
typedef struct _v4l_device v4l_device;
int v4l_open(char* device, v4l_device* vd);
int v4l_close(v4l_device* vd);
int v4l_get_capability(v4l_device* vd);
int v4l_get_picture(v4l_device* vd);
int v4l_get_channels(v4l_device* vd);
int v4l_set_channels(v4l_device* vd);
int v4l_set_norm(v4l_device* vd, int norm);
int v4l_get_audios(v4l_device* vd);
int v4l_set_buffer(v4l_device* vd);
int v4l_get_buffer(v4l_device* vd);
int v4l_get_mbuf(v4l_device* vd);
int v4l_mmap_init(v4l_device* vd);
int v4l_grab_sync(v4l_device* vd);
int v4l_video_mmap_init(v4l_device* vd, int width, int height);
int v4l_grab_frame(v4l_device* vd, int frame);
int v4l_grab_picture(v4l_device* vd, unsigned int );
int v4l_switch_channel(v4l_device* vd, int channel);
#endif