#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <iostream>
#include <string>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "jpeglib.h"
}
using namespace std;
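// Format a time in seconds as "hh-mm-ss-mmm"; the result is used as the JPEG file name.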
char* format_time(float time)
{
// Returns a pointer to a static buffer: the result is overwritten by the
// next call, so the function is not reentrant.
static char realtime[32];
char strtime[32];
sprintf(strtime,"%f",time);
string strms = strtime;
int second_time = (int)time;
int h = second_time/3600;
int m = second_time%3600/60;
int s = second_time%60;
// Keep the first three digits after the decimal point as milliseconds.
string str = strms.substr(strms.find(".")+1,3);
sprintf(realtime,"%02d-%02d-%02d-%s",h,m,s,str.c_str());
return realtime;
}
// Compress a video frame to JPEG and write it to a file named after its timestamp.
void saveFrame(AVFrame* pFrame, int width, int height, float timestamp)
{
char fname[128] = { 0 };
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;
JSAMPROW row_pointer[1];
int row_stride;
uint8_t *buffer;
FILE *fp;
// Packed RGB24 data: data[0] holds the whole picture, 3 bytes per pixel.
buffer = pFrame->data[0];
sprintf(fname, "%s.jpg", format_time(timestamp));
fp = fopen(fname, "wb");
if (fp == NULL)
{
fprintf(stderr, "cannot open %s for writing\n", fname);
return;
}
cinfo.err = jpeg_std_error(&jerr);
jpeg_create_compress(&cinfo);
jpeg_stdio_dest(&cinfo, fp);
cinfo.image_width = width;
cinfo.image_height = height;
cinfo.input_components = 3;
cinfo.in_color_space = JCS_RGB;
jpeg_set_defaults(&cinfo);
jpeg_set_quality(&cinfo, 80, TRUE);
jpeg_start_compress(&cinfo, TRUE);
row_stride = width * 3; // bytes per scanline of packed RGB24
while (cinfo.next_scanline < cinfo.image_height)
{
row_pointer[0] = &buffer[cinfo.next_scanline * row_stride];
(void)jpeg_write_scanlines(&cinfo, row_pointer, 1);
}
jpeg_finish_compress(&cinfo);
fclose(fp);
jpeg_destroy_compress(&cinfo);
return;
}
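// Decode the input video, convert every key frame to RGB24 at the size given
// on the command line, and save it as a JPEG named after its timestamp.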
int main(int argc,char* argv[])
{
if (argc < 4)
{
fprintf(stderr, "usage: %s <input video> <output width> <output height>\n", argv[0]);
return 0;
}
AVFormatContext *pFormatCtx;
int i, videoIndex;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVPacket packet;
int frameFinished;
int numBytes;
uint8_t *buffer;
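// Register all formats/codecs, open the input file, and read its stream info.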
pFormatCtx = avformat_alloc_context();
av_register_all();
if (avformat_open_input(&pFormatCtx, argv[1], NULL, 0) != 0)
return -1;
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
return -1;
av_dump_format(pFormatCtx, 0, argv[1], 0);
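// Find the first video stream in the file.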
videoIndex = -1;
for (i = 0; i < pFormatCtx->nb_streams; ++i){
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
videoIndex = i;
break;
}
}
if (videoIndex == -1)
{
fprintf(stderr, "no video stream found\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoIndex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL || avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
fprintf(stderr, "unsupported codec\n");
return -1;
}
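// One frame receives decoded pictures, the other holds the RGB24 conversion.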
AVFrame *pFrameRGB, *pFrame;
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
if (pFrame == NULL || pFrameRGB == NULL)
return -1;
numBytes = avpicture_get_size(AV_PIX_FMT_RGB24, atoi(argv[2]), atoi(argv[3]));
buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture*)pFrameRGB, buffer, AV_PIX_FMT_RGB24, atoi(argv[2]), atoi(argv[3]));
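// Software scaler that converts decoded frames to RGB24 at the output size.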
struct SwsContext* img_convert_ctx;
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
atoi(argv[2]), atoi(argv[3]), AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
int count = 0;
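// Read packets; decode video packets and save every completed key frame.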
while (av_read_frame(pFormatCtx, &packet) >= 0){
if (packet.stream_index == videoIndex){
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
count++;
if (frameFinished)
{
if (pFrame->key_frame == 1)
{
sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
// Derive the timestamp from the stream's average frame rate (count is the
// number of video packets decoded, which approximates the frame index);
// fall back to 25 fps if the rate is unknown.
AVRational frame_rate = pFormatCtx->streams[videoIndex]->avg_frame_rate;
float time = (frame_rate.num > 0) ? count * (float)frame_rate.den / frame_rate.num : count * 0.04f;
saveFrame(pFrameRGB, atoi(argv[2]), atoi(argv[3]), time);
}
}
}
av_free_packet(&packet);
}
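// Free the scaler, RGB buffer, frames, and codec/format contexts.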
sws_freeContext(img_convert_ctx);
av_free(buffer);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}