// A small sample program that shows how to use libavformat and libavcodec to
// read video from a file.
#include <string>
#include <iostream>
#include <stdexcept>
#include <ffmpeg/avcodec.h>
#include <ffmpeg/avformat.h>
#include <Data/VideoParser.hpp>
using namespace std;
using namespace caviar;
// Write a single decoded frame to disk as a binary PPM ("P6") image.
// pFrame must hold packed RGB24 data in data[0] (3 bytes per pixel);
// width/height are the image dimensions in pixels.
// Returns the name of the file written (always "frame.ppm" — each call
// overwrites the previous frame).
// Throws std::runtime_error if the file cannot be opened for writing.
string VideoParser::SaveFrame(AVFrame *pFrame, int width, int height) {
  // The filename is a fixed constant, so no sprintf/char-buffer is needed.
  const char *szFilename = "frame.ppm";
  FILE *pFile = fopen(szFilename, "wb");
  if (pFile == NULL)
    throw runtime_error("Could not open file (VideoParser::SaveFrame)");
  // PPM header: magic number, dimensions, maximum channel value.
  fprintf(pFile, "P6\n%d %d\n255\n", width, height);
  // Write one row of pixels at a time: linesize[0] may be larger than
  // width*3 (row padding), so the buffer cannot be dumped in one fwrite.
  for (int y = 0; y < height; y++)
    fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, width * 3, pFile);
  fclose(pFile);
  return szFilename;
}
// Open the video file, locate its first video stream, set up a decoder
// for it, and allocate the native-format (pFrame) and RGB (pFrameRGB)
// frame buffers used by getFrame().
// Throws std::runtime_error on any failure.
// NOTE(review): if a later step throws, resources acquired by earlier
// steps leak, since the destructor does not run for a partially
// constructed object — acceptable for a sample, worth fixing for
// production use.
VideoParser::VideoParser(const string& filename) {
  // Register all formats and codecs (required once before any open call).
  av_register_all();
  // Open the container and read its header.
  if (av_open_input_file(&pFormatCtx, filename.c_str(), NULL, 0, NULL) != 0)
    throw runtime_error("Couldn't open file");
  // Probe the streams to fill in codec parameters.
  if (av_find_stream_info(pFormatCtx) < 0)
    throw runtime_error("Couldn't find stream information");
  // Dump information about the file onto standard error (debug aid).
  dump_format(pFormatCtx, 0, filename.c_str(), 0);
  // Find the first video stream. nb_streams is unsigned, so the loop
  // index must be too.
  videoStream = -1;
  for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
    if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
      videoStream = i;
      break;
    }
  if (videoStream == -1)
    throw runtime_error("Didn't find a video stream");
  // The codec context is owned by the format context; we only borrow it.
  pCodecCtx = pFormatCtx->streams[videoStream]->codec;
  // Find the decoder for the video stream.
  pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
  if (pCodec == NULL) {
    cerr << "Unsupported codec!\n";
    throw runtime_error("Codec not found");
  }
  // Open codec
  if (avcodec_open(pCodecCtx, pCodec) < 0)
    throw runtime_error("Could not open codec");
  // Allocate the frame that receives decoded data in the codec's native
  // pixel format. (The original code never checked this allocation.)
  pFrame = avcodec_alloc_frame();
  if (pFrame == NULL)
    throw runtime_error("Allocation problem");
  // Allocate the frame that will hold the RGB24 conversion.
  pFrameRGB = avcodec_alloc_frame();
  if (pFrameRGB == NULL)
    throw runtime_error("Allocation problem");
  // Determine required buffer size and allocate the RGB pixel buffer.
  numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
                                pCodecCtx->height);
  buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
  if (buffer == NULL)
    throw runtime_error("Allocation problem");
  // Point pFrameRGB's data planes into buffer. The cast is valid because
  // AVFrame is a superset of AVPicture in this API version.
  avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
                 pCodecCtx->width, pCodecCtx->height);
}
// Read packets from the open file until one complete video frame has been
// decoded, convert it to RGB24, and save it to disk via SaveFrame().
// Returns the name of the saved file, or "" if the end of the stream (or
// a read error) was hit before a complete frame could be decoded.
string VideoParser::getFrame() {
  int frameFinished = 0;
  string frameFilename("");
  while (!frameFinished) {
    // BUG FIX: av_read_frame returns <0 at end of file or on error.
    // The original code only looped on success, so once the stream
    // ended the while-loop spun forever. Break out instead.
    if (av_read_frame(pFormatCtx, &packet) < 0)
      break;
    // Only decode packets belonging to the selected video stream.
    if (packet.stream_index == videoStream) {
      avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
                           packet.data, packet.size);
      // A single packet may contain less than a whole frame, in which
      // case frameFinished stays 0 and we keep reading.
      if (frameFinished) {
        // Convert from the codec's native pixel format to RGB24.
        img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
                    (AVPicture *)pFrame, pCodecCtx->pix_fmt,
                    pCodecCtx->width, pCodecCtx->height);
        // Save the frame to disk.
        frameFilename = SaveFrame(pFrameRGB, pCodecCtx->width,
                                  pCodecCtx->height);
      }
    }
    // Free the packet that was allocated by av_read_frame.
    av_free_packet(&packet);
  }
  return frameFilename;
}
/**
 * Release everything acquired by the constructor: the RGB pixel buffer,
 * both frames, the codec, and the input file.
 * (The codec context itself is owned by pFormatCtx; avcodec_close only
 * shuts the codec down, and av_close_input_file frees the context.)
 */
VideoParser::~VideoParser() {
  av_free(buffer);           // RGB pixel buffer
  av_free(pFrameRGB);        // RGB frame
  av_free(pFrame);           // native-format frame
  avcodec_close(pCodecCtx);  // close the codec
  av_close_input_file(pFormatCtx);  // close the video file
}