// Gist by @aneury1 — created May 31, 2025
// https://gist.github.com/aneury1/4289fba9b4fb202d6149ae0879067ca7
// Minimal FFmpeg + SDL2 video player.
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
////g++ mvideo.cpp -o play_video `pkg-config --cflags --libs sdl2` -lavformat -lavcodec -lavutil -lswscale
#include <SDL2/SDL.h>
#include <iostream>
int main(int argc, char* argv[]) {
if (argc < 2) {
std::cout << "Usage: " << argv[0] << " <video_file>\n";
return -1;
}
const char* filepath = argv[1];
///av_register_all();
AVFormatContext* pFormatCtx = avformat_alloc_context();
if (avformat_open_input(&pFormatCtx, filepath, nullptr, nullptr) != 0) {
std::cerr << "Couldn't open file.\n";
return -1;
}
avformat_find_stream_info(pFormatCtx, nullptr);
int videoStreamIndex = -1;
for (unsigned i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1) {
std::cerr << "Couldn't find a video stream.\n";
return -1;
}
AVCodecParameters* pCodecParams = pFormatCtx->streams[videoStreamIndex]->codecpar;
const AVCodec* pCodec = avcodec_find_decoder(pCodecParams->codec_id);
AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
avcodec_parameters_to_context(pCodecCtx, pCodecParams);
avcodec_open2(pCodecCtx, pCodec, nullptr);
AVFrame* pFrame = av_frame_alloc();
AVFrame* pFrameRGB = av_frame_alloc();
SwsContext* swsCtx = sws_getContext(
pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24,
SWS_BILINEAR, nullptr, nullptr, nullptr);
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGB24,
pCodecCtx->width, pCodecCtx->height, 1);
SDL_Init(SDL_INIT_VIDEO);
SDL_Window* window = SDL_CreateWindow("SDL2 Video Player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_SHOWN);
SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24,
SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
AVPacket* packet = av_packet_alloc();
while (av_read_frame(pFormatCtx, packet) >= 0) {
if (packet->stream_index == videoStreamIndex) {
if (avcodec_send_packet(pCodecCtx, packet) == 0 &&
avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
sws_scale(swsCtx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
SDL_UpdateTexture(texture, nullptr, pFrameRGB->data[0], pFrameRGB->linesize[0]);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, texture, nullptr, nullptr);
SDL_RenderPresent(renderer);
SDL_Event e;
SDL_PollEvent(&e);
if (e.type == SDL_QUIT) break;
SDL_Delay(1000 / 30); // crude frame delay (~30 FPS)
}
}
av_packet_unref(packet);
}
// Cleanup
av_frame_free(&pFrame);
av_frame_free(&pFrameRGB);
avcodec_free_context(&pCodecCtx);
avformat_close_input(&pFormatCtx);
SDL_DestroyTexture(texture);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}
// (gist comment-section footer removed so the file compiles)