@KunYi
Created July 25, 2025 00:37
RTSP client using FFmpeg/SDL2 for a 720p stream
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/time.h>
#include <SDL2/SDL.h>
#include <stdio.h>
#include <stdlib.h>
#define RTSP_URL "rtsp://172.32.0.93:554/live/0"
int main(void) {
    // Silence FFmpeg logging
    av_log_set_level(AV_LOG_QUIET);

    // FFmpeg state
    AVFormatContext *fmt_ctx = NULL;
    AVCodecContext *dec_ctx = NULL;
    const AVCodec *decoder = NULL;
    AVFrame *frame = NULL;
    AVPacket *pkt = NULL;

    // Open the RTSP stream
    AVDictionary *opts = NULL;
    av_dict_set(&opts, "rtsp_transport", "udp", 0);  // use UDP transport
    av_dict_set(&opts, "max_delay", "100000", 0);    // maximum delay: 100 ms
    av_dict_set(&opts, "buffer_size", "131072", 0);  // 128 KB receive buffer
    av_dict_set(&opts, "probesize", "8192", 0);
    av_dict_set(&opts, "analyzeduration", "100000", 0);
    if (avformat_open_input(&fmt_ctx, RTSP_URL, NULL, &opts) < 0) {
        fprintf(stderr, "Failed to open RTSP stream\n");
        return -1;
    }
    av_dict_free(&opts);

    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        fprintf(stderr, "Failed to find stream info\n");
        return -1;
    }
    // Find the first video stream
    int video_stream_idx = -1;
    for (unsigned int i = 0; i < fmt_ctx->nb_streams; i++) {
        if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = (int)i;
            break;
        }
    }
    if (video_stream_idx == -1) {
        fprintf(stderr, "No video stream found\n");
        return -1;
    }
    // Set up the decoder for the video stream's codec
    decoder = avcodec_find_decoder(fmt_ctx->streams[video_stream_idx]->codecpar->codec_id);
    if (!decoder) {
        fprintf(stderr, "Decoder not found\n");
        return -1;
    }
    dec_ctx = avcodec_alloc_context3(decoder);
    if (!dec_ctx) {
        fprintf(stderr, "Failed to allocate codec context\n");
        return -1;
    }
    avcodec_parameters_to_context(dec_ctx, fmt_ctx->streams[video_stream_idx]->codecpar);
    if (avcodec_open2(dec_ctx, decoder, NULL) < 0) {
        fprintf(stderr, "Failed to open codec\n");
        return -1;
    }
    // Initialize SDL and create a 1280x720 window, renderer, and YUV texture
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "SDL init failed: %s\n", SDL_GetError());
        return -1;
    }
    SDL_Window *window = SDL_CreateWindow("Camera", SDL_WINDOWPOS_UNDEFINED,
                                          SDL_WINDOWPOS_UNDEFINED, 1280, 720, 0);
    SDL_Renderer *renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV,
                                             SDL_TEXTUREACCESS_STREAMING, 1280, 720);
    if (!window || !renderer || !texture) {
        fprintf(stderr, "SDL setup failed: %s\n", SDL_GetError());
        return -1;
    }

    // Allocate the reusable frame and packet
    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Failed to allocate frame\n");
        return -1;
    }
    pkt = av_packet_alloc();
    if (!pkt) {
        fprintf(stderr, "Failed to allocate packet\n");
        return -1;
    }
    // Main loop: read packets, decode, and render each frame
    SDL_Event event;
    while (1) {
        if (av_read_frame(fmt_ctx, pkt) < 0) {
            break;
        }
        if (pkt->stream_index == video_stream_idx) {
            if (avcodec_send_packet(dec_ctx, pkt) < 0) {
                av_packet_unref(pkt);
                continue;
            }
            while (avcodec_receive_frame(dec_ctx, frame) == 0) {
                // Render the decoded YUV planes with SDL
                SDL_UpdateYUVTexture(texture, NULL,
                                     frame->data[0], frame->linesize[0],
                                     frame->data[1], frame->linesize[1],
                                     frame->data[2], frame->linesize[2]);
                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, texture, NULL, NULL);
                SDL_RenderPresent(renderer);
            }
        }
        av_packet_unref(pkt);

        // Handle SDL events (window close)
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                goto cleanup;
            }
        }
    }
cleanup:
    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    av_packet_free(&pkt);
    av_frame_free(&frame);
    avcodec_free_context(&dec_ctx);
    avformat_close_input(&fmt_ctx);
    return 0;
}
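
Note that the texture above is hard-coded to 1280x720 IYUV; if the camera delivers a different resolution or pixel format, SDL_UpdateYUVTexture will read the planes incorrectly. A minimal sketch of sizing the texture from the opened decoder instead is shown below. It assumes the stream decodes to AV_PIX_FMT_YUV420P (anything else would first need an sws_scale conversion), and create_texture_for_stream is a hypothetical helper, not part of the gist.

#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <SDL2/SDL.h>

/* Hypothetical helper: create the streaming texture from the decoder's
 * reported dimensions (available after avcodec_open2) instead of
 * hard-coding 1280x720. Assumes planar YUV 4:2:0 output. */
static SDL_Texture *create_texture_for_stream(SDL_Renderer *renderer,
                                              const AVCodecContext *dec_ctx) {
    if (dec_ctx->pix_fmt != AV_PIX_FMT_YUV420P) {
        fprintf(stderr, "Unsupported pixel format: %d\n", dec_ctx->pix_fmt);
        return NULL;
    }
    return SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV,
                             SDL_TEXTUREACCESS_STREAMING,
                             dec_ctx->width, dec_ctx->height);
}

The program can typically be built with something like `gcc rtsp_client.c $(pkg-config --cflags --libs libavformat libavcodec libavutil sdl2) -o rtsp_client`, assuming pkg-config files for FFmpeg and SDL2 are installed (the source file name here is arbitrary).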