version: ffmpeg 6.x

FFmpeg API

打开一个视频文件,读取其流信息,找到视频编解码器,进行视频帧的解码和格式转换

1
2
3
4
5
6
7
8
9
10
11
12
// myffmpeg.h - public interface of the FFmpeg decode demo (FFmpeg 6.x).
#pragma once
#include <stdio.h>
// Needed by older stdint.h implementations so UINT64_C etc. are visible
// when the FFmpeg headers are compiled as C++. NOTE(review): assumes no
// earlier header already pulled in <stdint.h> - TODO confirm.
#define __STDC_CONSTANT_MACROS
// Tell SDL not to replace main() with its own entry point.
#define SDL_MAIN_HANDLED

// FFmpeg is a C library: wrap the headers so the C++ compiler does not
// name-mangle their declarations.
extern "C" {
#include <libavcodec/avcodec.h>   // decoding
#include <libavformat/avformat.h> // demuxing / container I/O
#include <libswscale/swscale.h>   // pixel-format conversion
#include <libavutil/imgutils.h>   // image buffer helpers
}
// Decodes a hard-coded video file and converts frames to YUV420P; returns 0 on success.
int ffmpeg_test();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
// myffmpeg.cpp
#include "myffmpeg.h"
int ffmpeg_test() {
const char filepath[] = "D:/ffmpeg/learn/miku.mp4";
// av_register_all();
// avformat_network_init();

AVFormatContext *pFormatCtx = avformat_alloc_context();
// open av && input msg into pFormatCtx
avformat_open_input(&pFormatCtx, filepath, NULL, NULL); // !=0 error
// get stream msg and codec param
avformat_find_stream_info(pFormatCtx, NULL); // < 0 error

int videoindex = -1;
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
break;
}
}
printf("v codec is: %d", pFormatCtx->streams[videoindex]->codecpar->codec_id); // 27 -> h264
const AVCodec *pCodec = avcodec_find_decoder(pFormatCtx->streams[videoindex]->codecpar->codec_id); // ==null error
AVCodecContext *pCodecCtx = avcodec_alloc_context3(pCodec);
// decode and encode parameters filled into codec context
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar); // <0 error
// bind codec and context
avcodec_open2(pCodecCtx, pCodec, NULL); // <0 error

AVFrame *pFrameYUV = av_frame_alloc();
// alloc buffer to decoded frame
uint8_t *out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height, 1));
// dpending on specified image parameters and provieded array set data ptr and line size
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height, 1);

// before decoding
AVPacket *packet = av_packet_alloc();

// cout file msg
printf("--------------- File Information ----------------\n");
av_dump_format(pFormatCtx, 0, filepath, 0);
printf("-------------------------------------------------\n");

// single pic transation to specified format
SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

int frame_cnt = 0;
AVFrame *pFrame = av_frame_alloc();
while (av_read_frame(pFormatCtx, packet) >= 0) {
// while read stream is video
if (packet->stream_index == videoindex) {
int ret = avcodec_send_packet(pCodecCtx, packet); //send packet into codec
while (ret >= 0) {
ret = avcodec_receive_frame(pCodecCtx, pFrame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
}

sws_scale(img_convert_ctx, (const uint8_t *const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameYUV->data, pFrameYUV->linesize);

printf("Decoded frame index: %d\n", frame_cnt++);
}
}
av_packet_unref(packet); // 替代 av_free_packet
}

sws_freeContext(img_convert_ctx);
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}

SDL

将yuv文件通过SDL显示至屏幕

1
2
3
4
5
6
7
8
9
10
// mysdl.h - public interface of the raw-YUV SDL2 playback demo.
#pragma once
// Tell SDL not to replace main() with its own entry point.
#define SDL_MAIN_HANDLED
#include <stdio.h>
#include <stdint.h>
// SDL is a C library: avoid C++ name mangling of its declarations.
extern "C"{
#include <SDL.h>
}

// Plays a hard-coded 640x360 YUV420P file in an SDL window; returns 0 on success.
int sdl_first();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
// mysdl.cpp
#define _CRT_SECURE_NO_WARNINGS
#include "mysdl.h"


int sdl_first()
{
const int bpp = 12;
int screen_w = 640, screen_h = 360;
const int pixel_w = 640, pixel_h = 360;
unsigned char buffer[pixel_w * pixel_h * bpp / 8];
SDL_Init(SDL_INIT_VIDEO);
SDL_Window *screen;
//SDL 2.0 Support for multiple windows
screen = SDL_CreateWindow("Simplest Video Play SDL2", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
if (!screen) {
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
return -1;
}
SDL_Renderer *sdlRenderer = SDL_CreateRenderer(screen, -1, 0);

Uint32 pixformat = SDL_PIXELFORMAT_IYUV;
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
SDL_Texture *sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat,
SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);

FILE *fp = fopen("D:/ffmpeg/learn/sintel_640_360.yuv", "rb+");

SDL_Rect sdlRect;

while (1) {
if (fread(buffer, 1, pixel_w * pixel_h * bpp / 8, fp) != pixel_w * pixel_h * bpp / 8) {
// Loop
fseek(fp, 0, SEEK_SET);
fread(buffer, 1, pixel_w * pixel_h * bpp / 8, fp);
}

SDL_UpdateTexture(sdlTexture, NULL, buffer, pixel_w);

sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;

SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);
//Delay 40ms
SDL_Delay(40);

}
SDL_Quit();
return 0;
}

通过多线程展现yuv

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
// mysdll.h - interface of the multi-threaded raw-YUV SDL2 playback demo.
#pragma once
#define _CRT_SECURE_NO_WARNINGS
#include <stdio.h>

// SDL is a C library: avoid C++ name mangling of its declarations.
extern "C"
{
#include <SDL.h>
};

//Refresh Event - posted by the worker thread every 40 ms to request a redraw
#define REFRESH_EVENT (SDL_USEREVENT + 1)
//Break - posted once by the worker thread when it exits
#define BREAK_EVENT (SDL_USEREVENT + 2)

// Worker thread body: posts REFRESH_EVENTs until told to stop (SDL thread signature).
int refresh_video(void *opaque);
// Event-driven YUV playback; returns 0 on success.
int sdl_second();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
// mysdll.cpp
#include "mysdll.h"

const int bpp = 12;
int screen_w = 640, screen_h = 360;
const int pixel_w = 640, pixel_h = 360;
unsigned char buffer[pixel_w * pixel_h * bpp / 8];
int thread_exit = 0;

int refresh_video(void *opaque) {
thread_exit = 0;
while (thread_exit == 0) {
SDL_Event event;
event.type = REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(40);
}
thread_exit = 0;
//Break
SDL_Event event;
event.type = BREAK_EVENT;
SDL_PushEvent(&event);
return 0;
}

int sdl_second()
{
SDL_Init(SDL_INIT_VIDEO);
SDL_Window *screen = SDL_CreateWindow("Simplest Video Play SDL2", SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
SDL_Renderer *sdlRenderer = SDL_CreateRenderer(screen, -1, 0);

Uint32 pixformat = SDL_PIXELFORMAT_IYUV;
SDL_Texture *sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat,
SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);

FILE *fp = fopen("D:/ffmpeg/learn/sintel_640_360.yuv", "rb+");
SDL_Rect sdlRect;

SDL_Thread *refresh_thread = SDL_CreateThread(refresh_video, NULL, NULL);
SDL_Event event;
while (1) {
//Wait
SDL_WaitEvent(&event);
if (event.type == REFRESH_EVENT) {
if (fread(buffer, 1, pixel_w * pixel_h * bpp / 8, fp) != pixel_w * pixel_h * bpp / 8) {
fseek(fp, 0, SEEK_SET);
fread(buffer, 1, pixel_w * pixel_h * bpp / 8, fp);
}

SDL_UpdateTexture(sdlTexture, NULL, buffer, pixel_w);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);

}
else if (event.type == SDL_WINDOWEVENT) {
//If Resize
SDL_GetWindowSize(screen, &screen_w, &screen_h);
}
else if (event.type == SDL_QUIT) {
thread_exit = 1;
}
else if (event.type == BREAK_EVENT) {
break;
}
}
SDL_Quit();
return 0;
}

SDL && FFmpeg

ffmpeg将视频转换为yuv格式并通过SDL播放

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
//sdl_ffmpeg.h - interface of the FFmpeg-decode + SDL2-display demo.
#pragma once
#include <stdio.h>
#include <stdint.h>
// Needed by older stdint.h implementations so UINT64_C etc. are visible
// when the FFmpeg headers are compiled as C++.
#define __STDC_CONSTANT_MACROS
// Tell SDL not to replace main() with its own entry point.
#define SDL_MAIN_HANDLED

// FFmpeg and SDL are C libraries: avoid C++ name mangling.
extern "C"
{
#include <libavcodec/avcodec.h>   // decoding
#include <libavformat/avformat.h> // demuxing
#include <libswscale/swscale.h>   // pixel-format conversion
#include <libavutil/imgutils.h>   // image buffer helpers
#include <SDL.h>
};

//Refresh Event - posted by the pacing thread to request decode+render of the next packet
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
// Posted once by the pacing thread when it exits.
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)

// Pacing thread body (SDL thread signature): posts SFM_REFRESH_EVENTs until told to stop.
int sfp_refresh_thread(void *opaque);
// Decode a hard-coded video and display it via SDL; returns 0 on success.
int sdl_ffmpeg();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
//sdl_ffmpeg.cpp
#include "sdl_ffmpeg.h"

int thread_ctrl = 0;
int sfp_refresh_thread(void *opaque) {
thread_ctrl = 0;
while (!thread_ctrl) {
SDL_Event event;
event.type = SFM_REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(20);
}
thread_ctrl = 0;
SDL_Event event;
event.type = SFM_BREAK_EVENT;
SDL_PushEvent(&event);

return 0;
}


int sdl_ffmpeg()
{
const char filepath[] = "D:/ffmpeg/learn/test.mov";
AVFormatContext *pFormatCtx = avformat_alloc_context();
avformat_open_input(&pFormatCtx, filepath, nullptr, nullptr);
avformat_find_stream_info(pFormatCtx, nullptr);

int videoindex = -1;
for (unsigned char i = 0; i < pFormatCtx->nb_streams; ++i) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
break;
}
}
const AVCodec *pCodec = avcodec_find_decoder(pFormatCtx->streams[videoindex]->codecpar->codec_id);
AVCodecContext *pCodecCtx = avcodec_alloc_context3(pCodec);
avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar);
avcodec_open2(pCodecCtx, pCodec, nullptr);

AVFrame *pFrameYUV = av_frame_alloc();
uint8_t *buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height, 1));

av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height, 1);

SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);


SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER);
int screen_w = pCodecCtx->width;
int screen_h = pCodecCtx->height;
SDL_Window* screen = SDL_CreateWindow("Simplest ffmpeg player's Window",
SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);

SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
SDL_Texture *sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
SDL_Rect sdlRect;
AVPacket *packet = av_packet_alloc();
SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
SDL_Event event;
AVFrame *pFrame = av_frame_alloc();
int ret;
while(true) {
SDL_WaitEvent(&event);
if (event.type == SFM_REFRESH_EVENT) {
if (av_read_frame(pFormatCtx, packet) >= 0) {
if (packet->stream_index == videoindex) {
ret = avcodec_send_packet(pCodecCtx, packet);
while (ret >= 0) {
ret = avcodec_receive_frame(pCodecCtx, pFrame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
}
sws_scale(img_convert_ctx, (const uint8_t *const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, &sdlRect, &sdlRect);
//SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(sdlRenderer);
}
}
av_packet_unref(packet);
}
else {
thread_ctrl = 1;
}
}
else if (event.type == SDL_WINDOWEVENT) {
SDL_GetWindowSize(screen, &screen_w, &screen_h);
}
else if (event.type == SDL_QUIT) {
thread_ctrl = 1;
}
else if (event.type == SFM_BREAK_EVENT) {
break;
}

}

sws_freeContext(img_convert_ctx);
SDL_Quit();
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}

项目

备份

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
// 客户端pro文件
#-------------------------------------------------
#
# Project created by QtCreator 2020-11-03T18:22:45
#
#-------------------------------------------------

# Core Qt modules; the widgets module is added separately for Qt 5+ below.
QT += core gui

# Application icon (Windows resource).
RC_ICONS = ./images/message.ico

greaterThan(QT_MAJOR_VERSION, 4): QT += widgets


# Sub-module .pri includes: networking, A/V capture, shared UI helpers.
include(./netapi/netapi.pri)
INCLUDEPATH += $$PWD/netapi

include(./RecordVideo/RecordVideo.pri)
INCLUDEPATH += $$PWD/RecordVideo

include(./RecordAudio/RecordAudio.pri)
INCLUDEPATH += $$PWD/RecordAudio

include(./uiapi/uiapi.pri)
INCLUDEPATH += $$PWD/uiapi

TARGET = myqq
TEMPLATE = app

# OpenCV paths. NOTE(review): the trailing backslashes double as qmake
# line continuations here, and LIBS normally needs -L<dir> plus
# -l<libname> entries rather than a bare directory - TODO confirm this
# actually links on this machine.
INCLUDEPATH+=D:\opencv\qt_opencv\OpenCV\include\opencv\
D:\opencv\qt_opencv\OpenCV\include\opencv2\
D:\opencv\qt_opencv\OpenCV\include\
LIBS+=D:\opencv\qt_opencv\OpenCV\lib


SOURCES += main.cpp\
myqq.cpp \
logindialog.cpp \
chatdialog.cpp \
roomdialog.cpp \
useritem.cpp \
IMToolBox.cpp \
videoitem.cpp

HEADERS += myqq.h \
logindialog.h \
chatdialog.h \
roomdialog.h \
useritem.h \
IMToolBox.h \
videoitem.h

FORMS += myqq.ui \
logindialog.ui \
chatdialog.ui \
roomdialog.ui \
useritem.ui \
videoitem.ui

RESOURCES += \
resource.qrc