Add audio struct, add MediaParam, add bugList entries
parent 146a955841
commit 4d82e2b0a5

include/audioDecoder.h (new file, 0 lines)
@@ -1,14 +1,5 @@
-#ifndef DECODER_H
-#define DECODER_H
-extern "C" {
-#include "libavcodec/avcodec.h"
-#include "libavformat/avformat.h"
-#include "libavutil/imgutils.h"
-}
-#include <queue>
-#include <condition_variable>
-#include <mutex>
+#ifndef DECODEPARAM_H
+#define DECODEPARAM_H
 
 template<typename T>
 requires std::is_same_v<T, AVPacket> || std::is_same_v<T, AVFrame>
 struct MediaQueue
@@ -103,7 +94,23 @@ struct VideoParam
     bool quit = false;
 };
 
-void InitDecoder(const char* filepath, VideoParam& param);
-void RequestPacket(VideoParam& param);
-void RequestFrame(VideoParam& param);
+struct AudioParam
+{
+    MediaQueue<AVPacket> packetQueue;
+    MediaQueue<AVFrame> frameQueue;
+    AVFormatContext* fmtCtx;
+    AVCodecContext* codecCtx;
+    int audioStreamIndex;
+
+    bool eof = false;
+
+    bool pause = false;
+    bool quit = false;
+};
+
+struct MediaParam
+{
+    VideoParam videoParam;
+    AudioParam audioParam;
+};
 #endif
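The body of MediaQueue<T> is outside this hunk, but the rest of the diff relies on three operations: push(), a blocking pop() that can be interrupted through a quit flag, and isFill(). The following is a minimal sketch of a queue with that interface, written against FFmpeg's reference-counting API; it illustrates the assumed contract and is not the definition that lives in the header.

// Sketch only: a bounded, thread-safe queue matching the push/pop/isFill calls
// used elsewhere in this commit. The real MediaQueue may differ; the plain bool
// quit flag is also read unsynchronized here for brevity (an std::atomic<bool>
// would be safer).
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavutil/frame.h"
}
#include <chrono>
#include <condition_variable>
#include <cstddef>
#include <mutex>
#include <queue>
#include <type_traits>

template<typename T>
requires std::is_same_v<T, AVPacket> || std::is_same_v<T, AVFrame>
struct MediaQueueSketch
{
    std::queue<T*> items;
    std::mutex mtx;
    std::condition_variable cond;
    std::size_t capacity = 64;                 // assumed bound checked by isFill()

    bool isFill()
    {
        std::scoped_lock lock(mtx);
        return items.size() >= capacity;
    }

    // Stores a reference-counted copy, so the caller may av_*_unref its own object,
    // which is what RequestPacket/RequestVideoFrame do after each push.
    void push(T* src)
    {
        T* copy;
        if constexpr (std::is_same_v<T, AVPacket>)
            copy = av_packet_clone(src);
        else
            copy = av_frame_clone(src);
        {
            std::scoped_lock lock(mtx);
            items.push(copy);
        }
        cond.notify_one();
    }

    // Fills dst with the front element; returns false when interrupted by quit.
    bool pop(T* dst, bool block, const bool& quit)
    {
        std::unique_lock lock(mtx);
        while (items.empty())
        {
            if (!block || quit)
                return false;
            cond.wait_for(lock, std::chrono::milliseconds(10));
        }
        T* front = items.front();
        items.pop();
        if constexpr (std::is_same_v<T, AVPacket>)
        {
            av_packet_move_ref(dst, front);
            av_packet_free(&front);
        }
        else
        {
            av_frame_move_ref(dst, front);
            av_frame_free(&front);
        }
        return true;
    }
};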
include/mediaDecoder.h (new file, 17 lines)
@@ -0,0 +1,17 @@
+#ifndef DECODER_H
+#define DECODER_H
+extern "C" {
+#include "libavcodec/avcodec.h"
+#include "libavformat/avformat.h"
+#include "libavutil/imgutils.h"
+}
+#include <queue>
+#include <condition_variable>
+#include <mutex>
+
+#include "decodeParam.h"
+
+void InitDecoder(const char* filepath, VideoParam& param);
+void RequestPacket(MediaParam& param);
+void RequestVideoFrame(MediaParam& param);
+#endif
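Taken together, these three functions form the decode pipeline that main.cc (below) drives: InitDecoder opens the file and fills VideoParam, RequestPacket demuxes into the packet queue, and RequestVideoFrame decodes packets into the frame queue that the render loop pops from. The wiring, condensed from the InitVideo changes in this commit:

// Condensed from the main.cc hunks below; not new code, just the wiring in one place.
InitDecoder(targetFilepath, param.videoParam);              // open file, set up codec
std::jthread(RequestPacket, std::ref(param)).detach();      // demux thread -> packetQueue
std::jthread(RequestVideoFrame, std::ref(param)).detach();  // decode thread -> frameQueue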
main.cc (32 lines changed)

@@ -6,7 +6,7 @@
 #include <SDL2/SDL.h>
 
 #include "util.h"
-#include "videoDecoder.h"
+#include "mediaDecoder.h"
 #include "shaderService.h"
 #include "shader.h"
 using std::cout;
@@ -18,14 +18,14 @@ struct OpenglVideoParam
     unsigned int texs[3];
 };
 
-int InitVideo(SDL_Window*& window, const char* targetFilepath, VideoParam& videoParam, OpenglVideoParam& openglVideoParam, ShaderService*& shaderService)
+int InitVideo(SDL_Window*& window, const char* targetFilepath, MediaParam& param, OpenglVideoParam& openglVideoParam, ShaderService*& shaderService)
 {
-    InitDecoder(targetFilepath, videoParam);
+    InitDecoder(targetFilepath, param.videoParam);
     //FIX: when app exited, the fmtCtx was freed, so need notify decode thread to stop decode and exit.
-    std::jthread(RequestPacket, std::ref(videoParam)).detach();
-    std::jthread(RequestFrame, std::ref(videoParam)).detach();
-    const int client_width = videoParam.width / 2;
-    const int client_height = videoParam.height / 2;
+    std::jthread(RequestPacket, std::ref(param)).detach();
+    std::jthread(RequestVideoFrame, std::ref(param)).detach();
+    const int client_width = param.videoParam.width / 2;
+    const int client_height = param.videoParam.height / 2;
     window = SDL_CreateWindow(
         "MP",
         SDL_WINDOWPOS_UNDEFINED,
@@ -125,10 +125,10 @@ void InitImg(SDL_Window*& window, const char* filepath, SDL_Renderer*& renderer,
     texture = SDL_CreateTextureFromSurface(renderer, surface);
 }
 
-void OpenglRenderVideo(VideoParam& videoParam, const OpenglVideoParam& openglVideoParam, ShaderService* shaderService)
+void OpenglRenderVideo(MediaParam& param, const OpenglVideoParam& openglVideoParam, ShaderService* shaderService)
 {
     AVFrame* frame = av_frame_alloc();
-    videoParam.frameQueue.pop(frame, true, videoParam.quit);
+    param.videoParam.frameQueue.pop(frame, true, param.videoParam.quit);
     // TODO: TIMER
     glBindTexture(GL_TEXTURE_2D, openglVideoParam.texs[0]);
     glPixelStoref(GL_UNPACK_ROW_LENGTH, static_cast<float>(frame->linesize[0]));
@@ -171,9 +171,8 @@ int main(int argc, char** argv)
 
     // INIT
 
-    int client_width, client_height;
     SDL_Window* window = nullptr;
-    VideoParam videoParam{};
+    MediaParam mediaParam{};
     OpenglVideoParam openglVideoParam{};
     ShaderService* shaderService = nullptr;
     SDL_Surface* surface = nullptr;
@@ -189,8 +188,8 @@ int main(int argc, char** argv)
     {
     case FileType::VIDEO:
     {
-        InitVideo(window, targetFilepath, videoParam, openglVideoParam, shaderService);
-        const auto stream_frame_rate = videoParam.fmtCtx->streams[videoParam.videoStreamIndex]->avg_frame_rate;
+        InitVideo(window, targetFilepath, mediaParam, openglVideoParam, shaderService);
+        const auto stream_frame_rate = mediaParam.videoParam.fmtCtx->streams[mediaParam.videoParam.videoStreamIndex]->avg_frame_rate;
         framerate = static_cast<double>(stream_frame_rate.den) / stream_frame_rate.num;
         break;
     }
@@ -238,11 +237,10 @@ int main(int argc, char** argv)
         switch (fileType)
         {
         case FileType::VIDEO:
-            OpenglRenderVideo(videoParam, openglVideoParam, shaderService);
+            OpenglRenderVideo(mediaParam, openglVideoParam, shaderService);
            SDL_GL_SwapWindow(window);
            std::this_thread::sleep_until(current_time + std::chrono::milliseconds(30));
            current_time = std::chrono::system_clock::now();
-            cout << SDL_GetTicks() << '\n';
            break;
         case FileType::IMG:
            RenderPicture(window, renderer, texture);
@@ -252,8 +250,8 @@ int main(int argc, char** argv)
         }
     }
 
-    avcodec_close(videoParam.codecCtx);
-    avformat_close_input(&(videoParam.fmtCtx));
+    avcodec_close(mediaParam.videoParam.codecCtx);
+    avformat_close_input(&(mediaParam.videoParam.fmtCtx));
     SDL_GL_DeleteContext(openglVideoParam.glContext);
     SDL_DestroyWindow(window);
     SDL_Quit();
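The //FIX comment in InitVideo points at a real teardown hazard: main frees fmtCtx and codecCtx while the two detached decode threads may still be reading from them. One possible shape for that fix, sketched here as a hypothetical ShutdownDecoder helper that is not part of this commit, is to raise the quit flags first and only then release the FFmpeg contexts.

// Hypothetical helper, assuming the quit flags are what the blocking pop() checks.
// Shown only to make the //FIX note concrete; not part of this commit.
#include <chrono>
#include <thread>
#include "decodeParam.h"

void ShutdownDecoder(MediaParam& param)
{
    param.videoParam.quit = true;              // unblock RequestVideoFrame's pop()
    param.audioParam.quit = true;

    // Crude grace period for the detached threads; joining the std::jthread
    // handles (instead of detaching them) would make this deterministic.
    std::this_thread::sleep_for(std::chrono::milliseconds(200));

    avcodec_close(param.videoParam.codecCtx);
    avformat_close_input(&param.videoParam.fmtCtx);
}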
src/audioDecoder.cc (new file, 2 lines)

@@ -0,0 +1,2 @@
+#include"audioDecoder.h"
+
@@ -1,4 +1,4 @@
-#include "videoDecoder.h"
+#include "mediaDecoder.h"
 #include <thread>
 #include <chrono>
 using namespace std::literals::chrono_literals;
@@ -26,31 +26,32 @@ void InitDecoder(const char* filepath, VideoParam& param) {
     param.height = codecFmt->height;
 }
 
-void RequestPacket(VideoParam& param) {
-    const auto& fmtCtx = param.fmtCtx;
-    const auto& videoStreamIndex = param.videoStreamIndex;
+void RequestPacket(MediaParam& param) {
+    const auto& fmtCtx = param.videoParam.fmtCtx;
+    const auto& videoStreamIndex = param.videoParam.videoStreamIndex;
 
     AVPacket* packet = av_packet_alloc();
 
     while (true) {
-        if (param.packetQueue.isFill()) {
+        if (param.videoParam.packetQueue.isFill()) {
             std::this_thread::sleep_for(100ms);
             continue;
         }
         //FIX:
         const int ret = av_read_frame(fmtCtx, packet);
-        if (param.eof) {
+        if (param.videoParam.eof) {
             std::this_thread::sleep_for(100ms);
+            av_packet_unref(packet);
             return;
         }
         if (ret == 0) {
             if (packet->stream_index == videoStreamIndex) {
-                param.packetQueue.push(packet);
+                param.videoParam.packetQueue.push(packet);
                 av_packet_unref(packet);
             }
             else if (ret == AVERROR_EOF)
             {
-                param.eof = true;
+                param.videoParam.eof = true;
                 av_packet_unref(packet);
                 break;
             }
@@ -58,28 +59,27 @@ void RequestPacket(VideoParam& param) {
                 av_packet_unref(packet);
             }
         }
-        else if (param.fmtCtx->pb->error == 0) {
+        else if (param.videoParam.fmtCtx->pb->error == 0) {
             std::this_thread::sleep_for(100ms);
         }
     }
     av_packet_unref(packet);
 }
 
-void RequestFrame(VideoParam& param) {
-    const auto& fmtCtx = param.fmtCtx;
-    const auto& codecCtx = param.codecCtx;
-    const auto& videoStreamIndex = param.videoStreamIndex;
+void RequestVideoFrame(MediaParam& param) {
+    const auto& fmtCtx = param.videoParam.fmtCtx;
+    const auto& codecCtx = param.videoParam.codecCtx;
+    const auto& videoStreamIndex = param.videoParam.videoStreamIndex;
 
     AVPacket* packet = av_packet_alloc();
     AVFrame* frame = av_frame_alloc();
-    //frame->format = AV_PIX_FMT_YUV420P;
     while (true) {
-        if (param.frameQueue.isFill()) {
+        if (param.videoParam.frameQueue.isFill()) {
             std::this_thread::sleep_for(30ms);
             continue;
         }
-        if (!param.packetQueue.pop(packet, true, param.quit)) {
-            if (param.quit)
+        if (!param.videoParam.packetQueue.pop(packet, true, param.videoParam.quit)) {
+            if (param.videoParam.quit)
             {
                 av_packet_unref(packet);
                 av_frame_unref(frame);
@@ -97,7 +97,7 @@ void RequestFrame(VideoParam& param) {
         {
             continue;
         }
-        param.frameQueue.push(frame);
+        param.videoParam.frameQueue.push(frame);
         av_frame_unref(frame);
     }
 }
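RequestPacket in this commit still forwards only video packets; the new AudioParam, with its own packetQueue and audioStreamIndex, is not yet fed. Below is a hedged sketch of how the demux step could serve both queues. DemuxOnePacket is a hypothetical helper, and it assumes InitDecoder (or a future audio init path) fills in param.audioParam.audioStreamIndex, which this diff does not show.

// Illustrative only: route each demuxed packet to the queue of its stream.
// Assumes audioStreamIndex has been initialized elsewhere (not in this commit).
#include "decodeParam.h"

static bool DemuxOnePacket(MediaParam& param, AVPacket* packet)
{
    const int ret = av_read_frame(param.videoParam.fmtCtx, packet);
    if (ret == AVERROR_EOF) {
        param.videoParam.eof = true;
        param.audioParam.eof = true;
        return false;                          // nothing more to read
    }
    if (ret < 0) {
        return true;                           // transient error: caller may retry
    }

    if (packet->stream_index == param.videoParam.videoStreamIndex) {
        param.videoParam.packetQueue.push(packet);
    } else if (packet->stream_index == param.audioParam.audioStreamIndex) {
        param.audioParam.packetQueue.push(packet);
    }
    av_packet_unref(packet);                   // each queue keeps its own copy
    return true;
}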