Audio playback logic
parent 5e794cf825
commit f005b19ee4
@@ -0,0 +1,9 @@
+#ifndef AUDIODECODER_H
+#define AUDIODECODER_H
+#include <decodeParam.h>
+
+int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize);
+void audioCallback(void* userdata, uint8_t* stream, int len);
+void RequestAudioPacket(MediaParam& param);
+
+#endif
@@ -1,5 +1,17 @@
 #ifndef DECODEPARAM_H
 #define DECODEPARAM_H
+
+extern "C" {
+#include "libavcodec/avcodec.h"
+#include "libavformat/avformat.h"
+#include "libavutil/imgutils.h"
+}
+#include <queue>
+#include <condition_variable>
+#include <mutex>
+#include <chrono>
+using namespace std::literals::chrono_literals;
+
 template<typename T>
 requires std::is_same_v<T, AVPacket> || std::is_same_v<T, AVFrame>
 struct MediaQueue
@@ -83,7 +95,6 @@ struct VideoParam
 {
     MediaQueue<AVPacket> packetQueue;
    MediaQueue<AVFrame> frameQueue;
-    AVFormatContext* fmtCtx;
     AVCodecContext* codecCtx;
     int width;
     int height;
@@ -98,13 +109,13 @@ struct VideoParam
 struct AudioParam
 {
     MediaQueue<AVPacket> packetQueue;
-    MediaQueue<AVFrame> frameQueue;
-    AVFormatContext* fmtCtx;
     AVCodecContext* codecCtx;
     int audioStreamIndex;
+    static constexpr int MAX_BUFFER_SIZE = 192000;
+    uint8_t* buffer = new uint8_t[MAX_BUFFER_SIZE];
+    uint32_t bufferSize = 0;
+    uint32_t bufferIndex = 0;
     bool eof = false;
 
     bool pause = false;
     bool quit = false;
 };
@@ -113,5 +124,6 @@ struct MediaParam
 {
     VideoParam videoParam;
     AudioParam audioParam;
+    AVFormatContext* fmtCtx;
 };
 #endif
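
Note on the new AudioParam fields: the sample buffer is a raw `new uint8_t[MAX_BUFFER_SIZE]` with no matching delete[], so it lives for the whole process. A minimal sketch of an owning alternative, assuming the rest of the struct stays as in this diff; AudioBufferDemo is an illustrative name, not part of the commit:

#include <array>
#include <cstdint>

// Illustrative only: keeps the 192000-byte sample buffer inside the struct,
// so nothing has to be deleted by hand when the player shuts down.
struct AudioBufferDemo {
    static constexpr int MAX_BUFFER_SIZE = 192000;
    std::array<uint8_t, MAX_BUFFER_SIZE> buffer{};  // zero-initialized, owned by the struct
    uint32_t bufferSize = 0;    // bytes currently valid in buffer
    uint32_t bufferIndex = 0;   // read cursor advanced by the SDL callback
};
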
@@ -1,17 +1,9 @@
 #ifndef DECODER_H
 #define DECODER_H
-extern "C" {
-#include "libavcodec/avcodec.h"
-#include "libavformat/avformat.h"
-#include "libavutil/imgutils.h"
-}
-#include <queue>
-#include <condition_variable>
-#include <mutex>
 
 #include "decodeParam.h"
 
-void InitDecoder(const char* filepath, VideoParam& param);
-void RequestPacket(MediaParam& param);
+void InitDecoder(const char* filepath, MediaParam& param);
+void RequestVideoPacket(MediaParam& param);
 void RequestVideoFrame(MediaParam& param);
 #endif
@@ -8,7 +8,7 @@
 #include <cstring>
 #include <string_view>
 enum class FileType {
-    MUSIC,
+    AUDIO,
     VIDEO,
     IMG,
     ERRORTYPE
@@ -64,7 +64,7 @@ private:
     }
 public:
     static FileType GetFileType(const path& filepath) {
-        if (IsMusic(filepath)) return FileType::MUSIC;
+        if (IsMusic(filepath)) return FileType::AUDIO;
         if (IsVideo(filepath)) return FileType::VIDEO;
         if (IsImg(filepath)) return FileType::IMG;
         return FileType::ERRORTYPE;
 main.cc | 36
@@ -9,6 +9,7 @@
 #include "mediaDecoder.h"
 #include "shaderService.h"
 #include "shader.h"
+#include "audioDecoder.h"
 using std::cout;
 
 struct OpenglVideoParam
@@ -18,11 +19,32 @@ struct OpenglVideoParam
     unsigned int texs[3];
 };
 
+int InitAudio(const char* targetFilePath, MediaParam& param)
+{
+    InitDecoder(targetFilePath, param);
+    std::jthread(RequestAudioPacket, std::ref(param)).detach();
+    SDL_AudioSpec des;
+    des.freq = param.audioParam.codecCtx->sample_rate;
+    des.channels = param.audioParam.codecCtx->channels;
+    des.format = AUDIO_S16SYS;
+    des.samples = 1024;
+    des.silence = 0;
+    des.userdata = &(param.audioParam);
+    des.callback = audioCallback;
+    if (SDL_OpenAudio(&des, nullptr) < 0)
+    {
+        cout << SDL_GetError() << "\n";
+        return -1;
+    }
+    SDL_PauseAudio(0);
+    return 0;
+}
+
 int InitVideo(SDL_Window*& window, const char* targetFilepath, MediaParam& param, OpenglVideoParam& openglVideoParam, ShaderService*& shaderService)
 {
-    InitDecoder(targetFilepath, param.videoParam);
+    InitDecoder(targetFilepath, param);
     //FIX: when app exited, the fmtCtx was freed, so need notify decode thread to stop decode and exit.
-    std::jthread(RequestPacket, std::ref(param)).detach();
+    std::jthread(RequestVideoPacket, std::ref(param)).detach();
     std::jthread(RequestVideoFrame, std::ref(param)).detach();
     const int client_width = param.videoParam.width / 2;
     const int client_height = param.videoParam.height / 2;
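
Note on InitAudio: passing nullptr as the `obtained` spec to SDL_OpenAudio asks SDL to convert internally to the requested S16 layout, so the callback always sees the desired format. A sketch of the same setup with the newer SDL_OpenAudioDevice call, which also reports the spec that was actually negotiated; OpenAudioDeviceDemo and its parameters are assumptions, not part of this commit:

#include <SDL2/SDL.h>
#include <iostream>

// Sketch only: open the default output device and start the callback.
// Close later with SDL_CloseAudioDevice(dev).
static SDL_AudioDeviceID OpenAudioDeviceDemo(int sampleRate, int channels,
                                             SDL_AudioCallback cb, void* userdata)
{
    SDL_AudioSpec desired{};
    desired.freq     = sampleRate;
    desired.channels = static_cast<Uint8>(channels);
    desired.format   = AUDIO_S16SYS;
    desired.samples  = 1024;
    desired.callback = cb;
    desired.userdata = userdata;

    SDL_AudioSpec obtained{};
    // allowed_changes = 0: SDL converts internally, the callback still sees `desired`.
    SDL_AudioDeviceID dev = SDL_OpenAudioDevice(nullptr, 0, &desired, &obtained, 0);
    if (dev == 0) {
        std::cerr << SDL_GetError() << "\n";
        return 0;
    }
    SDL_PauseAudioDevice(dev, 0);  // unpause: the audio thread starts calling cb
    return dev;
}
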
@@ -152,7 +174,6 @@ void OpenglRenderVideo(MediaParam& param, const OpenglVideoParam& openglVideoPar
     glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
 }
 
-
 int main(int argc, char** argv)
 {
     // Check File
@@ -189,7 +210,7 @@ int main(int argc, char** argv)
     case FileType::VIDEO:
     {
         InitVideo(window, targetFilepath, mediaParam, openglVideoParam, shaderService);
-        const auto stream_frame_rate = mediaParam.videoParam.fmtCtx->streams[mediaParam.videoParam.videoStreamIndex]->avg_frame_rate;
+        const auto stream_frame_rate = mediaParam.fmtCtx->streams[mediaParam.videoParam.videoStreamIndex]->avg_frame_rate;
         framerate = static_cast<double>(stream_frame_rate.den) / stream_frame_rate.num;
         break;
     }
@@ -198,8 +219,9 @@
         InitImg(window, targetFilepath, renderer, surface, texture);
         break;
     }
-    case FileType::MUSIC:
+    case FileType::AUDIO:
     {
+        InitAudio(targetFilepath, mediaParam);
         break;
     }
     case FileType::ERRORTYPE:
@@ -245,13 +267,15 @@ int main(int argc, char** argv)
         case FileType::IMG:
             RenderPicture(window, renderer, texture);
             break;
+        case FileType::AUDIO:
+            break;
         default:
             break;
         }
     }
 
     avcodec_close(mediaParam.videoParam.codecCtx);
-    avformat_close_input(&(mediaParam.videoParam.fmtCtx));
+    avformat_close_input(&(mediaParam.fmtCtx));
     SDL_GL_DeleteContext(openglVideoParam.glContext);
     SDL_DestroyWindow(window);
     SDL_Quit();
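
Note on the teardown above: only videoParam.codecCtx is closed; the audio codec context opened by InitDecoder and the SDL audio device are never released. A hedged sketch of a fuller shutdown under the assumptions of this diff (both codec contexts live in MediaParam, audio was opened with SDL_OpenAudio); ShutdownMediaDemo is an illustrative helper, not the commit's code:

#include <SDL2/SDL.h>
#include "decodeParam.h"

// Sketch only: stop audio first so the callback no longer touches decoder state,
// then free both codec contexts and the shared format context.
static void ShutdownMediaDemo(MediaParam& mediaParam)
{
    SDL_CloseAudio();                                       // stops the callback thread
    avcodec_free_context(&mediaParam.videoParam.codecCtx);  // closes and frees in one call
    avcodec_free_context(&mediaParam.audioParam.codecCtx);  // otherwise leaked
    avformat_close_input(&mediaParam.fmtCtx);
}
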
@@ -1,2 +1,130 @@
-#include"audioDecoder.h"
+#include "audioDecoder.h"
 
+#include <SDL2/SDL_audio.h>
+#include <SDL2/SDL_stdinc.h>
+
+extern "C" {
+#include "libswresample/swresample.h"
+}
+
+int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize)
+{
+    AVFrame* frame = av_frame_alloc();
+    int dataSize = 0;
+    AVPacket packet;
+    SwrContext* swrCtx = nullptr;
+    if (param.quit) {
+        return -1;
+    }
+    if (!param.packetQueue.pop(&packet, true)) {
+        return -1;
+    }
+    int ret = avcodec_send_packet(param.codecCtx, &packet);
+    if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
+        return -1;
+    }
+    ret = avcodec_receive_frame(param.codecCtx, frame);
+    if (ret < 0 && ret != AVERROR_EOF) {
+        av_frame_unref(frame);
+        return -1;
+    }
+    if (frame->best_effort_timestamp == AV_NOPTS_VALUE)
+    {
+        av_frame_unref(frame);
+        return -1;
+    }
+
+    if (frame->channels > 0 && frame->channel_layout == 0) {
+        frame->channel_layout = av_get_default_channel_layout(frame->channels);
+    }
+    else if (frame->channels == 0 && frame->channel_layout > 0) {
+        frame->channels = av_get_channel_layout_nb_channels(frame->channel_layout);
+    }
+
+    AVSampleFormat dstFormat = AV_SAMPLE_FMT_S16;
+    uint64_t dstLayout = av_get_default_channel_layout(frame->channels);
+    swrCtx = swr_alloc_set_opts(nullptr, dstLayout, dstFormat, frame->sample_rate, frame->channel_layout, (AVSampleFormat)frame->format, frame->sample_rate, 0, nullptr);
+    if (!swrCtx || swr_init(swrCtx) < 0) {
+        av_frame_unref(frame);
+        return -1;
+    }
+
+    uint64_t dstNbSamples = av_rescale_rnd(swr_get_delay(swrCtx, frame->sample_rate) + frame->nb_samples, frame->sample_rate, frame->sample_rate, AVRounding(1));
+    int nb = swr_convert(swrCtx, &audioBuffer, dstNbSamples, const_cast<const uint8_t**>(frame->data), frame->nb_samples);
+    dataSize = frame->channels * nb * av_get_bytes_per_sample(dstFormat);
+    av_frame_free(&frame);
+    swr_free(&swrCtx);
+    return dataSize;
+}
+
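
Note on RequestAudioFrame: the AVFrame is allocated before the quit/pop checks and the popped packet is never unreferenced, so both leak on the early-return paths, and a fresh SwrContext is built and torn down for every frame. A sketch of the same step with those paths closed, using the FFmpeg 4.x channel-layout API this diff already relies on; RequestAudioFrameDemo and the function-local static SwrContext are illustrative choices, not the commit's design:

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavutil/channel_layout.h"
#include "libavutil/samplefmt.h"
#include "libswresample/swresample.h"
}
#include "decodeParam.h"

// Sketch only: decode one packet, resample to interleaved S16, release everything.
static int RequestAudioFrameDemo(AudioParam& param, uint8_t* audioBuffer, int bufSize)
{
    if (param.quit)
        return -1;

    AVPacket packet{};                      // filled by the queue, released below
    if (!param.packetQueue.pop(&packet, true))
        return -1;

    AVFrame* frame = av_frame_alloc();
    int dataSize = -1;

    do {
        if (avcodec_send_packet(param.codecCtx, &packet) < 0)
            break;
        if (avcodec_receive_frame(param.codecCtx, frame) < 0)
            break;

        const int channels = frame->channels > 0
            ? frame->channels
            : av_get_channel_layout_nb_channels(frame->channel_layout);
        const int64_t inLayout = frame->channel_layout
            ? (int64_t)frame->channel_layout
            : av_get_default_channel_layout(channels);

        static SwrContext* swrCtx = nullptr;   // reused across calls instead of per frame
        if (!swrCtx) {
            swrCtx = swr_alloc_set_opts(nullptr,
                                        av_get_default_channel_layout(channels),
                                        AV_SAMPLE_FMT_S16, frame->sample_rate,
                                        inLayout, (AVSampleFormat)frame->format,
                                        frame->sample_rate, 0, nullptr);
            if (!swrCtx || swr_init(swrCtx) < 0)
                break;
        }

        const int outCapacity = bufSize / (channels * 2);      // S16 = 2 bytes per sample
        const int nb = swr_convert(swrCtx, &audioBuffer, outCapacity,
                                   const_cast<const uint8_t**>(frame->data),
                                   frame->nb_samples);
        if (nb > 0)
            dataSize = nb * channels * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
    } while (false);

    av_packet_unref(&packet);   // the original never released the popped packet
    av_frame_free(&frame);
    return dataSize;
}
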
+void audioCallback(void* userdata, uint8_t* stream, int len) {
+    AudioParam* param = static_cast<AudioParam*>(userdata);
+    SDL_memset(stream, 0, len);
+    int audioSize = 0;
+    int len1 = 0;
+    while (len > 0)
+    {
+        if (param->bufferIndex >= param->bufferSize)
+        {
+            audioSize = RequestAudioFrame(*param, param->buffer, sizeof(param->buffer));
+            if (audioSize < 0)
+            {
+                param->bufferSize = 0;
+                memset(param->buffer, 0, param->bufferSize);
+            }
+            else
+            {
+                param->bufferSize = audioSize;
+            }
+            param->bufferIndex = 0;
+        }
+        len1 = param->bufferSize - param->bufferIndex;
+        if (len1 > len)
+            len1 = len;
+
+        SDL_MixAudio(stream, param->buffer + param->bufferIndex, len1, SDL_MIX_MAXVOLUME);
+        len -= len1;
+        stream += len1;
+        param->bufferIndex += len1;
+    }
+}
+
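
Note on audioCallback: `sizeof(param->buffer)` is the size of a uint8_t pointer (typically 8 bytes), not the 192000-byte buffer, so each refill asks for almost nothing; and when RequestAudioFrame fails, bufferSize is set to 0, len never shrinks, and the `while (len > 0)` loop spins on the audio thread. A sketch with both points addressed, assuming the AudioParam layout from this diff; audioCallbackDemo is an illustrative name:

#include <SDL2/SDL.h>
#include <cstring>
#include "audioDecoder.h"

// Sketch only: refill the staging buffer when drained, then mix into SDL's stream.
static void audioCallbackDemo(void* userdata, uint8_t* stream, int len)
{
    AudioParam* param = static_cast<AudioParam*>(userdata);
    SDL_memset(stream, 0, len);

    while (len > 0) {
        if (param->bufferIndex >= param->bufferSize) {
            // pass the real capacity, not sizeof(uint8_t*)
            int audioSize = RequestAudioFrame(*param, param->buffer, AudioParam::MAX_BUFFER_SIZE);
            if (audioSize <= 0) {
                // feed a block of silence so the loop still makes progress
                param->bufferSize = 1024;
                std::memset(param->buffer, 0, param->bufferSize);
            } else {
                param->bufferSize = audioSize;
            }
            param->bufferIndex = 0;
        }
        int len1 = static_cast<int>(param->bufferSize - param->bufferIndex);
        if (len1 > len)
            len1 = len;
        SDL_MixAudio(stream, param->buffer + param->bufferIndex, len1, SDL_MIX_MAXVOLUME);
        len    -= len1;
        stream += len1;
        param->bufferIndex += len1;
    }
}
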
+void RequestAudioPacket(MediaParam& param) {
+    const auto& fmtCtx = param.fmtCtx;
+    const auto& audioStreamIndex = param.audioParam.audioStreamIndex;
+
+    AVPacket* packet = av_packet_alloc();
+
+    while (true) {
+        if (param.audioParam.packetQueue.isFill()) {
+            std::this_thread::sleep_for(100ms);
+            continue;
+        }
+        //FIX:
+        const int ret = av_read_frame(fmtCtx, packet);
+        if (param.audioParam.eof) {
+            std::this_thread::sleep_for(100ms);
+            av_packet_unref(packet);
+            return;
+        }
+        if (ret == 0) {
+            if (packet->stream_index == audioStreamIndex) {
+                param.audioParam.packetQueue.push(packet);
+                av_packet_unref(packet);
+            }
+            else if (ret == AVERROR_EOF)
+            {
+                param.audioParam.eof = true;
+                av_packet_unref(packet);
+                break;
+            }
+            else {
+                av_packet_unref(packet);
+            }
+        }
+        else if (param.fmtCtx->pb->error == 0) {
+            std::this_thread::sleep_for(100ms);
+        }
+    }
+    av_packet_unref(packet);
+}
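
Note on RequestAudioPacket: the `else if (ret == AVERROR_EOF)` branch sits inside `if (ret == 0)`, so it can never be taken and `audioParam.eof` is never set from this loop. A sketch of the read loop with the EOF test at the top level, assuming MediaQueue::push takes its own reference to the packet (as the video path in this diff appears to assume); RequestAudioPacketDemo is an illustrative name:

extern "C" {
#include "libavformat/avformat.h"
}
#include "decodeParam.h"
#include <thread>

// Sketch only: keep the audio packet queue topped up until EOF or quit.
static void RequestAudioPacketDemo(MediaParam& param)
{
    AVPacket* packet = av_packet_alloc();
    while (!param.audioParam.quit) {
        if (param.audioParam.packetQueue.isFill()) {
            std::this_thread::sleep_for(100ms);
            continue;
        }
        const int ret = av_read_frame(param.fmtCtx, packet);
        if (ret == 0) {
            if (packet->stream_index == param.audioParam.audioStreamIndex)
                param.audioParam.packetQueue.push(packet);
            av_packet_unref(packet);
        } else if (ret == AVERROR_EOF) {
            param.audioParam.eof = true;          // reachable here, unlike inside ret == 0
            break;
        } else if (param.fmtCtx->pb && param.fmtCtx->pb->error == 0) {
            std::this_thread::sleep_for(100ms);   // transient read condition, retry
        }
    }
    av_packet_free(&packet);
}
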
@@ -3,7 +3,7 @@
 #include <chrono>
 using namespace std::literals::chrono_literals;
 
-void InitDecoder(const char* filepath, VideoParam& param) {
+void InitDecoder(const char* filepath, MediaParam& param) {
     AVFormatContext* fmtCtx = nullptr;
     AVCodecContext* codecFmt = nullptr;
     auto ret = avformat_open_input(&fmtCtx, filepath, NULL, NULL);
@@ -13,21 +13,28 @@ void InitDecoder(const char* filepath, VideoParam& param) {
         const auto stream = fmtCtx->streams[i];
         const auto codec = avcodec_find_decoder(stream->codecpar->codec_id);
         if (codec->type == AVMEDIA_TYPE_VIDEO) {
-            param.videoStreamIndex = i;
+            param.videoParam.videoStreamIndex = i;
             codecFmt = avcodec_alloc_context3(codec);
             avcodec_parameters_to_context(codecFmt, stream->codecpar);
             avcodec_open2(codecFmt, codec, nullptr);
+            param.videoParam.codecCtx = codecFmt;
+            param.videoParam.width = codecFmt->width;
+            param.videoParam.height = codecFmt->height;
+        }
+        else if (codec->type == AVMEDIA_TYPE_AUDIO)
+        {
+            param.audioParam.audioStreamIndex = i;
+            codecFmt = avcodec_alloc_context3(codec);
+            avcodec_parameters_to_context(codecFmt, stream->codecpar);
+            avcodec_open2(codecFmt, codec, nullptr);
+            param.audioParam.codecCtx = codecFmt;
         }
     }
 
-    param.codecCtx = codecFmt;
     param.fmtCtx = fmtCtx;
-    param.width = codecFmt->width;
-    param.height = codecFmt->height;
 }
 
-void RequestPacket(MediaParam& param) {
-    const auto& fmtCtx = param.videoParam.fmtCtx;
+void RequestVideoPacket(MediaParam& param) {
+    const auto& fmtCtx = param.fmtCtx;
     const auto& videoStreamIndex = param.videoParam.videoStreamIndex;
 
     AVPacket* packet = av_packet_alloc();
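
Note on the InitDecoder changes: the loop now fills both videoParam and audioParam through the same `codecFmt` local, opening one decoder per matching stream. A sketch of the same job done with av_find_best_stream, which also picks a sensible default when a file carries several audio or video streams; OpenStreamDemo is an illustrative helper, not part of the commit (on FFmpeg 4.x the `const AVCodec*` would be plain `AVCodec*`):

extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
}

// Sketch only: find the "best" stream of the given type, open its decoder,
// and report the stream index to the caller. Returns nullptr on any failure.
static AVCodecContext* OpenStreamDemo(AVFormatContext* fmtCtx, AVMediaType type, int* streamIndex)
{
    const AVCodec* codec = nullptr;
    const int index = av_find_best_stream(fmtCtx, type, -1, -1, &codec, 0);
    if (index < 0 || !codec)
        return nullptr;                       // no stream of this type in the file

    AVCodecContext* codecCtx = avcodec_alloc_context3(codec);
    if (!codecCtx)
        return nullptr;
    if (avcodec_parameters_to_context(codecCtx, fmtCtx->streams[index]->codecpar) < 0 ||
        avcodec_open2(codecCtx, codec, nullptr) < 0) {
        avcodec_free_context(&codecCtx);
        return nullptr;
    }
    *streamIndex = index;
    return codecCtx;
}
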
@@ -37,7 +44,7 @@ void RequestPacket(MediaParam& param) {
             std::this_thread::sleep_for(100ms);
             continue;
         }
         //FIX:
         const int ret = av_read_frame(fmtCtx, packet);
         if (param.videoParam.eof) {
             std::this_thread::sleep_for(100ms);
@@ -59,7 +66,7 @@ void RequestPacket(MediaParam& param) {
                 av_packet_unref(packet);
             }
         }
-        else if (param.videoParam.fmtCtx->pb->error == 0) {
+        else if (param.fmtCtx->pb->error == 0) {
             std::this_thread::sleep_for(100ms);
         }
     }
@@ -67,9 +74,7 @@ void RequestPacket(MediaParam& param) {
 }
 
 void RequestVideoFrame(MediaParam& param) {
-    const auto& fmtCtx = param.videoParam.fmtCtx;
     const auto& codecCtx = param.videoParam.codecCtx;
-    const auto& videoStreamIndex = param.videoParam.videoStreamIndex;
     AVPacket* packet = av_packet_alloc();
     AVFrame* frame = av_frame_alloc();
     while (true) {
 src/tempCodeRunnerFile.cc | 0 (new empty file)