Play audio while playing video
parent 800364ea78
commit c8578b1e37
@@ -99,16 +99,19 @@ struct VideoParam
    int width;
    int height;
    int videoStreamIndex;

    AVStream* stream;
    bool eof = false;
    bool pause = false;
    bool quit = false;
    double totalTime = 0;
    double currentTime = 0;
};

struct AudioParam
{
    MediaQueue<AVPacket> packetQueue;
    AVCodecContext* codecCtx;
    AVStream* stream;
    int audioStreamIndex;
    static constexpr int MAX_BUFFER_SIZE = 192000;
    uint8_t* buffer = new uint8_t[MAX_BUFFER_SIZE];
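A note on the constant: 192000 bytes is exactly one second of 48 kHz, 2-channel, 16-bit PCM, which is one plausible reading of why MAX_BUFFER_SIZE has this value (an assumption, not stated in the commit):

// 48000 samples/s * 2 channels * 2 bytes/sample = 192000 bytes ≈ 1 s of audio
static_assert(48000 * 2 * 2 == 192000, "one second of 48 kHz stereo S16 audio");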
@@ -118,6 +121,16 @@ struct AudioParam
    bool pause = false;
    bool isVideo = false;
    bool quit = false;
    double currentTime = 0;
    double totalTime = 0;
    double lastPts;

    double getCurrentTime() const
    {
        const int diff = bufferSize - bufferIndex;
        const int bytePreSec = codecCtx->sample_rate * codecCtx->ch_layout.nb_channels * 2;
        return currentTime - static_cast<double>(diff) / bytePreSec;
    }
};

struct MediaParam
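getCurrentTime() above reports the audio clock as the timestamp of the last decoded packet minus the part of the buffer SDL has not consumed yet. A minimal standalone sketch of the same arithmetic, assuming S16 output (2 bytes per sample) as the resampler later produces; the helper name is hypothetical:

// Sketch of the audio-clock math (assumes 16-bit samples).
double AudioClock(double currentTime, int bufferSize, int bufferIndex,
                  int sampleRate, int channels)
{
    const int remaining   = bufferSize - bufferIndex;   // bytes decoded but not yet played
    const int bytesPerSec = sampleRate * channels * 2;  // e.g. 44100 * 2 * 2 = 176400
    return currentTime - static_cast<double>(remaining) / bytesPerSec;
}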
@@ -4,6 +4,6 @@
#include "decodeParam.h"

void InitDecoder(const char* filepath, MediaParam& param);
void RequestVideoPacket(MediaParam& param);
void RequestMediaPacket(MediaParam& param);
void RequestVideoFrame(MediaParam& param);
#endif
main.cc (8 changed lines)
@@ -29,8 +29,8 @@ int InitAudio(SDL_Window* window, SDL_Renderer* renderer, const char* targetFile
        InitDecoder(targetFilePath, param);
        window = SDL_CreateWindow("mp", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN);
        renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
        std::jthread(RequestAudioPacket, std::ref(param)).detach();
    }
    std::jthread(RequestAudioPacket, std::ref(param)).detach();
    SDL_AudioSpec des;
    des.freq = param.audioParam.codecCtx->sample_rate;
    des.channels = param.audioParam.codecCtx->ch_layout.nb_channels;
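For context, a hedged sketch of how the remaining SDL_AudioSpec fields are typically filled and the device opened in SDL2's pull model; AudioCallback is a hypothetical name standing in for whatever callback feeds RequestAudioFrame output into SDL:

// Sketch only (assumed field values), continuing the `des` spec set up above.
des.format   = AUDIO_S16SYS;          // matches the AV_SAMPLE_FMT_S16 resampler output
des.silence  = 0;
des.samples  = 1024;                  // callback buffer size in sample frames
des.callback = AudioCallback;         // hypothetical: void (*)(void* userdata, Uint8* stream, int len)
des.userdata = &param.audioParam;
if (SDL_OpenAudio(&des, nullptr) < 0) {
    // handle SDL_GetError()
}
SDL_PauseAudio(0);                    // start the callback pulling audio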
@@ -55,7 +55,7 @@ int InitVideo(SDL_Window*& window, const char* targetFilepath, MediaParam& param
    param.audioParam.isVideo = true;
    InitAudio(nullptr, nullptr, nullptr, param);
    //FIX: when the app exits, fmtCtx has already been freed, so the decode threads need to be notified to stop decoding and exit.
    std::jthread(RequestVideoPacket, std::ref(param)).detach();
    std::jthread(RequestMediaPacket, std::ref(param)).detach();
    std::jthread(RequestVideoFrame, std::ref(param)).detach();
    const int client_width = param.videoParam.width / 2;
    const int client_height = param.videoParam.height / 2;
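The //FIX comment flags that the detached decode threads can outlive fmtCtx. One hedged way to act on it with the quit flags already declared in VideoParam and AudioParam (a sketch, not the project's actual shutdown path; the decode loops would also have to check these flags each iteration, and any blocking queue pops would need to be woken):

// Hypothetical shutdown helper: ask the decode threads to stop before
// the format context is freed.
void RequestStop(MediaParam& param)
{
    param.videoParam.quit = true;
    param.audioParam.quit = true;
}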
@@ -221,7 +221,7 @@ int main(int argc, char** argv)
    case FileType::VIDEO:
    {
        InitVideo(window, targetFilepath, mediaParam, openglVideoParam, shaderService);
        const auto stream_frame_rate = mediaParam.fmtCtx->streams[mediaParam.videoParam.videoStreamIndex]->avg_frame_rate;
        const auto stream_frame_rate = mediaParam.videoParam.stream->avg_frame_rate;
        framerate = static_cast<double>(stream_frame_rate.den) / stream_frame_rate.num;
        break;
    }
@@ -272,7 +272,7 @@ int main(int argc, char** argv)
    case FileType::VIDEO:
        OpenglRenderVideo(mediaParam, openglVideoParam, shaderService);
        SDL_GL_SwapWindow(window);
        std::this_thread::sleep_until(current_time + std::chrono::milliseconds(30));
        std::this_thread::sleep_until(current_time + std::chrono::milliseconds(static_cast<int>(framerate * 1000)));
        current_time = std::chrono::system_clock::now();
        break;
    case FileType::IMG:
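Since framerate is computed as den/num (seconds per frame), the new sleep replaces the fixed 30 ms with the stream's own frame interval; a worked example with an assumed 30 fps stream:

// Assumed values for illustration: avg_frame_rate = 30/1
//   framerate = 1.0 / 30 ≈ 0.0333 s per frame
//   static_cast<int>(framerate * 1000) == 33  -> sleep ~33 ms between presented frames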
@@ -22,6 +22,10 @@ int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize, SDL_
    if (!param.packetQueue.pop(&packet, true)) {
        return -1;
    }
    if (packet.pts != AV_NOPTS_VALUE)
    {
        param.currentTime = av_q2d(param.stream->time_base) * packet.pts;
    }
    int ret = avcodec_send_packet(param.codecCtx, &packet);
    if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
        return -1;
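av_q2d(time_base) * pts rescales the packet timestamp from time_base ticks to seconds, which is what keeps param.currentTime comparable across streams; a worked example with assumed values:

// Assumed values: audio stream with time_base = {1, 44100}, packet.pts = 441000
//   av_q2d({1, 44100}) = 1.0 / 44100
//   currentTime = (1.0 / 44100) * 441000 = 10.0 seconds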
@@ -55,9 +59,9 @@ int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize, SDL_

    for (int i = 0; i < nbSamples; i++)
    {
        double magnitude = sqrt(fftwOutput[i][0] * fftwOutput[i][0] + fftwOutput[i][1] * fftwOutput[i][1]);
        double phase = atan2(fftwOutput[i][1], fftwOutput[i][0]);
        std::cout << "mag: " << magnitude << "\n phase: " << phase << "\n";
        //double magnitude = sqrt(fftwOutput[i][0] * fftwOutput[i][0] + fftwOutput[i][1] * fftwOutput[i][1]);
        //double phase = atan2(fftwOutput[i][1], fftwOutput[i][0]);
        //std::cout << "mag: " << magnitude << "\n phase: " << phase << "\n";
    }

    constexpr AVSampleFormat dstFormat = AV_SAMPLE_FMT_S16;
@@ -79,6 +83,7 @@ int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize, SDL_
        SDL_RenderDrawLine(renderer, 0, 0, 300, 300);
        SDL_RenderPresent(renderer);
    }
    param.currentTime += static_cast<double>(dataSize) / (2 * (param.codecCtx->ch_layout.nb_channels) / param.codecCtx->sample_rate);
    av_frame_free(&frame);
    swr_free(&swrCtx);
    return dataSize;
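One caution about the new currentTime increment: the divisor 2 * channels / sample_rate is evaluated in integer arithmetic and truncates to 0 for typical sample rates, so the increment presumably means dataSize over bytes-per-second. A hedged sketch of that likely intent (an assumption, not what the commit ships):

// Likely intended form: advance the audio clock by the duration of the
// bytes just produced (S16 output -> 2 bytes per sample per channel).
const int bytesPerSec = 2 * param.codecCtx->ch_layout.nb_channels
                          * param.codecCtx->sample_rate;
param.currentTime += static_cast<double>(dataSize) / bytesPerSec;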
@@ -18,8 +18,10 @@ void InitDecoder(const char* filepath, MediaParam& param) {
        avcodec_parameters_to_context(codecFmt, stream->codecpar);
        avcodec_open2(codecFmt, codec, nullptr);
        param.videoParam.codecCtx = codecFmt;
        param.videoParam.stream = stream;
        param.videoParam.width = codecFmt->width;
        param.videoParam.height = codecFmt->height;
        param.videoParam.totalTime = av_q2d(stream->avg_frame_rate) * stream->duration;
    }
    else if (codec->type == AVMEDIA_TYPE_AUDIO)
    {
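For reference, AVStream::duration is expressed in time_base ticks, so a duration in seconds is conventionally derived from time_base rather than from the frame rate; a hedged sketch of that convention (an assumption about what totalTime is meant to hold, not what the commit does):

// Conventional duration-in-seconds calculation for an AVStream.
param.videoParam.totalTime = av_q2d(stream->time_base) * stream->duration;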
@@ -28,15 +30,16 @@ void InitDecoder(const char* filepath, MediaParam& param) {
        avcodec_parameters_to_context(codecFmt, stream->codecpar);
        avcodec_open2(codecFmt, codec, nullptr);
        param.audioParam.codecCtx = codecFmt;
        param.audioParam.stream = stream;
    }
    }
    param.fmtCtx = fmtCtx;
}

void RequestVideoPacket(MediaParam& param) {
void RequestMediaPacket(MediaParam& param) {
    const auto& fmtCtx = param.fmtCtx;
    const auto& videoStreamIndex = param.videoParam.videoStreamIndex;

    const auto& audioStreamIndex = param.audioParam.audioStreamIndex;
    AVPacket* packet = av_packet_alloc();

    while (true) {
@@ -56,6 +59,11 @@ void RequestVideoPacket(MediaParam& param) {
            param.videoParam.packetQueue.push(packet);
            av_packet_unref(packet);
        }
        else if (packet->stream_index == audioStreamIndex)
        {
            param.audioParam.packetQueue.push(packet);
            av_packet_unref(packet);
        }
        else if (ret == AVERROR_EOF)
        {
            param.videoParam.eof = true;
@@ -102,6 +110,8 @@ void RequestVideoFrame(MediaParam& param) {
            continue;
        }
        param.videoParam.frameQueue.push(frame);
        auto time_duration = av_q2d(param.videoParam.stream->time_base);
        auto pts = frame->best_effort_timestamp;
        av_frame_unref(frame);
        av_packet_unref(packet);
    }
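time_duration and pts above are the two halves of a presentation timestamp in seconds; a hedged sketch of how they would typically be combined for A/V sync against the audio clock (an assumption; this hunk computes them but does not yet use them):

// Presentation time of this frame in seconds (hypothetical use of the two values).
const double framePtsSeconds = time_duration * static_cast<double>(pts);
// e.g. time_base = {1, 90000}, best_effort_timestamp = 900000 -> 10.0 s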