Compare commits

10 Commits

04008c0ef8 ... c8578b1e37
| Author | SHA1 | Date | |
|---|---|---|---|
| | c8578b1e37 | | |
| | 800364ea78 | | |
| | 34d74c5c2d | | |
| | f7700bb398 | | |
| | 7085c6ca9b | | |
| | e6ce8e1781 | | |
| | f005b19ee4 | | |
| | 5e794cf825 | | |
| | 4d82e2b0a5 | | |
| | 146a955841 | | |
9  include/audioDecoder.h  Normal file
@@ -0,0 +1,9 @@
#ifndef AUDIODECODER_H
#define AUDIODECODER_H
#include <decodeParam.h>

int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize);
void audioCallback(void* userdata, uint8_t* stream, int len);
void RequestAudioPacket(MediaParam& param);

#endif
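These three declarations form the pull-model audio path used elsewhere in this compare: RequestAudioPacket runs on a worker thread and fills the packet queue, SDL invokes audioCallback whenever the device needs more samples, and the callback refills its buffer through RequestAudioFrame. For readers unfamiliar with that model, a minimal, self-contained sketch of registering such a callback with SDL2 follows; the callback body and the spec values are illustrative stand-ins, not code from this commit.

```cpp
// Sketch only: shows the SDL2 pull-model wiring assumed by audioCallback above.
#define SDL_MAIN_HANDLED
#include <SDL2/SDL.h>
#include <cstdint>

// Stand-in for audioCallback(): SDL calls this whenever it needs `len` more
// bytes; a real player copies decoded samples here instead of silence.
static void pullAudio(void* /*userdata*/, uint8_t* stream, int len) {
    SDL_memset(stream, 0, len);
}

int main() {
    SDL_Init(SDL_INIT_AUDIO);

    SDL_AudioSpec want{};
    want.freq     = 44100;          // example values, not taken from the diff
    want.format   = AUDIO_S16SYS;
    want.channels = 2;
    want.samples  = 1024;
    want.callback = pullAudio;

    if (SDL_OpenAudio(&want, nullptr) == 0) {
        SDL_PauseAudio(0);          // start the callback-driven playback
        SDL_Delay(1000);            // one second of (silent) audio
        SDL_CloseAudio();
    }
    SDL_Quit();
    return 0;
}
```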
@@ -1,5 +1,6 @@
#ifndef DECODER_H
#define DECODER_H
#ifndef DECODEPARAM_H
#define DECODEPARAM_H

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
@@ -8,6 +9,8 @@ extern "C" {
#include <queue>
#include <condition_variable>
#include <mutex>
#include <chrono>
using namespace std::literals::chrono_literals;

template<typename T>
	requires std::is_same_v<T, AVPacket> || std::is_same_v<T, AVFrame>
@@ -70,7 +73,8 @@ bool MediaQueue<T>::pop(T* item, bool block, bool quit) {
			if (av_frame_ref(item, temp) < 0) {
				return false;
			}
			av_frame_unref(temp);
			av_frame_free(&temp);
			delete temp;
		}
		queue.pop();
		count--;
@@ -91,19 +95,48 @@ struct VideoParam
{
	MediaQueue<AVPacket> packetQueue;
	MediaQueue<AVFrame> frameQueue;
	AVFormatContext* fmtCtx;
	AVCodecContext* codecCtx;
	int width;
	int height;
	int videoStreamIndex;

	AVStream* stream;
	bool eof = false;

	bool pause = false;
	bool quit = false;
	double totalTime = 0;
	double currentTime = 0;
};

void InitDecoder(const char* filepath, VideoParam& param);
void RequestPacket(VideoParam& param);
void RequestFrame(VideoParam& param);
struct AudioParam
{
	MediaQueue<AVPacket> packetQueue;
	AVCodecContext* codecCtx;
	AVStream* stream;
	int audioStreamIndex;
	static constexpr int MAX_BUFFER_SIZE = 192000;
	uint8_t* buffer = new uint8_t[MAX_BUFFER_SIZE];
	uint32_t bufferSize = 0;
	uint32_t bufferIndex = 0;
	bool eof = false;
	bool pause = false;
	bool isVideo = false;
	bool quit = false;
	double currentTime = 0;
	double totalTime = 0;
	double lastPts;

	double getCurrentTime() const
	{
		const int diff = bufferSize - bufferIndex;
		const int bytePreSec = codecCtx->sample_rate * codecCtx->ch_layout.nb_channels * 2;
		return currentTime - static_cast<double>(diff) / bytePreSec;
	}
};

struct MediaParam
{
	VideoParam videoParam;
	AudioParam audioParam;
	AVFormatContext* fmtCtx;
};
#endif
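A note on the getCurrentTime() helper added to AudioParam above: it estimates the audible playback position by subtracting the decoded-but-not-yet-played portion of the buffer (in seconds) from currentTime, assuming 16-bit samples (2 bytes per sample). A standalone sketch of that arithmetic with hypothetical example values:

```cpp
#include <cstdint>
#include <iostream>

int main() {
    // Hypothetical stream parameters: 44.1 kHz stereo, 16-bit (2-byte) samples.
    const int sampleRate  = 44100;
    const int channels    = 2;
    const int bytesPerSec = sampleRate * channels * 2;   // 176400 bytes of PCM per second

    // Mirrors AudioParam: currentTime is the pts of the last decoded packet,
    // bufferSize - bufferIndex is audio that is decoded but not yet played.
    const double   currentTime = 12.5;                   // seconds
    const uint32_t bufferSize  = 8192;
    const uint32_t bufferIndex = 4096;

    const int    diff     = static_cast<int>(bufferSize - bufferIndex);
    const double playhead = currentTime - static_cast<double>(diff) / bytesPerSec;

    std::cout << playhead << " s\n";                     // ~12.4768 s
    return 0;
}
```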
							
								
								
									
9  include/mediaDecoder.h  Normal file
@@ -0,0 +1,9 @@
#ifndef DECODER_H
#define DECODER_H

#include "decodeParam.h"

void InitDecoder(const char* filepath, MediaParam& param);
void RequestMediaPacket(MediaParam& param);
void RequestVideoFrame(MediaParam& param);
#endif
@@ -8,7 +8,7 @@
#include <cstring>
#include <string_view>
enum class FileType {
	MUSIC,
	AUDIO,
	VIDEO,
	IMG,
	ERRORTYPE
@@ -64,7 +64,7 @@ private:
	}
public:
	static FileType GetFileType(const path& filepath) {
		if (IsMusic(filepath)) return FileType::MUSIC;
		if (IsMusic(filepath)) return FileType::AUDIO;
		if (IsVideo(filepath)) return FileType::VIDEO;
		if (IsImg(filepath))   return FileType::IMG;
		return FileType::ERRORTYPE;
77  main.cc
@@ -1,15 +1,20 @@
#include <iostream>
#include <thread>
#include <SDL2/SDL.h>
#include <filesystem>
#include <opencv2/opencv.hpp>
#define SDL_MAIN_HANDLED
#include <SDL2/SDL.h>

#include "util.h"
#include "decoder.h"
#include "mediaDecoder.h"
#include "shaderService.h"
#include "shader.h"
#include "audioDecoder.h"
using std::cout;

constexpr int SCREEN_WIDTH = 640;
constexpr int SCREEN_HEIGHT = 480;

struct OpenglVideoParam
{
	SDL_GLContext glContext;
@@ -17,14 +22,43 @@ struct OpenglVideoParam
	unsigned int texs[3];
};

int InitVideo(SDL_Window*& window, const char* targetFilepath, VideoParam& videoParam, OpenglVideoParam& openglVideoParam, ShaderService*& shaderService)
int InitAudio(SDL_Window* window, SDL_Renderer* renderer, const char* targetFilePath, MediaParam& param)
{
	InitDecoder(targetFilepath, videoParam);
	if (!param.audioParam.isVideo)
	{
		InitDecoder(targetFilePath, param);
		window = SDL_CreateWindow("mp", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN);
		renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
		std::jthread(RequestAudioPacket, std::ref(param)).detach();
	}
	SDL_AudioSpec des;
	des.freq = param.audioParam.codecCtx->sample_rate;
	des.channels = param.audioParam.codecCtx->ch_layout.nb_channels;
	des.format = AUDIO_S16SYS;
	des.samples = 1024;
	des.silence = 0;
	std::tuple<SDL_Window*, SDL_Renderer*, AudioParam*>* callbackParam = new std::tuple{ window, renderer, &(param.audioParam) };
	des.userdata = callbackParam;
	des.callback = audioCallback;
	if (SDL_OpenAudio(&des, nullptr) < 0)
	{
		cout << SDL_GetError() << "\n";
		return -1;
	}
	SDL_PauseAudio(0);
	return 0;
}

int InitVideo(SDL_Window*& window, const char* targetFilepath, MediaParam& param, OpenglVideoParam& openglVideoParam, ShaderService*& shaderService)
{
	InitDecoder(targetFilepath, param);
	param.audioParam.isVideo = true;
	InitAudio(nullptr, nullptr, nullptr, param);
	//FIX: when app exited, the fmtCtx was freed, so need notify decode thread to stop decode and exit.
	std::jthread(RequestPacket, std::ref(videoParam)).detach();
	std::jthread(RequestFrame, std::ref(videoParam)).detach();
	const int client_width = videoParam.width / 2;
	const int client_height = videoParam.height / 2;
	std::jthread(RequestMediaPacket, std::ref(param)).detach();
	std::jthread(RequestVideoFrame, std::ref(param)).detach();
	const int client_width = param.videoParam.width / 2;
	const int client_height = param.videoParam.height / 2;
	window = SDL_CreateWindow(
		"MP",
		SDL_WINDOWPOS_UNDEFINED,
@@ -124,10 +158,10 @@ void InitImg(SDL_Window*& window, const char* filepath, SDL_Renderer*& renderer,
	texture = SDL_CreateTextureFromSurface(renderer, surface);
}

void OpenglRenderVideo(VideoParam& videoParam, const OpenglVideoParam& openglVideoParam, ShaderService* shaderService)
void OpenglRenderVideo(MediaParam& param, const OpenglVideoParam& openglVideoParam, ShaderService* shaderService)
{
	AVFrame* frame = av_frame_alloc();
	videoParam.frameQueue.pop(frame, true, videoParam.quit);
	param.videoParam.frameQueue.pop(frame, true, param.videoParam.quit);
	// TODO: TIMER
	glBindTexture(GL_TEXTURE_2D, openglVideoParam.texs[0]);
	glPixelStoref(GL_UNPACK_ROW_LENGTH, static_cast<float>(frame->linesize[0]));
@@ -151,7 +185,7 @@ void OpenglRenderVideo(VideoParam& videoParam, const OpenglVideoParam& openglVid
	glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
}

int main(int argc, char** const argv)
int main(int argc, char** argv)
{
	// Check File
	const char* targetFilepath = argv[1];
@@ -169,9 +203,8 @@ int main(int argc, char** const argv)

	// INIT

	int client_width, client_height;
	SDL_Window* window = nullptr;
	VideoParam videoParam{};
	MediaParam mediaParam{};
	OpenglVideoParam openglVideoParam{};
	ShaderService* shaderService = nullptr;
	SDL_Surface* surface = nullptr;
@@ -187,8 +220,8 @@ int main(int argc, char** const argv)
	{
	case FileType::VIDEO:
	{
		InitVideo(window, targetFilepath, videoParam, openglVideoParam, shaderService);
		const auto stream_frame_rate = videoParam.fmtCtx->streams[videoParam.videoStreamIndex]->avg_frame_rate;
		InitVideo(window, targetFilepath, mediaParam, openglVideoParam, shaderService);
		const auto stream_frame_rate = mediaParam.videoParam.stream->avg_frame_rate;
		framerate = static_cast<double>(stream_frame_rate.den) / stream_frame_rate.num;
		break;
	}
@@ -197,8 +230,9 @@ int main(int argc, char** const argv)
		InitImg(window, targetFilepath, renderer, surface, texture);
		break;
	}
	case FileType::MUSIC:
	case FileType::AUDIO:
	{
		InitAudio(window, renderer, targetFilepath, mediaParam);
		break;
	}
	case FileType::ERRORTYPE:
@@ -236,22 +270,23 @@ int main(int argc, char** const argv)
		switch (fileType)
		{
		case FileType::VIDEO:
			OpenglRenderVideo(videoParam, openglVideoParam, shaderService);
			OpenglRenderVideo(mediaParam, openglVideoParam, shaderService);
			SDL_GL_SwapWindow(window);
			std::this_thread::sleep_until(current_time + std::chrono::milliseconds(30));
			std::this_thread::sleep_until(current_time + std::chrono::milliseconds(static_cast<int>(framerate * 1000)));
			current_time = std::chrono::system_clock::now();
			cout << SDL_GetTicks() << '\n';
			break;
		case FileType::IMG:
			RenderPicture(window, renderer, texture);
			break;
		case FileType::AUDIO:
			break;
		default:
			break;
		}
	}

	avcodec_close(videoParam.codecCtx);
	avformat_close_input(&(videoParam.fmtCtx));
	avcodec_close(mediaParam.videoParam.codecCtx);
	avformat_close_input(&(mediaParam.fmtCtx));
	SDL_GL_DeleteContext(openglVideoParam.glContext);
	SDL_DestroyWindow(window);
	SDL_Quit();
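One change worth calling out in the render loop above: the fixed 30 ms sleep is replaced by a delay derived from the stream's avg_frame_rate, where the variable named framerate actually holds the frame duration in seconds (den / num). A small sketch of that calculation with a hypothetical 25 fps stream:

```cpp
#include <chrono>
#include <iostream>

int main() {
    // avg_frame_rate is an AVRational (num/den); 25/1 means 25 fps (example value).
    const int num = 25;
    const int den = 1;

    // As in the diff, "framerate" is really seconds per frame: den / num.
    const double frameDuration = static_cast<double>(den) / num;               // 0.04 s

    // The render loop sleeps for this many whole milliseconds between frames.
    const auto delay =
        std::chrono::milliseconds(static_cast<int>(frameDuration * 1000));     // 40 ms

    std::cout << delay.count() << " ms per frame\n";
    return 0;
}
```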
							
								
								
									
160  src/audioDecoder.cc  Normal file
@@ -0,0 +1,160 @@
#include "audioDecoder.h"

#include <iostream>
#include <SDL2/SDL_audio.h>
#include <SDL2/SDL_stdinc.h>
#include <SDL2/SDL.h>

extern "C" {
#include "libswresample/swresample.h"
#include "fftw3.h"
}

int RequestAudioFrame(AudioParam& param, uint8_t* audioBuffer, int bufSize, SDL_Window* window, SDL_Renderer* renderer)
{
	AVFrame* frame = av_frame_alloc();
	int dataSize = 0;
	AVPacket packet;
	SwrContext* swrCtx = nullptr;
	if (param.quit) {
		return -1;
	}
	if (!param.packetQueue.pop(&packet, true)) {
		return -1;
	}
	if (packet.pts != AV_NOPTS_VALUE)
	{
		param.currentTime = av_q2d(param.stream->time_base) * packet.pts;
	}
	int ret = avcodec_send_packet(param.codecCtx, &packet);
	if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
		return -1;
	}
	ret = avcodec_receive_frame(param.codecCtx, frame);
	if (ret < 0 && ret != AVERROR_EOF) {
		av_frame_unref(frame);
		return -1;
	}
	if (frame->best_effort_timestamp == AV_NOPTS_VALUE)
	{
		av_frame_unref(frame);
		return -1;
	}

	if (frame->ch_layout.nb_channels > 0) {
		av_channel_layout_default(&frame->ch_layout, frame->ch_layout.nb_channels);
	}
	int nbSamples = frame->nb_samples;
	uint8_t* audioData = frame->data[0];
	fftw_complex* fftwInput = static_cast<fftw_complex*>(fftw_malloc(sizeof(fftw_complex) * frame->nb_samples));
	fftw_complex* fftwOutput = static_cast<fftw_complex*>(fftw_malloc(sizeof(fftw_complex) * frame->nb_samples));

	for (int i = 0; i < nbSamples; i++)
	{
		fftwInput[i][0] = audioData[i];
		fftwInput[i][1] = .0f;
	}
	fftw_plan fftwPlan = fftw_plan_dft_1d(nbSamples, fftwInput, fftwOutput, FFTW_FORWARD, FFTW_ESTIMATE);
	fftw_execute(fftwPlan);

	for (int i = 0; i < nbSamples; i++)
	{
		//double magnitude = sqrt(fftwOutput[i][0] * fftwOutput[i][0] + fftwOutput[i][1] * fftwOutput[i][1]);
		//double phase = atan2(fftwOutput[i][1], fftwOutput[i][0]);
		//std::cout << "mag: " << magnitude << "\n phase: " << phase << "\n";
	}

	constexpr AVSampleFormat dstFormat = AV_SAMPLE_FMT_S16;
	swr_alloc_set_opts2(&swrCtx, &frame->ch_layout, dstFormat, frame->sample_rate, &frame->ch_layout, static_cast<AVSampleFormat>(frame->format), frame->sample_rate, 0, nullptr);
	if (!swrCtx || swr_init(swrCtx) < 0) {
		av_frame_unref(frame);
		return -1;
	}

	const uint64_t dstNbSamples = av_rescale_rnd(swr_get_delay(swrCtx, frame->sample_rate) + frame->nb_samples, frame->sample_rate, frame->sample_rate, static_cast<AVRounding>(1));
	const int nb = swr_convert(swrCtx, &audioBuffer, static_cast<int>(dstNbSamples), const_cast<const uint8_t**>(frame->data), frame->nb_samples);
	dataSize = frame->ch_layout.nb_channels * nb * av_get_bytes_per_sample(dstFormat);

	//TODO: render wave
	if (!param.isVideo) {
		SDL_SetRenderDrawColor(renderer, 0x00, 0x00, 0x00, 0xff);
		SDL_RenderClear(renderer);
		SDL_SetRenderDrawColor(renderer, 0xff, 0xff, 0xff, 0xff);
		SDL_RenderDrawLine(renderer, 0, 0, 300, 300);
		SDL_RenderPresent(renderer);
	}
	param.currentTime += static_cast<double>(dataSize) / (2 * (param.codecCtx->ch_layout.nb_channels) / param.codecCtx->sample_rate);
	av_frame_free(&frame);
	swr_free(&swrCtx);
	return dataSize;
}

void audioCallback(void* userdata, uint8_t* stream, int len) {
	const auto callbackParam = *static_cast<std::tuple<SDL_Window*, SDL_Renderer*, AudioParam*>*>(userdata);
	const auto param = std::get<2>(callbackParam);
	SDL_memset(stream, 0, len);
	while (len > 0)
	{
		if (param->bufferIndex >= param->bufferSize)
		{
			const int audioSize = RequestAudioFrame(*param, param->buffer, sizeof(param->buffer), std::get<0>(callbackParam), std::get<1>(callbackParam));
			if (audioSize < 0)
			{
				param->bufferSize = 0;
				memset(param->buffer, 0, param->bufferSize);
			}
			else
			{
				param->bufferSize = audioSize;
			}
			param->bufferIndex = 0;
		}
		int len1 = static_cast<int>(param->bufferSize - param->bufferIndex);
		if (len1 > len)
			len1 = len;

		SDL_MixAudio(stream, param->buffer + param->bufferIndex, len1, SDL_MIX_MAXVOLUME);
		len -= len1;
		stream += len1;
		param->bufferIndex += len1;
	}
}

void RequestAudioPacket(MediaParam& param) {
	const auto& fmtCtx = param.fmtCtx;
	const auto& audioStreamIndex = param.audioParam.audioStreamIndex;

	AVPacket* packet = av_packet_alloc();

	while (true) {
		if (param.audioParam.packetQueue.isFill()) {
			std::this_thread::sleep_for(100ms);
			continue;
		}
		const int ret = av_read_frame(fmtCtx, packet);
		if (param.audioParam.eof) {
			std::this_thread::sleep_for(100ms);
			av_packet_unref(packet);
			return;
		}
		if (ret == 0) {
			if (packet->stream_index == audioStreamIndex) {
				param.audioParam.packetQueue.push(packet);
				av_packet_unref(packet);
			}
			else if (ret == AVERROR_EOF)
			{
				param.audioParam.eof = true;
				av_packet_unref(packet);
				break;
			}
			else {
				av_packet_unref(packet);
			}
		}
		else if (param.fmtCtx->pb->error == 0) {
			std::this_thread::sleep_for(100ms);
		}
	}
	av_packet_unref(packet);
}
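RequestAudioFrame above feeds each decoded frame's raw samples into a forward 1-D FFTW transform (the magnitude/phase computation is still commented out, presumably groundwork for the planned wave rendering). A self-contained sketch of that FFTW pattern, using a synthetic tone instead of decoder output and adding the cleanup calls the diff omits:

```cpp
// Sketch only: compile with something like `g++ fft.cc -lfftw3` (assumed setup).
#include <cmath>
#include <cstdio>
#include <fftw3.h>

int main() {
    const int n = 1024;   // samples per analysis window (example size)
    fftw_complex* in  = static_cast<fftw_complex*>(fftw_malloc(sizeof(fftw_complex) * n));
    fftw_complex* out = static_cast<fftw_complex*>(fftw_malloc(sizeof(fftw_complex) * n));

    // Fill the real part with a synthetic tone; the decoder uses frame->data[0] here.
    const double pi = 3.14159265358979323846;
    for (int i = 0; i < n; ++i) {
        in[i][0] = std::sin(2.0 * pi * 8.0 * i / n);   // real part
        in[i][1] = 0.0;                                // imaginary part
    }

    fftw_plan plan = fftw_plan_dft_1d(n, in, out, FFTW_FORWARD, FFTW_ESTIMATE);
    fftw_execute(plan);

    // Per-bin magnitude and phase, as in the commented-out lines of RequestAudioFrame.
    for (int i = 0; i < 10; ++i) {
        const double mag   = std::sqrt(out[i][0] * out[i][0] + out[i][1] * out[i][1]);
        const double phase = std::atan2(out[i][1], out[i][0]);
        std::printf("bin %d: mag %.3f phase %.3f\n", i, mag, phase);
    }

    fftw_destroy_plan(plan);
    fftw_free(in);
    fftw_free(out);
    return 0;
}
```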
@@ -1,9 +1,9 @@
#include "decoder.h"
#include "mediaDecoder.h"
#include <thread>
#include <chrono>
using namespace std::literals::chrono_literals;

void InitDecoder(const char* filepath, VideoParam& param) {
void InitDecoder(const char* filepath, MediaParam& param) {
	AVFormatContext* fmtCtx = nullptr;
	AVCodecContext* codecFmt = nullptr;
	auto ret = avformat_open_input(&fmtCtx, filepath, NULL, NULL);
@@ -13,43 +13,60 @@ void InitDecoder(const char* filepath, VideoParam& param) {
		const auto stream = fmtCtx->streams[i];
		const auto codec = avcodec_find_decoder(stream->codecpar->codec_id);
		if (codec->type == AVMEDIA_TYPE_VIDEO) {
			param.videoStreamIndex = i;
			param.videoParam.videoStreamIndex = i;
			codecFmt = avcodec_alloc_context3(codec);
			avcodec_parameters_to_context(codecFmt, stream->codecpar);
			avcodec_open2(codecFmt, codec, nullptr);
			param.videoParam.codecCtx = codecFmt;
			param.videoParam.stream = stream;
			param.videoParam.width = codecFmt->width;
			param.videoParam.height = codecFmt->height;
			param.videoParam.totalTime = av_q2d(stream->avg_frame_rate) * stream->duration;
		}
		else if (codec->type == AVMEDIA_TYPE_AUDIO)
		{
			param.audioParam.audioStreamIndex = i;
			codecFmt = avcodec_alloc_context3(codec);
			avcodec_parameters_to_context(codecFmt, stream->codecpar);
			avcodec_open2(codecFmt, codec, nullptr);
			param.audioParam.codecCtx = codecFmt;
			param.audioParam.stream = stream;
		}
	}

	param.codecCtx = codecFmt;
	param.fmtCtx = fmtCtx;
	param.width = codecFmt->width;
	param.height = codecFmt->height;
}

void RequestPacket(VideoParam& param) {
void RequestMediaPacket(MediaParam& param) {
	const auto& fmtCtx = param.fmtCtx;
	const auto& videoStreamIndex = param.videoStreamIndex;

	const auto& videoStreamIndex = param.videoParam.videoStreamIndex;
	const auto& audioStreamIndex = param.audioParam.audioStreamIndex;
	AVPacket* packet = av_packet_alloc();

	while (true) {
		if (param.packetQueue.isFill()) {
		if (param.videoParam.packetQueue.isFill()) {
			std::this_thread::sleep_for(100ms);
			continue;
		}
		//FIX:
		const int ret = av_read_frame(fmtCtx, packet);
		if (param.eof) {
		if (param.videoParam.eof) {
			std::this_thread::sleep_for(100ms);
			av_packet_unref(packet);
			return;
		}
		if (ret == 0) {
			if (packet->stream_index == videoStreamIndex) {
				param.packetQueue.push(packet);
				param.videoParam.packetQueue.push(packet);
				av_packet_unref(packet);
			}
			else if (packet->stream_index == audioStreamIndex)
			{
				param.audioParam.packetQueue.push(packet);
				av_packet_unref(packet);
			}
			else if (ret == AVERROR_EOF)
			{
				param.eof = true;
				param.videoParam.eof = true;
				av_packet_unref(packet);
				break;
			}
@@ -64,21 +81,17 @@ void RequestPacket(VideoParam& param) {
	av_packet_unref(packet);
}

void RequestFrame(VideoParam& param) {
	const auto& fmtCtx = param.fmtCtx;
	const auto& codecCtx = param.codecCtx;
	const auto& videoStreamIndex = param.videoStreamIndex;

void RequestVideoFrame(MediaParam& param) {
	const auto& codecCtx = param.videoParam.codecCtx;
	AVPacket* packet = av_packet_alloc();
	AVFrame* frame = av_frame_alloc();
	//frame->format = AV_PIX_FMT_YUV420P;
	while (true) {
		if (param.frameQueue.isFill()) {
		if (param.videoParam.frameQueue.isFill()) {
			std::this_thread::sleep_for(30ms);
			continue;
		}
		if (!param.packetQueue.pop(packet, true, param.quit)) {
			if (param.quit)
		if (!param.videoParam.packetQueue.pop(packet, true, param.videoParam.quit)) {
			if (param.videoParam.quit)
			{
				av_packet_unref(packet);
				av_frame_unref(frame);
@@ -96,8 +109,11 @@ void RequestFrame(VideoParam& param) {
		{
			continue;
		}
		param.frameQueue.push(frame);
		param.videoParam.frameQueue.push(frame);
		auto time_duration = av_q2d(param.videoParam.stream->time_base);
		auto pts = frame->best_effort_timestamp;
		av_frame_unref(frame);
		av_packet_unref(packet);
	}
}
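The new line in RequestVideoFrame above converts a stream's time_base to seconds with av_q2d, the same conversion RequestAudioFrame uses for packet.pts. A worked sketch of that conversion without FFmpeg, assuming a typical 1/90000 video time base:

```cpp
#include <cstdint>
#include <iostream>

int main() {
    // A time_base of 1/90000 is common for video streams (example value).
    const int timeBaseNum = 1;
    const int timeBaseDen = 90000;

    // av_q2d(time_base) is just num / den as a double: the length of one tick.
    const double tick = static_cast<double>(timeBaseNum) / timeBaseDen;

    // A best_effort_timestamp of 270000 ticks is therefore 3.0 seconds of media
    // time, which is what gets stored into currentTime on the audio side.
    const int64_t pts = 270000;
    std::cout << tick * static_cast<double>(pts) << " s\n";   // 3
    return 0;
}
```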
							
								
								
									
0  src/tempCodeRunnerFile.cc  Normal file