Mirror of https://github.com/PabloMK7/citra (synced 2024-11-15 05:08:23 +00:00)
Merge pull request #4602 from zhaowenlan1779/video-dump-reborn
Implement dumping audio+video to video files

Commit e18c7ee78f: 26 changed files with 1137 additions and 38 deletions

@@ -38,7 +38,7 @@ matrix:
 after_success: "./.travis/macos/upload.sh"
 cache: ccache
 - os: linux
-env: NAME="linux build (frozen versions of dependencies)"
+env: NAME="linux build (debug, frozen versions of dependencies, no additional CMake flags)"
 sudo: required
 dist: trusty
 services: docker
@@ -3,7 +3,7 @@
 cd /citra

 mkdir build && cd build
-cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=/usr/lib/ccache/gcc -DCMAKE_CXX_COMPILER=/usr/lib/ccache/g++ -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${ENABLE_COMPATIBILITY_REPORTING:-"OFF"} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON
+cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCMAKE_C_COMPILER=/usr/lib/ccache/gcc -DCMAKE_CXX_COMPILER=/usr/lib/ccache/g++
 ninja

 ctest -VV -C Release
@@ -5,7 +5,7 @@ cd /citra
 echo 'max_size = 3.0G' > "$HOME/.ccache/ccache.conf"

 mkdir build && cd build
-cmake .. -G Ninja -DCMAKE_TOOLCHAIN_FILE="$(pwd)/../CMakeModules/MinGWCross.cmake" -DUSE_CCACHE=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${ENABLE_COMPATIBILITY_REPORTING:-"OFF"} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON
+cmake .. -G Ninja -DCMAKE_TOOLCHAIN_FILE="$(pwd)/../CMakeModules/MinGWCross.cmake" -DUSE_CCACHE=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${ENABLE_COMPATIBILITY_REPORTING:-"OFF"} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON -DENABLE_FFMPEG=ON -DCMAKE_NO_SYSTEM_FROM_IMPORTED=TRUE
 ninja

 echo "Tests skipped"
@@ -3,7 +3,7 @@
 cd /citra

 mkdir build && cd build
-cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=/usr/lib/ccache/gcc -DCMAKE_CXX_COMPILER=/usr/lib/ccache/g++ -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${ENABLE_COMPATIBILITY_REPORTING:-"OFF"} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON
+cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=/usr/lib/ccache/gcc -DCMAKE_CXX_COMPILER=/usr/lib/ccache/g++ -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${ENABLE_COMPATIBILITY_REPORTING:-"OFF"} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_FFMPEG=ON
 ninja

 ctest -VV -C Release
@@ -21,10 +21,11 @@ option(ENABLE_WEB_SERVICE "Enable web services (telemetry, etc.)" ON)
 option(ENABLE_CUBEB "Enables the cubeb audio backend" ON)

 option(ENABLE_FFMPEG "Enable FFmpeg decoder/encoder" OFF)
+CMAKE_DEPENDENT_OPTION(CITRA_USE_BUNDLED_FFMPEG "Download bundled FFmpeg binaries" ON "ENABLE_FFMPEG;MSVC" OFF)

 option(USE_DISCORD_PRESENCE "Enables Discord Rich Presence" OFF)

-CMAKE_DEPENDENT_OPTION(ENABLE_MF "Use Media Foundation decoder" ON "WIN32;NOT ENABLE_FFMPEG" OFF)
+CMAKE_DEPENDENT_OPTION(ENABLE_MF "Use Media Foundation decoder (preferred over FFmpeg)" ON "WIN32" OFF)

 if(NOT EXISTS ${PROJECT_SOURCE_DIR}/.git/hooks/pre-commit)
 message(STATUS "Copying pre-commit hook")
@@ -189,26 +190,23 @@ endif()
 if (ENABLE_FFMPEG)
 if (CITRA_USE_BUNDLED_FFMPEG)
 if ((MSVC_VERSION GREATER_EQUAL 1910 AND MSVC_VERSION LESS 1930) AND ARCHITECTURE_x86_64)
-set(FFmpeg_VER "ffmpeg-4.0.2-msvc")
+set(FFmpeg_VER "ffmpeg-4.1-win64")
 else()
 message(FATAL_ERROR "No bundled FFmpeg binaries for your toolchain. Disable CITRA_USE_BUNDLED_FFMPEG and provide your own.")
 endif()

 if (DEFINED FFmpeg_VER)
 download_bundled_external("ffmpeg/" ${FFmpeg_VER} FFmpeg_PREFIX)
-set(FFMPEG_DIR "${FFmpeg_PREFIX}/../")
-set(FFMPEG_FOUND YES)
+set(FFMPEG_DIR "${FFmpeg_PREFIX}")
 endif()
-else()
-find_package(FFmpeg REQUIRED COMPONENTS avcodec)
-if ("${FFmpeg_avcodec_VERSION}" VERSION_LESS "57.48.101")
-message(FATAL_ERROR "Found version for libavcodec is too low. The required version is at least 57.48.101 (included in FFmpeg 3.1 and later).")
-else()
-set(FFMPEG_FOUND YES)
-endif()
 endif()
-else()
-set(FFMPEG_FOUND NO)
+
+find_package(FFmpeg REQUIRED COMPONENTS avcodec avformat avutil swscale swresample)
+if ("${FFmpeg_avcodec_VERSION}" VERSION_LESS "57.48.101")
+message(FATAL_ERROR "Found version for libavcodec is too low. The required version is at least 57.48.101 (included in FFmpeg 3.1 and later).")
+endif()
+
+add_definitions(-DENABLE_FFMPEG)
 endif()

 # Platform-specific library requirements

CMakeModules/CopyCitraFFmpegDeps.cmake (new file, 11 lines)
function(copy_citra_FFmpeg_deps target_dir)
    include(WindowsCopyFiles)
    set(DLL_DEST "${CMAKE_BINARY_DIR}/bin/$<CONFIG>/")
    windows_copy_files(${target_dir} ${FFMPEG_DIR}/bin ${DLL_DEST}
        avcodec*.dll
        avformat*.dll
        avutil*.dll
        swresample*.dll
        swscale*.dll
    )
endfunction(copy_citra_FFmpeg_deps)
@@ -43,9 +43,9 @@ before_build:
 $COMPAT = if ($env:ENABLE_COMPATIBILITY_REPORTING -eq $null) {0} else {$env:ENABLE_COMPATIBILITY_REPORTING}
 if ($env:BUILD_TYPE -eq 'msvc') {
 # redirect stderr and change the exit code to prevent powershell from cancelling the build if cmake prints a warning
-cmd /C 'cmake -G "Visual Studio 15 2017 Win64" -DCITRA_USE_BUNDLED_QT=1 -DCITRA_USE_BUNDLED_SDL2=1 -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${COMPAT} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON .. 2>&1 && exit 0'
+cmd /C 'cmake -G "Visual Studio 15 2017 Win64" -DCITRA_USE_BUNDLED_QT=1 -DCITRA_USE_BUNDLED_SDL2=1 -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${COMPAT} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON -DENABLE_FFMPEG=ON .. 2>&1 && exit 0'
 } else {
-C:\msys64\usr\bin\bash.exe -lc "cmake -G 'MSYS Makefiles' -DCMAKE_BUILD_TYPE=Release -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${COMPAT} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON .. 2>&1"
+C:\msys64\usr\bin\bash.exe -lc "cmake -G 'MSYS Makefiles' -DCMAKE_BUILD_TYPE=Release -DENABLE_QT_TRANSLATION=ON -DCITRA_ENABLE_COMPATIBILITY_REPORTING=${COMPAT} -DENABLE_COMPATIBILITY_LIST_DOWNLOAD=ON -DUSE_DISCORD_PRESENCE=ON -DENABLE_MF=ON -DENABLE_FFMPEG=ON .. 2>&1"
 }
 - cd ..

@@ -31,8 +31,6 @@ add_library(audio_core STATIC

 $<$<BOOL:${SDL2_FOUND}>:sdl2_sink.cpp sdl2_sink.h>
 $<$<BOOL:${ENABLE_CUBEB}>:cubeb_sink.cpp cubeb_sink.h cubeb_input.cpp cubeb_input.h>
-$<$<BOOL:${FFMPEG_FOUND}>:hle/ffmpeg_decoder.cpp hle/ffmpeg_decoder.h hle/ffmpeg_dl.cpp hle/ffmpeg_dl.h>
-$<$<BOOL:${ENABLE_MF}>:hle/wmf_decoder.cpp hle/wmf_decoder.h hle/wmf_decoder_utils.cpp hle/wmf_decoder_utils.h>
 )

 create_target_directory_groups(audio_core)
@@ -40,7 +38,22 @@ create_target_directory_groups(audio_core)
 target_link_libraries(audio_core PUBLIC common core)
 target_link_libraries(audio_core PRIVATE SoundTouch teakra)

-if(FFMPEG_FOUND)
+if(ENABLE_MF)
+target_sources(audio_core PRIVATE
+hle/wmf_decoder.cpp
+hle/wmf_decoder.h
+hle/wmf_decoder_utils.cpp
+hle/wmf_decoder_utils.h
+)
+target_link_libraries(audio_core PRIVATE mf.lib mfplat.lib mfuuid.lib)
+target_compile_definitions(audio_core PUBLIC HAVE_MF)
+elseif(ENABLE_FFMPEG)
+target_sources(audio_core PRIVATE
+hle/ffmpeg_decoder.cpp
+hle/ffmpeg_decoder.h
+hle/ffmpeg_dl.cpp
+hle/ffmpeg_dl.h
+)
 if(UNIX)
 target_link_libraries(audio_core PRIVATE FFmpeg::avcodec)
 else()
@@ -49,11 +62,6 @@ if(FFMPEG_FOUND)
 target_compile_definitions(audio_core PUBLIC HAVE_FFMPEG)
 endif()

-if(ENABLE_MF)
-target_link_libraries(audio_core PRIVATE mf.lib mfplat.lib mfuuid.lib)
-target_compile_definitions(audio_core PUBLIC HAVE_MF)
-endif()
-
 if(SDL2_FOUND)
 target_link_libraries(audio_core PRIVATE SDL2)
 target_compile_definitions(audio_core PRIVATE HAVE_SDL2)
@@ -7,6 +7,8 @@
 #include "audio_core/sink.h"
 #include "audio_core/sink_details.h"
 #include "common/assert.h"
+#include "core/core.h"
+#include "core/dumping/backend.h"
 #include "core/settings.h"

 namespace AudioCore {
@@ -41,6 +43,10 @@ void DspInterface::OutputFrame(StereoFrame16& frame) {
 return;

 fifo.Push(frame.data(), frame.size());
+
+if (Core::System::GetInstance().VideoDumper().IsDumping()) {
+Core::System::GetInstance().VideoDumper().AddAudioFrame(frame);
+}
 }

 void DspInterface::OutputSample(std::array<s16, 2> sample) {
@@ -48,6 +54,10 @@ void DspInterface::OutputSample(std::array<s16, 2> sample) {
 return;

 fifo.Push(&sample, 1);
+
+if (Core::System::GetInstance().VideoDumper().IsDumping()) {
+Core::System::GetInstance().VideoDumper().AddAudioSample(sample);
+}
 }

 void DspInterface::OutputCallback(s16* buffer, std::size_t num_frames) {
@@ -30,8 +30,10 @@
 #include "common/scope_exit.h"
 #include "common/string_util.h"
 #include "core/core.h"
+#include "core/dumping/backend.h"
 #include "core/file_sys/cia_container.h"
 #include "core/frontend/applets/default_applets.h"
+#include "core/frontend/framebuffer_layout.h"
 #include "core/gdbstub/gdbstub.h"
 #include "core/hle/service/am/am.h"
 #include "core/hle/service/cfg/cfg.h"
@@ -39,6 +41,7 @@
 #include "core/movie.h"
 #include "core/settings.h"
 #include "network/network.h"
+#include "video_core/video_core.h"

 #undef _UNICODE
 #include <getopt.h>
@@ -62,6 +65,7 @@ static void PrintHelp(const char* argv0) {
 " Nickname, password, address and port for multiplayer\n"
 "-r, --movie-record=[file] Record a movie (game inputs) to the given file\n"
 "-p, --movie-play=[file] Playback the movie (game inputs) from the given file\n"
+"-d, --dump-video=[file] Dumps audio and video to the given video file\n"
 "-f, --fullscreen Start in fullscreen mode\n"
 "-h, --help Display this help and exit\n"
 "-v, --version Output version information and exit\n";
@@ -187,6 +191,7 @@ int main(int argc, char** argv) {
 u32 gdb_port = static_cast<u32>(Settings::values.gdbstub_port);
 std::string movie_record;
 std::string movie_play;
+std::string dump_video;

 InitializeLogging();

@@ -210,15 +215,11 @@
 u16 port = Network::DefaultRoomPort;

 static struct option long_options[] = {
-{"gdbport", required_argument, 0, 'g'},
-{"install", required_argument, 0, 'i'},
-{"multiplayer", required_argument, 0, 'm'},
-{"movie-record", required_argument, 0, 'r'},
-{"movie-play", required_argument, 0, 'p'},
-{"fullscreen", no_argument, 0, 'f'},
-{"help", no_argument, 0, 'h'},
-{"version", no_argument, 0, 'v'},
-{0, 0, 0, 0},
+{"gdbport", required_argument, 0, 'g'}, {"install", required_argument, 0, 'i'},
+{"multiplayer", required_argument, 0, 'm'}, {"movie-record", required_argument, 0, 'r'},
+{"movie-play", required_argument, 0, 'p'}, {"dump-video", required_argument, 0, 'd'},
+{"fullscreen", no_argument, 0, 'f'}, {"help", no_argument, 0, 'h'},
+{"version", no_argument, 0, 'v'}, {0, 0, 0, 0},
 };

 while (optind < argc) {
@@ -285,6 +286,9 @@
 case 'p':
 movie_play = optarg;
 break;
+case 'd':
+dump_video = optarg;
+break;
 case 'f':
 fullscreen = true;
 LOG_INFO(Frontend, "Starting in fullscreen mode...");
@@ -399,12 +403,20 @@
 if (!movie_record.empty()) {
 Core::Movie::GetInstance().StartRecording(movie_record);
 }
+if (!dump_video.empty()) {
+Layout::FramebufferLayout layout{
+Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
+system.VideoDumper().StartDumping(dump_video, "webm", layout);
+}

 while (emu_window->IsOpen()) {
 system.RunLoop();
 }

 Core::Movie::GetInstance().Shutdown();
+if (system.VideoDumper().IsDumping()) {
+system.VideoDumper().StopDumping();
+}

 detached_tasks.WaitForAllTasks();
 return 0;
@@ -265,4 +265,9 @@ if (MSVC)
 include(CopyCitraSDLDeps)
 copy_citra_Qt5_deps(citra-qt)
 copy_citra_SDL_deps(citra-qt)
+
+if (ENABLE_FFMPEG)
+include(CopyCitraFFmpegDeps)
+copy_citra_FFmpeg_deps(citra-qt)
+endif()
 endif()
@@ -326,6 +326,7 @@ void Config::ReadValues() {
 UISettings::values.movie_record_path = ReadSetting("movieRecordPath").toString();
 UISettings::values.movie_playback_path = ReadSetting("moviePlaybackPath").toString();
 UISettings::values.screenshot_path = ReadSetting("screenshotPath").toString();
+UISettings::values.video_dumping_path = ReadSetting("videoDumpingPath").toString();
 UISettings::values.game_dir_deprecated = ReadSetting("gameListRootDir", ".").toString();
 UISettings::values.game_dir_deprecated_deepscan =
 ReadSetting("gameListDeepScan", false).toBool();
@@ -594,6 +595,7 @@ void Config::SaveValues() {
 WriteSetting("movieRecordPath", UISettings::values.movie_record_path);
 WriteSetting("moviePlaybackPath", UISettings::values.movie_playback_path);
 WriteSetting("screenshotPath", UISettings::values.screenshot_path);
+WriteSetting("videoDumpingPath", UISettings::values.video_dumping_path);
 qt_config->beginWriteArray("gamedirs");
 for (int i = 0; i < UISettings::values.game_dirs.size(); ++i) {
 qt_config->setArrayIndex(i);
@@ -59,6 +59,7 @@
 #include "common/scm_rev.h"
 #include "common/scope_exit.h"
 #include "core/core.h"
+#include "core/dumping/backend.h"
 #include "core/file_sys/archive_extsavedata.h"
 #include "core/file_sys/archive_source_sd_savedata.h"
 #include "core/frontend/applets/default_applets.h"
@@ -69,6 +70,8 @@
 #include "core/movie.h"
 #include "core/settings.h"
 #include "game_list_p.h"
+#include "video_core/renderer_base.h"
+#include "video_core/video_core.h"

 #ifdef USE_DISCORD_PRESENCE
 #include "citra_qt/discord_impl.h"
@@ -603,6 +606,17 @@ void GMainWindow::ConnectMenuEvents() {
 connect(ui.action_Capture_Screenshot, &QAction::triggered, this,
 &GMainWindow::OnCaptureScreenshot);

+#ifndef ENABLE_FFMPEG
+ui.action_Dump_Video->setEnabled(false);
+#endif
+connect(ui.action_Dump_Video, &QAction::triggered, [this] {
+if (ui.action_Dump_Video->isChecked()) {
+OnStartVideoDumping();
+} else {
+OnStopVideoDumping();
+}
+});
+
 // Help
 connect(ui.action_Open_Citra_Folder, &QAction::triggered, this,
 &GMainWindow::OnOpenCitraFolder);
@@ -864,10 +878,25 @@ void GMainWindow::BootGame(const QString& filename) {
 if (ui.action_Fullscreen->isChecked()) {
 ShowFullscreen();
 }

+if (video_dumping_on_start) {
+Layout::FramebufferLayout layout{
+Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
+Core::System::GetInstance().VideoDumper().StartDumping(video_dumping_path.toStdString(),
+"webm", layout);
+video_dumping_on_start = false;
+video_dumping_path.clear();
+}
 OnStartGame();
 }

 void GMainWindow::ShutdownGame() {
+if (Core::System::GetInstance().VideoDumper().IsDumping()) {
+game_shutdown_delayed = true;
+OnStopVideoDumping();
+return;
+}
+
 discord_rpc->Pause();
 OnStopRecordingPlayback();
 emu_thread->RequestStop();
@@ -1597,6 +1626,51 @@ void GMainWindow::OnCaptureScreenshot() {
 OnStartGame();
 }

+void GMainWindow::OnStartVideoDumping() {
+const QString path = QFileDialog::getSaveFileName(
+this, tr("Save Video"), UISettings::values.video_dumping_path, tr("WebM Videos (*.webm)"));
+if (path.isEmpty()) {
+ui.action_Dump_Video->setChecked(false);
+return;
+}
+UISettings::values.video_dumping_path = QFileInfo(path).path();
+if (emulation_running) {
+Layout::FramebufferLayout layout{
+Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
+Core::System::GetInstance().VideoDumper().StartDumping(path.toStdString(), "webm", layout);
+} else {
+video_dumping_on_start = true;
+video_dumping_path = path;
+}
+}
+
+void GMainWindow::OnStopVideoDumping() {
+ui.action_Dump_Video->setChecked(false);
+
+if (video_dumping_on_start) {
+video_dumping_on_start = false;
+video_dumping_path.clear();
+} else {
+const bool was_dumping = Core::System::GetInstance().VideoDumper().IsDumping();
+if (!was_dumping)
+return;
+OnPauseGame();
+
+auto future =
+QtConcurrent::run([] { Core::System::GetInstance().VideoDumper().StopDumping(); });
+auto* future_watcher = new QFutureWatcher<void>(this);
+connect(future_watcher, &QFutureWatcher<void>::finished, this, [this] {
+if (game_shutdown_delayed) {
+game_shutdown_delayed = false;
+ShutdownGame();
+} else {
+OnStartGame();
+}
+});
+future_watcher->setFuture(future);
+}
+}
+
 void GMainWindow::UpdateStatusBar() {
 if (emu_thread == nullptr) {
 status_bar_update_timer.stop();
@@ -184,6 +184,8 @@ private slots:
 void OnPlayMovie();
 void OnStopRecordingPlayback();
 void OnCaptureScreenshot();
+void OnStartVideoDumping();
+void OnStopVideoDumping();
 void OnCoreError(Core::System::ResultStatus, std::string);
 /// Called whenever a user selects Help->About Citra
 void OnMenuAboutCitra();
@@ -230,6 +232,12 @@ private:
 bool movie_record_on_start = false;
 QString movie_record_path;

+// Video dumping
+bool video_dumping_on_start = false;
+QString video_dumping_path;
+// Whether game shutdown is delayed due to video dumping
+bool game_shutdown_delayed = false;
+
 // Debugger panes
 ProfilerWidget* profilerWidget;
 MicroProfileDialog* microProfileDialog;
@@ -158,6 +158,7 @@
 <addaction name="menu_Frame_Advance"/>
 <addaction name="separator"/>
 <addaction name="action_Capture_Screenshot"/>
+<addaction name="action_Dump_Video"/>
 </widget>
 <widget class="QMenu" name="menu_Help">
 <property name="title">
@@ -336,6 +337,14 @@
 <string>Capture Screenshot</string>
 </property>
 </action>
+<action name="action_Dump_Video">
+<property name="checkable">
+<bool>true</bool>
+</property>
+<property name="text">
+<string>Dump Video</string>
+</property>
+</action>
 <action name="action_View_Lobby">
 <property name="enabled">
 <bool>true</bool>
@@ -93,6 +93,7 @@ struct Values {
 QString movie_record_path;
 QString movie_playback_path;
 QString screenshot_path;
+QString video_dumping_path;
 QString game_dir_deprecated;
 bool game_dir_deprecated_deepscan;
 QList<UISettings::GameDir> game_dirs;
@@ -36,6 +36,8 @@ add_library(core STATIC
 core.h
 core_timing.cpp
 core_timing.h
+dumping/backend.cpp
+dumping/backend.h
 file_sys/archive_backend.cpp
 file_sys/archive_backend.h
 file_sys/archive_extsavedata.cpp
@@ -444,6 +446,13 @@
 tracer/recorder.h
 )

+if (ENABLE_FFMPEG)
+target_sources(core PRIVATE
+dumping/ffmpeg_backend.cpp
+dumping/ffmpeg_backend.h
+)
+endif()
+
 create_target_directory_groups(core)

 target_link_libraries(core PUBLIC common PRIVATE audio_core network video_core)
@@ -462,3 +471,7 @@ if (ARCHITECTURE_x86_64)
 )
 target_link_libraries(core PRIVATE dynarmic)
 endif()
+
+if (ENABLE_FFMPEG)
+target_link_libraries(core PRIVATE FFmpeg::avcodec FFmpeg::avformat FFmpeg::swscale FFmpeg::swresample FFmpeg::avutil)
+endif()
@@ -16,6 +16,10 @@
 #include "core/cheats/cheats.h"
 #include "core/core.h"
 #include "core/core_timing.h"
+#include "core/dumping/backend.h"
+#ifdef ENABLE_FFMPEG
+#include "core/dumping/ffmpeg_backend.h"
+#endif
 #include "core/gdbstub/gdbstub.h"
 #include "core/hle/kernel/client_port.h"
 #include "core/hle/kernel/kernel.h"
@@ -217,6 +221,12 @@ System::ResultStatus System::Init(Frontend::EmuWindow& emu_window, u32 system_mo
 return result;
 }

+#ifdef ENABLE_FFMPEG
+video_dumper = std::make_unique<VideoDumper::FFmpegBackend>();
+#else
+video_dumper = std::make_unique<VideoDumper::NullBackend>();
+#endif
+
 LOG_DEBUG(Core, "Initialized OK");

 // Reset counters and set time origin to current frame
@@ -274,6 +284,14 @@ const Cheats::CheatEngine& System::CheatEngine() const {
 return *cheat_engine;
 }

+VideoDumper::Backend& System::VideoDumper() {
+return *video_dumper;
+}
+
+const VideoDumper::Backend& System::VideoDumper() const {
+return *video_dumper;
+}
+
 void System::RegisterMiiSelector(std::shared_ptr<Frontend::MiiSelector> mii_selector) {
 registered_mii_selector = std::move(mii_selector);
 }
@@ -306,6 +324,10 @@ void System::Shutdown() {
 timing.reset();
 app_loader.reset();

+if (video_dumper->IsDumping()) {
+video_dumper->StopDumping();
+}
+
 if (auto room_member = Network::GetRoomMember().lock()) {
 Network::GameInfo game_info{};
 room_member->SendGameInfo(game_info);
@@ -49,6 +49,10 @@ namespace Cheats {
 class CheatEngine;
 }

+namespace VideoDumper {
+class Backend;
+}
+
 namespace Core {

 class Timing;
@@ -206,6 +210,12 @@ public:
 /// Gets a const reference to the cheat engine
 const Cheats::CheatEngine& CheatEngine() const;

+/// Gets a reference to the video dumper backend
+VideoDumper::Backend& VideoDumper();
+
+/// Gets a const reference to the video dumper backend
+const VideoDumper::Backend& VideoDumper() const;
+
 PerfStats perf_stats;
 FrameLimiter frame_limiter;

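Taken together, the System accessors above and the SDL/Qt frontend hunks earlier in this diff give a frontend a single entry point into dumping. Below is a minimal sketch of that flow, mirroring what citra.cpp does above; the function name, the keep_running parameter and the output file name are illustrative assumptions, not part of the PR.

```cpp
// Hypothetical frontend-side use of the new dumper accessor (sketch, not part of this PR).
#include <functional>
#include "core/core.h"
#include "core/dumping/backend.h"
#include "core/frontend/framebuffer_layout.h"
#include "video_core/video_core.h"

void DumpOneSession(Core::System& system, const std::function<bool()>& keep_running) {
    // Pick a framebuffer layout matching the current resolution scale,
    // exactly as citra.cpp and citra_qt/main.cpp do in the hunks above.
    const Layout::FramebufferLayout layout{
        Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};

    // The FFmpeg backend only accepts "webm" as the container format.
    if (!system.VideoDumper().StartDumping("dump.webm", "webm", layout)) {
        return;
    }

    while (keep_running()) {
        system.RunLoop(); // frames reach the dumper via the renderer and DspInterface hooks
    }

    if (system.VideoDumper().IsDumping()) {
        system.VideoDumper().StopDumping(); // waits for the encoder to drain its queues
    }
}
```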
@@ -276,6 +286,9 @@ private:
 /// Cheats manager
 std::unique_ptr<Cheats::CheatEngine> cheat_engine;

+/// Video dumper backend
+std::unique_ptr<VideoDumper::Backend> video_dumper;
+
 /// RPC Server for scripting support
 std::unique_ptr<RPC::RPCServer> rpc_server;

src/core/dumping/backend.cpp (new file, 26 lines)
// Copyright 2018 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <cstring>
#include "core/dumping/backend.h"

namespace VideoDumper {

VideoFrame::VideoFrame(std::size_t width_, std::size_t height_, u8* data_)
    : width(width_), height(height_), stride(width * 4), data(width * height * 4) {
    // While copying, rotate the image to put the pixels in correct order
    // (As OpenGL returns pixel data starting from the lowest position)
    for (std::size_t i = 0; i < height; i++) {
        for (std::size_t j = 0; j < width; j++) {
            for (std::size_t k = 0; k < 4; k++) {
                data[i * stride + j * 4 + k] = data_[(height - i - 1) * stride + j * 4 + k];
            }
        }
    }
}

Backend::~Backend() = default;
NullBackend::~NullBackend() = default;

} // namespace VideoDumper

src/core/dumping/backend.h (new file, 59 lines)

// Copyright 2018 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <string>
#include <vector>
#include "audio_core/audio_types.h"
#include "common/common_types.h"
#include "core/frontend/framebuffer_layout.h"

namespace VideoDumper {
/**
 * Frame dump data for a single screen
 * data is in RGB888 format, left to right then top to bottom
 */
class VideoFrame {
public:
    std::size_t width;
    std::size_t height;
    u32 stride;
    std::vector<u8> data;

    VideoFrame(std::size_t width_ = 0, std::size_t height_ = 0, u8* data_ = nullptr);
};

class Backend {
public:
    virtual ~Backend();
    virtual bool StartDumping(const std::string& path, const std::string& format,
                              const Layout::FramebufferLayout& layout) = 0;
    virtual void AddVideoFrame(const VideoFrame& frame) = 0;
    virtual void AddAudioFrame(const AudioCore::StereoFrame16& frame) = 0;
    virtual void AddAudioSample(const std::array<s16, 2>& sample) = 0;
    virtual void StopDumping() = 0;
    virtual bool IsDumping() const = 0;
    virtual Layout::FramebufferLayout GetLayout() const = 0;
};

class NullBackend : public Backend {
public:
    ~NullBackend() override;
    bool StartDumping(const std::string& /*path*/, const std::string& /*format*/,
                      const Layout::FramebufferLayout& /*layout*/) override {
        return false;
    }
    void AddVideoFrame(const VideoFrame& /*frame*/) override {}
    void AddAudioFrame(const AudioCore::StereoFrame16& /*frame*/) override {}
    void AddAudioSample(const std::array<s16, 2>& /*sample*/) override {}
    void StopDumping() override {}
    bool IsDumping() const override {
        return false;
    }
    Layout::FramebufferLayout GetLayout() const override {
        return Layout::FramebufferLayout{};
    }
};
} // namespace VideoDumper

src/core/dumping/ffmpeg_backend.cpp (new file, 530 lines)
@ -0,0 +1,530 @@
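Before the raw listing of the FFmpeg implementation that follows, here is a minimal sketch (not part of the PR) of what any VideoDumper::Backend implementation has to provide, based solely on the interface in backend.h above; the class name and the frame-counting behaviour are illustrative assumptions.

```cpp
// Hypothetical backend that only counts frames/samples; illustrates the Backend contract.
#include <atomic>
#include <cstddef>
#include "core/dumping/backend.h"

namespace VideoDumper {

class CountingBackend final : public Backend {
public:
    ~CountingBackend() override = default;

    bool StartDumping(const std::string& /*path*/, const std::string& /*format*/,
                      const Layout::FramebufferLayout& layout) override {
        layout_ = layout;
        dumping_ = true;
        return true; // a real backend would open its output here and may fail
    }
    void AddVideoFrame(const VideoFrame& /*frame*/) override {
        ++video_frames_;
    }
    void AddAudioFrame(const AudioCore::StereoFrame16& frame) override {
        audio_sample_frames_ += frame.size();
    }
    void AddAudioSample(const std::array<s16, 2>& /*sample*/) override {
        ++audio_sample_frames_;
    }
    void StopDumping() override {
        dumping_ = false;
    }
    bool IsDumping() const override {
        return dumping_;
    }
    Layout::FramebufferLayout GetLayout() const override {
        return layout_;
    }

private:
    Layout::FramebufferLayout layout_{};
    std::atomic<bool> dumping_{false};
    std::size_t video_frames_ = 0;
    std::size_t audio_sample_frames_ = 0;
};

} // namespace VideoDumper
```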
|
||||||
|
// Copyright 2018 Citra Emulator Project
|
||||||
|
// Licensed under GPLv2 or any later version
|
||||||
|
// Refer to the license.txt file included.
|
||||||
|
|
||||||
|
#include "common/assert.h"
|
||||||
|
#include "common/file_util.h"
|
||||||
|
#include "common/logging/log.h"
|
||||||
|
#include "core/dumping/ffmpeg_backend.h"
|
||||||
|
#include "video_core/renderer_base.h"
|
||||||
|
#include "video_core/video_core.h"
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
#include <libavutil/opt.h>
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace VideoDumper {
|
||||||
|
|
||||||
|
void InitializeFFmpegLibraries() {
|
||||||
|
static bool initialized = false;
|
||||||
|
|
||||||
|
if (initialized)
|
||||||
|
return;
|
||||||
|
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 9, 100)
|
||||||
|
av_register_all();
|
||||||
|
#endif
|
||||||
|
avformat_network_init();
|
||||||
|
initialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
FFmpegStream::~FFmpegStream() {
|
||||||
|
Free();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FFmpegStream::Init(AVFormatContext* format_context_) {
|
||||||
|
InitializeFFmpegLibraries();
|
||||||
|
|
||||||
|
format_context = format_context_;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegStream::Free() {
|
||||||
|
codec_context.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegStream::Flush() {
|
||||||
|
SendFrame(nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegStream::WritePacket(AVPacket& packet) {
|
||||||
|
if (packet.pts != static_cast<s64>(AV_NOPTS_VALUE)) {
|
||||||
|
packet.pts = av_rescale_q(packet.pts, codec_context->time_base, stream->time_base);
|
||||||
|
}
|
||||||
|
if (packet.dts != static_cast<s64>(AV_NOPTS_VALUE)) {
|
||||||
|
packet.dts = av_rescale_q(packet.dts, codec_context->time_base, stream->time_base);
|
||||||
|
}
|
||||||
|
packet.stream_index = stream->index;
|
||||||
|
av_interleaved_write_frame(format_context, &packet);
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegStream::SendFrame(AVFrame* frame) {
|
||||||
|
// Initialize packet
|
||||||
|
AVPacket packet;
|
||||||
|
av_init_packet(&packet);
|
||||||
|
packet.data = nullptr;
|
||||||
|
packet.size = 0;
|
||||||
|
|
||||||
|
// Encode frame
|
||||||
|
if (avcodec_send_frame(codec_context.get(), frame) < 0) {
|
||||||
|
LOG_ERROR(Render, "Frame dropped: could not send frame");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
int error = 1;
|
||||||
|
while (error >= 0) {
|
||||||
|
error = avcodec_receive_packet(codec_context.get(), &packet);
|
||||||
|
if (error == AVERROR(EAGAIN) || error == AVERROR_EOF)
|
||||||
|
return;
|
||||||
|
if (error < 0) {
|
||||||
|
LOG_ERROR(Render, "Frame dropped: could not encode audio");
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
// Write frame to video file
|
||||||
|
WritePacket(packet);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
FFmpegVideoStream::~FFmpegVideoStream() {
|
||||||
|
Free();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FFmpegVideoStream::Init(AVFormatContext* format_context, AVOutputFormat* output_format,
|
||||||
|
const Layout::FramebufferLayout& layout_) {
|
||||||
|
|
||||||
|
InitializeFFmpegLibraries();
|
||||||
|
|
||||||
|
if (!FFmpegStream::Init(format_context))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
layout = layout_;
|
||||||
|
frame_count = 0;
|
||||||
|
|
||||||
|
// Initialize video codec
|
||||||
|
// Ensure VP9 codec here, also to avoid patent issues
|
||||||
|
constexpr AVCodecID codec_id = AV_CODEC_ID_VP9;
|
||||||
|
const AVCodec* codec = avcodec_find_encoder(codec_id);
|
||||||
|
codec_context.reset(avcodec_alloc_context3(codec));
|
||||||
|
if (!codec || !codec_context) {
|
||||||
|
LOG_ERROR(Render, "Could not find video encoder or allocate video codec context");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure video codec context
|
||||||
|
codec_context->codec_type = AVMEDIA_TYPE_VIDEO;
|
||||||
|
codec_context->bit_rate = 2500000;
|
||||||
|
codec_context->width = layout.width;
|
||||||
|
codec_context->height = layout.height;
|
||||||
|
codec_context->time_base.num = 1;
|
||||||
|
codec_context->time_base.den = 60;
|
||||||
|
codec_context->gop_size = 12;
|
||||||
|
codec_context->pix_fmt = AV_PIX_FMT_YUV420P;
|
||||||
|
codec_context->thread_count = 8;
|
||||||
|
if (output_format->flags & AVFMT_GLOBALHEADER)
|
||||||
|
codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
|
||||||
|
av_opt_set_int(codec_context.get(), "cpu-used", 5, 0);
|
||||||
|
|
||||||
|
if (avcodec_open2(codec_context.get(), codec, nullptr) < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not open video codec");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create video stream
|
||||||
|
stream = avformat_new_stream(format_context, codec);
|
||||||
|
if (!stream || avcodec_parameters_from_context(stream->codecpar, codec_context.get()) < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not create video stream");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate frames
|
||||||
|
current_frame.reset(av_frame_alloc());
|
||||||
|
scaled_frame.reset(av_frame_alloc());
|
||||||
|
scaled_frame->format = codec_context->pix_fmt;
|
||||||
|
scaled_frame->width = layout.width;
|
||||||
|
scaled_frame->height = layout.height;
|
||||||
|
if (av_frame_get_buffer(scaled_frame.get(), 1) < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not allocate frame buffer");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create SWS Context
|
||||||
|
auto* context = sws_getCachedContext(
|
||||||
|
sws_context.get(), layout.width, layout.height, pixel_format, layout.width, layout.height,
|
||||||
|
codec_context->pix_fmt, SWS_BICUBIC, nullptr, nullptr, nullptr);
|
||||||
|
if (context != sws_context.get())
|
||||||
|
sws_context.reset(context);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegVideoStream::Free() {
|
||||||
|
FFmpegStream::Free();
|
||||||
|
|
||||||
|
current_frame.reset();
|
||||||
|
scaled_frame.reset();
|
||||||
|
sws_context.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegVideoStream::ProcessFrame(VideoFrame& frame) {
|
||||||
|
if (frame.width != layout.width || frame.height != layout.height) {
|
||||||
|
LOG_ERROR(Render, "Frame dropped: resolution does not match");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Prepare frame
|
||||||
|
current_frame->data[0] = frame.data.data();
|
||||||
|
current_frame->linesize[0] = frame.stride;
|
||||||
|
current_frame->format = pixel_format;
|
||||||
|
current_frame->width = layout.width;
|
||||||
|
current_frame->height = layout.height;
|
||||||
|
|
||||||
|
// Scale the frame
|
||||||
|
if (sws_context) {
|
||||||
|
sws_scale(sws_context.get(), current_frame->data, current_frame->linesize, 0, layout.height,
|
||||||
|
scaled_frame->data, scaled_frame->linesize);
|
||||||
|
}
|
||||||
|
scaled_frame->pts = frame_count++;
|
||||||
|
|
||||||
|
// Encode frame
|
||||||
|
SendFrame(scaled_frame.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
FFmpegAudioStream::~FFmpegAudioStream() {
|
||||||
|
Free();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FFmpegAudioStream::Init(AVFormatContext* format_context) {
|
||||||
|
InitializeFFmpegLibraries();
|
||||||
|
|
||||||
|
if (!FFmpegStream::Init(format_context))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
sample_count = 0;
|
||||||
|
|
||||||
|
// Initialize audio codec
|
||||||
|
constexpr AVCodecID codec_id = AV_CODEC_ID_VORBIS;
|
||||||
|
const AVCodec* codec = avcodec_find_encoder(codec_id);
|
||||||
|
codec_context.reset(avcodec_alloc_context3(codec));
|
||||||
|
if (!codec || !codec_context) {
|
||||||
|
LOG_ERROR(Render, "Could not find audio encoder or allocate audio codec context");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure audio codec context
|
||||||
|
codec_context->codec_type = AVMEDIA_TYPE_AUDIO;
|
||||||
|
codec_context->bit_rate = 64000;
|
||||||
|
codec_context->sample_fmt = codec->sample_fmts[0];
|
||||||
|
codec_context->sample_rate = AudioCore::native_sample_rate;
|
||||||
|
codec_context->channel_layout = AV_CH_LAYOUT_STEREO;
|
||||||
|
codec_context->channels = 2;
|
||||||
|
|
||||||
|
if (avcodec_open2(codec_context.get(), codec, nullptr) < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not open audio codec");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create audio stream
|
||||||
|
stream = avformat_new_stream(format_context, codec);
|
||||||
|
if (!stream || avcodec_parameters_from_context(stream->codecpar, codec_context.get()) < 0) {
|
||||||
|
|
||||||
|
LOG_ERROR(Render, "Could not create audio stream");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate frame
|
||||||
|
audio_frame.reset(av_frame_alloc());
|
||||||
|
audio_frame->format = codec_context->sample_fmt;
|
||||||
|
audio_frame->channel_layout = codec_context->channel_layout;
|
||||||
|
audio_frame->channels = codec_context->channels;
|
||||||
|
|
||||||
|
// Allocate SWR context
|
||||||
|
auto* context =
|
||||||
|
swr_alloc_set_opts(nullptr, codec_context->channel_layout, codec_context->sample_fmt,
|
||||||
|
codec_context->sample_rate, codec_context->channel_layout,
|
||||||
|
AV_SAMPLE_FMT_S16P, AudioCore::native_sample_rate, 0, nullptr);
|
||||||
|
if (!context) {
|
||||||
|
LOG_ERROR(Render, "Could not create SWR context");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
swr_context.reset(context);
|
||||||
|
if (swr_init(swr_context.get()) < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not init SWR context");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate resampled data
|
||||||
|
int error =
|
||||||
|
av_samples_alloc_array_and_samples(&resampled_data, nullptr, codec_context->channels,
|
||||||
|
codec_context->frame_size, codec_context->sample_fmt, 0);
|
||||||
|
if (error < 0) {
|
||||||
|
LOG_ERROR(Render, "Could not allocate samples storage");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegAudioStream::Free() {
|
||||||
|
FFmpegStream::Free();
|
||||||
|
|
||||||
|
audio_frame.reset();
|
||||||
|
swr_context.reset();
|
||||||
|
// Free resampled data
|
||||||
|
if (resampled_data) {
|
||||||
|
av_freep(&resampled_data[0]);
|
||||||
|
}
|
||||||
|
av_freep(&resampled_data);
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegAudioStream::ProcessFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1) {
|
||||||
|
ASSERT_MSG(channel0.size() == channel1.size(),
|
||||||
|
"Frames of the two channels must have the same number of samples");
|
||||||
|
std::array<const u8*, 2> src_data = {reinterpret_cast<u8*>(channel0.data()),
|
||||||
|
reinterpret_cast<u8*>(channel1.data())};
|
||||||
|
if (swr_convert(swr_context.get(), resampled_data, channel0.size(), src_data.data(),
|
||||||
|
channel0.size()) < 0) {
|
||||||
|
|
||||||
|
LOG_ERROR(Render, "Audio frame dropped: Could not resample data");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare frame
|
||||||
|
audio_frame->nb_samples = channel0.size();
|
||||||
|
audio_frame->data[0] = resampled_data[0];
|
||||||
|
audio_frame->data[1] = resampled_data[1];
|
||||||
|
audio_frame->pts = sample_count;
|
||||||
|
sample_count += channel0.size();
|
||||||
|
|
||||||
|
SendFrame(audio_frame.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t FFmpegAudioStream::GetAudioFrameSize() const {
|
||||||
|
ASSERT_MSG(codec_context, "Codec context is not initialized yet!");
|
||||||
|
return codec_context->frame_size;
|
||||||
|
}
|
||||||
|
|
||||||
|
FFmpegMuxer::~FFmpegMuxer() {
|
||||||
|
Free();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FFmpegMuxer::Init(const std::string& path, const std::string& format,
|
||||||
|
const Layout::FramebufferLayout& layout) {
|
||||||
|
|
||||||
|
InitializeFFmpegLibraries();
|
||||||
|
|
||||||
|
if (!FileUtil::CreateFullPath(path)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get output format
|
||||||
|
// Ensure webm here to avoid patent issues
|
||||||
|
ASSERT_MSG(format == "webm", "Only webm is allowed for frame dumping");
|
||||||
|
auto* output_format = av_guess_format(format.c_str(), path.c_str(), "video/webm");
|
||||||
|
if (!output_format) {
|
||||||
|
LOG_ERROR(Render, "Could not get format {}", format);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize format context
|
||||||
|
auto* format_context_raw = format_context.get();
|
||||||
|
if (avformat_alloc_output_context2(&format_context_raw, output_format, nullptr, path.c_str()) <
|
||||||
|
0) {
|
||||||
|
|
||||||
|
LOG_ERROR(Render, "Could not allocate output context");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
format_context.reset(format_context_raw);
|
||||||
|
|
||||||
|
if (!video_stream.Init(format_context.get(), output_format, layout))
|
||||||
|
return false;
|
||||||
|
if (!audio_stream.Init(format_context.get()))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
// Open video file
|
||||||
|
if (avio_open(&format_context->pb, path.c_str(), AVIO_FLAG_WRITE) < 0 ||
|
||||||
|
avformat_write_header(format_context.get(), nullptr)) {
|
||||||
|
|
||||||
|
LOG_ERROR(Render, "Could not open {}", path);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
LOG_INFO(Render, "Dumping frames to {} ({}x{})", path, layout.width, layout.height);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::Free() {
|
||||||
|
video_stream.Free();
|
||||||
|
audio_stream.Free();
|
||||||
|
format_context.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::ProcessVideoFrame(VideoFrame& frame) {
|
||||||
|
video_stream.ProcessFrame(frame);
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::ProcessAudioFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1) {
|
||||||
|
audio_stream.ProcessFrame(channel0, channel1);
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::FlushVideo() {
|
||||||
|
video_stream.Flush();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::FlushAudio() {
|
||||||
|
audio_stream.Flush();
|
||||||
|
}
|
||||||
|
|
||||||
|
std::size_t FFmpegMuxer::GetAudioFrameSize() const {
|
||||||
|
return audio_stream.GetAudioFrameSize();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegMuxer::WriteTrailer() {
|
||||||
|
av_write_trailer(format_context.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
FFmpegBackend::FFmpegBackend() = default;
|
||||||
|
|
||||||
|
FFmpegBackend::~FFmpegBackend() {
|
||||||
|
ASSERT_MSG(!IsDumping(), "Dumping must be stopped first");
|
||||||
|
|
||||||
|
if (video_processing_thread.joinable())
|
||||||
|
video_processing_thread.join();
|
||||||
|
if (audio_processing_thread.joinable())
|
||||||
|
audio_processing_thread.join();
|
||||||
|
ffmpeg.Free();
|
||||||
|
}
|
||||||
|
|
||||||
|
bool FFmpegBackend::StartDumping(const std::string& path, const std::string& format,
|
||||||
|
const Layout::FramebufferLayout& layout) {
|
||||||
|
|
||||||
|
InitializeFFmpegLibraries();
|
||||||
|
|
||||||
|
if (!ffmpeg.Init(path, format, layout)) {
|
||||||
|
ffmpeg.Free();
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
video_layout = layout;
|
||||||
|
|
||||||
|
if (video_processing_thread.joinable())
|
||||||
|
video_processing_thread.join();
|
||||||
|
video_processing_thread = std::thread([&] {
|
||||||
|
event1.Set();
|
||||||
|
while (true) {
|
||||||
|
event2.Wait();
|
||||||
|
current_buffer = (current_buffer + 1) % 2;
|
||||||
|
next_buffer = (current_buffer + 1) % 2;
|
||||||
|
event1.Set();
|
||||||
|
// Process this frame
|
||||||
|
auto& frame = video_frame_buffers[current_buffer];
|
||||||
|
if (frame.width == 0 && frame.height == 0) {
|
||||||
|
// An empty frame marks the end of frame data
|
||||||
|
ffmpeg.FlushVideo();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
ffmpeg.ProcessVideoFrame(frame);
|
||||||
|
}
|
||||||
|
// Finish audio execution first if not done yet
|
||||||
|
if (audio_processing_thread.joinable())
|
||||||
|
audio_processing_thread.join();
|
||||||
|
EndDumping();
|
||||||
|
});
|
||||||
|
|
||||||
|
if (audio_processing_thread.joinable())
|
||||||
|
audio_processing_thread.join();
|
||||||
|
audio_processing_thread = std::thread([&] {
|
||||||
|
VariableAudioFrame channel0, channel1;
|
||||||
|
while (true) {
|
||||||
|
channel0 = audio_frame_queues[0].PopWait();
|
||||||
|
channel1 = audio_frame_queues[1].PopWait();
|
||||||
|
if (channel0.empty()) {
|
||||||
|
// An empty frame marks the end of frame data
|
||||||
|
ffmpeg.FlushAudio();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
ffmpeg.ProcessAudioFrame(channel0, channel1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
VideoCore::g_renderer->PrepareVideoDumping();
|
||||||
|
is_dumping = true;
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegBackend::AddVideoFrame(const VideoFrame& frame) {
|
||||||
|
event1.Wait();
|
||||||
|
video_frame_buffers[next_buffer] = std::move(frame);
|
||||||
|
event2.Set();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegBackend::AddAudioFrame(const AudioCore::StereoFrame16& frame) {
|
||||||
|
std::array<std::array<s16, 160>, 2> refactored_frame;
|
||||||
|
for (std::size_t i = 0; i < frame.size(); i++) {
|
||||||
|
refactored_frame[0][i] = frame[i][0];
|
||||||
|
refactored_frame[1][i] = frame[i][1];
|
||||||
|
}
|
||||||
|
|
||||||
|
for (auto i : {0, 1}) {
|
||||||
|
audio_buffers[i].insert(audio_buffers[i].end(), refactored_frame[i].begin(),
|
||||||
|
refactored_frame[i].end());
|
||||||
|
}
|
||||||
|
CheckAudioBuffer();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegBackend::AddAudioSample(const std::array<s16, 2>& sample) {
|
||||||
|
for (auto i : {0, 1}) {
|
||||||
|
audio_buffers[i].push_back(sample[i]);
|
||||||
|
}
|
||||||
|
CheckAudioBuffer();
|
||||||
|
}
|
||||||
|
|
||||||
|
void FFmpegBackend::StopDumping() {
|
||||||
|
is_dumping = false;
|
||||||
|
VideoCore::g_renderer->CleanupVideoDumping();
|
||||||
|
|
||||||
|
// Flush the video processing queue
|
||||||
|
AddVideoFrame(VideoFrame());
|
||||||
|
for (auto i : {0, 1}) {
|
||||||
|
// Add remaining data to audio queue
|
||||||
|
if (audio_buffers[i].size() >= 0) {
|
||||||
|
VariableAudioFrame buffer(audio_buffers[i].begin(), audio_buffers[i].end());
|
||||||
|
audio_frame_queues[i].Push(std::move(buffer));
|
||||||
|
audio_buffers[i].clear();
|
||||||
|
}
|
||||||
|
// Flush the audio processing queue
|
||||||
|
audio_frame_queues[i].Push(VariableAudioFrame());
|
||||||
|
}
|
||||||
|
// Wait until processing ends
|
||||||
|
processing_ended.Wait();
|
||||||
|
}

bool FFmpegBackend::IsDumping() const {
    return is_dumping.load(std::memory_order_relaxed);
}

Layout::FramebufferLayout FFmpegBackend::GetLayout() const {
    return video_layout;
}

void FFmpegBackend::EndDumping() {
    LOG_INFO(Render, "Ending frame dumping");

    ffmpeg.WriteTrailer();
    ffmpeg.Free();
    processing_ended.Set();
}

void FFmpegBackend::CheckAudioBuffer() {
    for (auto i : {0, 1}) {
        const std::size_t frame_size = ffmpeg.GetAudioFrameSize();
        // Add audio data to the queue when there is enough to form a frame
        while (audio_buffers[i].size() >= frame_size) {
            VariableAudioFrame buffer(audio_buffers[i].begin(),
                                      audio_buffers[i].begin() + frame_size);
            audio_frame_queues[i].Push(std::move(buffer));

            audio_buffers[i].erase(audio_buffers[i].begin(), audio_buffers[i].begin() + frame_size);
        }
    }
}

} // namespace VideoDumper
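
CheckAudioBuffer is what turns the per-channel sample buffers filled by AddAudioFrame/AddAudioSample into fixed-size frames for the encoder. The standalone sketch below reproduces that chunking on dummy data; the frame size of 1024 samples is an arbitrary example, not necessarily what FFmpegMuxer::GetAudioFrameSize() returns.

#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

using s16 = std::int16_t;
using VariableAudioFrame = std::vector<s16>;

int main() {
    constexpr std::size_t frame_size = 1024;             // assumed encoder frame size
    std::array<VariableAudioFrame, 2> buffers;           // per-channel sample buffers
    std::array<std::vector<VariableAudioFrame>, 2> out;  // frames that would be encoded

    // Feed 160-sample stereo chunks, the granularity AddAudioFrame receives
    for (int chunk = 0; chunk < 20; ++chunk) {
        for (std::size_t i = 0; i < 160; ++i) {
            buffers[0].push_back(static_cast<s16>(i));                      // left channel
            buffers[1].push_back(static_cast<s16>(-static_cast<int>(i)));   // right channel
        }
        // Equivalent of CheckAudioBuffer: emit a frame whenever enough samples exist
        for (std::size_t ch : {0, 1}) {
            while (buffers[ch].size() >= frame_size) {
                out[ch].emplace_back(buffers[ch].begin(), buffers[ch].begin() + frame_size);
                buffers[ch].erase(buffers[ch].begin(), buffers[ch].begin() + frame_size);
            }
        }
    }
    std::printf("frames per channel: %zu, leftover samples: %zu\n", out[0].size(),
                buffers[0].size());
}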

src/core/dumping/ffmpeg_backend.h (new file, 196 lines)
@@ -0,0 +1,196 @@
// Copyright 2018 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <atomic>
#include <condition_variable>
#include <limits>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>
#include "common/common_types.h"
#include "common/thread.h"
#include "common/threadsafe_queue.h"
#include "core/dumping/backend.h"

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
}

namespace VideoDumper {

using VariableAudioFrame = std::vector<s16>;

void InitFFmpegLibraries();

/**
 * Wrapper around FFmpeg AVCodecContext + AVStream.
 * Rescales/Resamples, encodes and writes a frame.
 */
class FFmpegStream {
public:
    bool Init(AVFormatContext* format_context);
    void Free();
    void Flush();

protected:
    ~FFmpegStream();

    void WritePacket(AVPacket& packet);
    void SendFrame(AVFrame* frame);

    struct AVCodecContextDeleter {
        void operator()(AVCodecContext* codec_context) const {
            avcodec_free_context(&codec_context);
        }
    };

    struct AVFrameDeleter {
        void operator()(AVFrame* frame) const {
            av_frame_free(&frame);
        }
    };

    AVFormatContext* format_context{};
    std::unique_ptr<AVCodecContext, AVCodecContextDeleter> codec_context{};
    AVStream* stream{};
};

/**
 * A FFmpegStream used for video data.
 * Rescales, encodes and writes a frame.
 */
class FFmpegVideoStream : public FFmpegStream {
public:
    ~FFmpegVideoStream();

    bool Init(AVFormatContext* format_context, AVOutputFormat* output_format,
              const Layout::FramebufferLayout& layout);
    void Free();
    void ProcessFrame(VideoFrame& frame);

private:
    struct SwsContextDeleter {
        void operator()(SwsContext* sws_context) const {
            sws_freeContext(sws_context);
        }
    };

    u64 frame_count{};

    std::unique_ptr<AVFrame, AVFrameDeleter> current_frame{};
    std::unique_ptr<AVFrame, AVFrameDeleter> scaled_frame{};
    std::unique_ptr<SwsContext, SwsContextDeleter> sws_context{};
    Layout::FramebufferLayout layout;

    /// The pixel format the frames are stored in
    static constexpr AVPixelFormat pixel_format = AVPixelFormat::AV_PIX_FMT_BGRA;
};

/**
 * A FFmpegStream used for audio data.
 * Resamples (converts), encodes and writes a frame.
 */
class FFmpegAudioStream : public FFmpegStream {
public:
    ~FFmpegAudioStream();

    bool Init(AVFormatContext* format_context);
    void Free();
    void ProcessFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1);
    std::size_t GetAudioFrameSize() const;

private:
    struct SwrContextDeleter {
        void operator()(SwrContext* swr_context) const {
            swr_free(&swr_context);
        }
    };

    u64 sample_count{};

    std::unique_ptr<AVFrame, AVFrameDeleter> audio_frame{};
    std::unique_ptr<SwrContext, SwrContextDeleter> swr_context{};

    u8** resampled_data{};
};

/**
 * Wrapper around FFmpeg AVFormatContext.
 * Manages the video and audio streams, and accepts video and audio data.
 */
class FFmpegMuxer {
public:
    ~FFmpegMuxer();

    bool Init(const std::string& path, const std::string& format,
              const Layout::FramebufferLayout& layout);
    void Free();
    void ProcessVideoFrame(VideoFrame& frame);
    void ProcessAudioFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1);
    void FlushVideo();
    void FlushAudio();
    std::size_t GetAudioFrameSize() const;
    void WriteTrailer();

private:
    struct AVFormatContextDeleter {
        void operator()(AVFormatContext* format_context) const {
            avio_closep(&format_context->pb);
            avformat_free_context(format_context);
        }
    };

    FFmpegAudioStream audio_stream{};
    FFmpegVideoStream video_stream{};
    std::unique_ptr<AVFormatContext, AVFormatContextDeleter> format_context{};
};

/**
 * FFmpeg video dumping backend.
 * This class implements a double buffer, and an audio queue to keep audio data
 * before enough data is received to form a frame.
 */
class FFmpegBackend : public Backend {
public:
    FFmpegBackend();
    ~FFmpegBackend() override;
    bool StartDumping(const std::string& path, const std::string& format,
                      const Layout::FramebufferLayout& layout) override;
    void AddVideoFrame(const VideoFrame& frame) override;
    void AddAudioFrame(const AudioCore::StereoFrame16& frame) override;
    void AddAudioSample(const std::array<s16, 2>& sample) override;
    void StopDumping() override;
    bool IsDumping() const override;
    Layout::FramebufferLayout GetLayout() const override;

private:
    void CheckAudioBuffer();
    void EndDumping();

    std::atomic_bool is_dumping = false; ///< Whether the backend is currently dumping

    FFmpegMuxer ffmpeg{};

    Layout::FramebufferLayout video_layout;
    std::array<VideoFrame, 2> video_frame_buffers;
    u32 current_buffer = 0, next_buffer = 1;
    Common::Event event1, event2;
    std::thread video_processing_thread;

    /// An audio buffer used to temporarily hold audio data, before the size is big enough
    /// to be sent to the encoder as a frame
    std::array<VariableAudioFrame, 2> audio_buffers;
    std::array<Common::SPSCQueue<VariableAudioFrame>, 2> audio_frame_queues;
    std::thread audio_processing_thread;

    Common::Event processing_ended;
};

} // namespace VideoDumper
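
For readers less familiar with the FFmpeg C API these wrappers manage, the sketch below shows the usual muxing lifetime that FFmpegMuxer/FFmpegStream revolve around: allocate an output context, add an encoder-backed stream, open the output, write the header, and finish with the trailer. It is only an illustration of the API flow under simplified assumptions (arbitrary codec, no error handling, no frames actually encoded); it is not the implementation of FFmpegMuxer::Init.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Illustrative only: write an (empty) video file, exercising the muxing lifetime.
bool WriteEmptyVideoFile(const char* path, int width, int height) {
    AVFormatContext* fmt = nullptr;
    if (avformat_alloc_output_context2(&fmt, nullptr, nullptr, path) < 0)
        return false; // container is guessed from the file extension, e.g. "out.mp4"

    const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_MPEG4); // arbitrary choice
    AVStream* stream = avformat_new_stream(fmt, codec);
    AVCodecContext* enc = avcodec_alloc_context3(codec);
    enc->width = width;
    enc->height = height;
    enc->pix_fmt = AV_PIX_FMT_YUV420P;
    enc->time_base = {1, 60};
    avcodec_open2(enc, codec, nullptr);
    avcodec_parameters_from_context(stream->codecpar, enc);

    avio_open(&fmt->pb, path, AVIO_FLAG_WRITE);
    avformat_write_header(fmt, nullptr);

    // Per frame one would call avcodec_send_frame()/avcodec_receive_packet() and
    // write each packet with av_interleaved_write_frame(); skipped here.

    av_write_trailer(fmt);
    avcodec_free_context(&enc);
    avio_closep(&fmt->pb);
    avformat_free_context(fmt);
    return true;
}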

@@ -13,6 +13,10 @@ namespace Frontend {
class EmuWindow;
}

namespace FrameDumper {
class Backend;
}

class RendererBase : NonCopyable {
public:
    /// Used to reference a framebuffer
@@ -30,6 +34,12 @@ public:
    /// Shutdown the renderer
    virtual void ShutDown() = 0;

    /// Prepares for video dumping (e.g. create necessary buffers, etc)
    virtual void PrepareVideoDumping() = 0;

    /// Cleans up after video dumping is ended
    virtual void CleanupVideoDumping() = 0;

    /// Updates the framebuffer layout of the contained render window handle.
    void UpdateCurrentFramebufferLayout();

@@ -12,7 +12,9 @@
#include "common/logging/log.h"
#include "core/core.h"
#include "core/core_timing.h"
#include "core/dumping/backend.h"
#include "core/frontend/emu_window.h"
#include "core/frontend/framebuffer_layout.h"
#include "core/hw/gpu.h"
#include "core/hw/hw.h"
#include "core/hw/lcd.h"

@@ -204,7 +206,38 @@ void RendererOpenGL::SwapBuffers() {
        VideoCore::g_renderer_screenshot_requested = false;
    }

    if (cleanup_video_dumping.exchange(false)) {
        ReleaseVideoDumpingGLObjects();
    }

    if (Core::System::GetInstance().VideoDumper().IsDumping()) {
        if (prepare_video_dumping.exchange(false)) {
            InitVideoDumpingGLObjects();
        }

        const auto& layout = Core::System::GetInstance().VideoDumper().GetLayout();
        glBindFramebuffer(GL_READ_FRAMEBUFFER, frame_dumping_framebuffer.handle);
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frame_dumping_framebuffer.handle);
        DrawScreens(layout);

        glBindBuffer(GL_PIXEL_PACK_BUFFER, frame_dumping_pbos[current_pbo].handle);
        glReadPixels(0, 0, layout.width, layout.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, 0);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, frame_dumping_pbos[next_pbo].handle);

        GLubyte* pixels = static_cast<GLubyte*>(glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY));
        VideoDumper::VideoFrame frame_data{layout.width, layout.height, pixels};
        Core::System::GetInstance().VideoDumper().AddVideoFrame(frame_data);

        glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
        current_pbo = (current_pbo + 1) % 2;
        next_pbo = (current_pbo + 1) % 2;
    }

    DrawScreens(render_window.GetFramebufferLayout());
    m_current_frame++;

    Core::System::GetInstance().perf_stats.EndSystemFrame();

@@ -634,13 +667,49 @@ void RendererOpenGL::DrawScreens(const Layout::FramebufferLayout& layout) {
                          (float)bottom_screen.GetHeight());
        }
    }

    m_current_frame++;
}

/// Updates the framerate
void RendererOpenGL::UpdateFramerate() {}

void RendererOpenGL::PrepareVideoDumping() {
    prepare_video_dumping = true;
}

void RendererOpenGL::CleanupVideoDumping() {
    cleanup_video_dumping = true;
}

void RendererOpenGL::InitVideoDumpingGLObjects() {
    const auto& layout = Core::System::GetInstance().VideoDumper().GetLayout();

    frame_dumping_framebuffer.Create();
    glGenRenderbuffers(1, &frame_dumping_renderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, frame_dumping_renderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB8, layout.width, layout.height);
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frame_dumping_framebuffer.handle);
    glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER,
                              frame_dumping_renderbuffer);
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);

    for (auto& buffer : frame_dumping_pbos) {
        buffer.Create();
        glBindBuffer(GL_PIXEL_PACK_BUFFER, buffer.handle);
        glBufferData(GL_PIXEL_PACK_BUFFER, layout.width * layout.height * 4, nullptr,
                     GL_STREAM_READ);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
    }
}

void RendererOpenGL::ReleaseVideoDumpingGLObjects() {
    frame_dumping_framebuffer.Release();
    glDeleteRenderbuffers(1, &frame_dumping_renderbuffer);

    for (auto& buffer : frame_dumping_pbos) {
        buffer.Release();
    }
}

static const char* GetSource(GLenum source) {
#define RET(s) \
    case GL_DEBUG_SOURCE_##s: \

@@ -50,6 +50,12 @@ public:
    /// Shutdown the renderer
    void ShutDown() override;

    /// Prepares for video dumping (e.g. create necessary buffers, etc)
    void PrepareVideoDumping() override;

    /// Cleans up after video dumping is ended
    void CleanupVideoDumping() override;

private:
    void InitOpenGLObjects();
    void ReloadSampler();
@@ -69,6 +75,9 @@ private:
    // Fills active OpenGL texture with the given RGB color.
    void LoadColorToActiveGLTexture(u8 color_r, u8 color_g, u8 color_b, const TextureInfo& texture);

    void InitVideoDumpingGLObjects();
    void ReleaseVideoDumpingGLObjects();

    OpenGLState state;

    // OpenGL object IDs
@@ -94,6 +103,20 @@ private:
    // Shader attribute input indices
    GLuint attrib_position;
    GLuint attrib_tex_coord;

    // Frame dumping
    OGLFramebuffer frame_dumping_framebuffer;
    GLuint frame_dumping_renderbuffer;

    // Whether prepare/cleanup video dumping has been requested.
    // They will be executed on next frame.
    std::atomic_bool prepare_video_dumping = false;
    std::atomic_bool cleanup_video_dumping = false;

    // PBOs used to dump frames faster
    std::array<OGLBuffer, 2> frame_dumping_pbos;
    GLuint current_pbo = 1;
    GLuint next_pbo = 0;
};

} // namespace OpenGL
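
PrepareVideoDumping() and CleanupVideoDumping() may be invoked from outside the render thread, so RendererOpenGL only flips the atomic request flags declared above and performs the actual GL object creation and teardown at the start of the next SwapBuffers(), on the thread that owns the GL context. A minimal runnable sketch of that defer-to-the-owning-thread pattern (names and output are illustrative, not Citra's):

#include <atomic>
#include <cstdio>
#include <thread>

std::atomic_bool prepare_requested{false}; // analogous to prepare_video_dumping

void RequestPrepare() {
    // Safe to call from any thread; only sets a flag.
    prepare_requested = true;
}

void RenderFrame(int frame) {
    // Runs on the thread that owns the (hypothetical) rendering context.
    if (prepare_requested.exchange(false)) {
        std::printf("frame %d: creating dumping objects on the render thread\n", frame);
    }
    // ... draw as usual ...
}

int main() {
    std::thread requester{RequestPrepare};
    requester.join(); // the request arrives before the next frame

    for (int frame = 0; frame < 3; ++frame) {
        RenderFrame(frame); // only frame 0 performs the deferred work
    }
}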