ci(tests): add test framework (#1603)

This commit is contained in:
ReenigneArcher
2024-03-24 19:52:24 -04:00
committed by GitHub
parent 934f81182a
commit 89e8b9628c
43 changed files with 1519 additions and 136 deletions

120
tests/CMakeLists.txt Normal file
View File

@@ -0,0 +1,120 @@
cmake_minimum_required(VERSION 3.13)
# https://github.com/google/oss-policies-info/blob/main/foundational-cxx-support-matrix.md#foundational-c-support
project(test_sunshine)

# Python versions searched for when python based tests are enabled
set(PYTHON_PREFERRED_VERSION 3.11)
set(PYTHON_MINIMUM_VERSION 3.9)

include_directories("${CMAKE_SOURCE_DIR}")

enable_testing()

# Add GoogleTest directory to the project
set(GTEST_SOURCE_DIR "${CMAKE_SOURCE_DIR}/third-party/googletest")
set(INSTALL_GTEST OFF)
set(INSTALL_GMOCK OFF)

# if windows
if (WIN32)
    # For Windows: Prevent overriding the parent project's compiler/linker settings
    # NOTE: this must be set BEFORE add_subdirectory(googletest); setting it afterwards
    # has no effect because gtest's targets are already configured with the static CRT.
    set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)  # cmake-lint: disable=C0103
endif ()

add_subdirectory("${GTEST_SOURCE_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/googletest")
include_directories("${GTEST_SOURCE_DIR}/googletest/include" "${GTEST_SOURCE_DIR}")

# coverage
# https://gcovr.com/en/stable/guide/compiling.html#compiler-options
# append instead of overwrite, so flags supplied by the user or a toolchain file survive
string(APPEND CMAKE_CXX_FLAGS " -fprofile-arcs -ftest-coverage -O1")
string(APPEND CMAKE_C_FLAGS " -fprofile-arcs -ftest-coverage -O1")

# modify SUNSHINE_DEFINITIONS (shader directory differs per platform)
if (WIN32)
    list(APPEND
            SUNSHINE_DEFINITIONS SUNSHINE_SHADERS_DIR="${CMAKE_SOURCE_DIR}/src_assets/windows/assets/shaders/directx")
elseif (NOT APPLE)
    list(APPEND SUNSHINE_DEFINITIONS SUNSHINE_SHADERS_DIR="${CMAKE_SOURCE_DIR}/src_assets/linux/assets/shaders/opengl")
endif ()

set(TEST_DEFINITIONS)  # list will be appended as needed

# IF option TESTS_ENABLE_PYTHON_TESTS is ON, then we need to find python
if (TESTS_ENABLE_PYTHON_TESTS)
    if (NOT DEFINED TESTS_PYTHON_EXECUTABLE)
        # python is required for doc tests
        # https://github.com/actions/setup-python/issues/121#issuecomment-777748504
        if (POLICY CMP0094)  # https://cmake.org/cmake/help/latest/policy/CMP0094.html
            cmake_policy(SET CMP0094 NEW)  # FindPython should return the first matching Python
        endif ()

        # needed on GitHub Actions CI: actions/setup-python does not touch registry/frameworks on Windows/macOS
        # this mirrors PythonInterp behavior which did not consult registry/frameworks first
        if (NOT DEFINED Python_FIND_REGISTRY)
            set(Python_FIND_REGISTRY "LAST")  # cmake-lint: disable=C0103
        endif ()
        if (NOT DEFINED Python_FIND_FRAMEWORK)
            set(Python_FIND_FRAMEWORK "LAST")  # cmake-lint: disable=C0103
        endif ()

        # first, try to find preferred version of python
        find_package(Python ${PYTHON_PREFERRED_VERSION} EXACT COMPONENTS Interpreter)
        if (Python_FOUND)
            message(STATUS
                    "Preferred Python ${PYTHON_PREFERRED_VERSION} found, tests dependent on Python will be enabled")
        else ()
            # fallback to minimum version
            find_package(Python ${PYTHON_MINIMUM_VERSION} COMPONENTS Interpreter)
        endif ()
        if (Python_FOUND)
            message(STATUS "Python found, tests dependent on Python will be enabled")
            list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=1)
            list(APPEND TEST_DEFINITIONS TESTS_PYTHON_EXECUTABLE="${Python_EXECUTABLE}")
        else ()
            # WARNING (not SEND_ERROR): the message promises a graceful fallback, and the
            # venv tests are explicitly disabled below — configure should not fail here
            message(WARNING "Python not found, tests dependent on Python will be disabled")
            list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=0)
        endif ()
    else ()
        message(STATUS "Python executable is set to ${TESTS_PYTHON_EXECUTABLE}")
        list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=1)
        list(APPEND TEST_DEFINITIONS TESTS_PYTHON_EXECUTABLE="${TESTS_PYTHON_EXECUTABLE}")
    endif ()
else ()
    message(STATUS "Python tests are disabled by 'TESTS_ENABLE_PYTHON_TESTS' option")
    list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=0)
endif ()

list(APPEND TEST_DEFINITIONS TESTS_SOURCE_DIR="${CMAKE_SOURCE_DIR}")  # add source directory to TEST_DEFINITIONS
list(APPEND TEST_DEFINITIONS TESTS_DOCS_DIR="${CMAKE_SOURCE_DIR}/docs")  # add docs directory to TEST_DEFINITIONS

# make sure TESTS_SOFTWARE_ENCODER_UNAVAILABLE is set to "fail" or "skip"
if (NOT (TESTS_SOFTWARE_ENCODER_UNAVAILABLE STREQUAL "fail" OR TESTS_SOFTWARE_ENCODER_UNAVAILABLE STREQUAL "skip"))
    set(TESTS_SOFTWARE_ENCODER_UNAVAILABLE "fail")
endif ()
list(APPEND TEST_DEFINITIONS TESTS_SOFTWARE_ENCODER_UNAVAILABLE="${TESTS_SOFTWARE_ENCODER_UNAVAILABLE}")  # fail/skip

# CONFIGURE_DEPENDS (3.12+) re-globs at build time so new test files are picked up
# without a manual re-configure
file(GLOB_RECURSE TEST_SOURCES
        CONFIGURE_DEPENDS
        ${CMAKE_SOURCE_DIR}/tests/conftest.cpp
        ${CMAKE_SOURCE_DIR}/tests/utils.cpp
        ${CMAKE_SOURCE_DIR}/tests/test_*.cpp)

set(SUNSHINE_SOURCES
        ${SUNSHINE_TARGET_FILES})

# remove main.cpp from the list of sources, gtest_main supplies main()
list(REMOVE_ITEM SUNSHINE_SOURCES ${CMAKE_SOURCE_DIR}/src/main.cpp)

add_executable(${PROJECT_NAME}
        ${TEST_SOURCES}
        ${SUNSHINE_SOURCES})
set_target_properties(${PROJECT_NAME} PROPERTIES CXX_STANDARD 17)
target_link_libraries(${PROJECT_NAME}
        ${SUNSHINE_EXTERNAL_LIBRARIES}
        gtest
        gtest_main  # if we use this we don't need our own main function
        ${PLATFORM_LIBRARIES})
target_compile_definitions(${PROJECT_NAME} PUBLIC ${SUNSHINE_DEFINITIONS} ${TEST_DEFINITIONS})
target_compile_options(${PROJECT_NAME} PRIVATE $<$<COMPILE_LANGUAGE:CXX>:${SUNSHINE_COMPILE_OPTIONS}>;$<$<COMPILE_LANGUAGE:CUDA>:${SUNSHINE_COMPILE_OPTIONS_CUDA};-std=c++17>)  # cmake-lint: disable=C0301
target_link_options(${PROJECT_NAME} PRIVATE)

# COMMAND must name the test target so CMake substitutes $<TARGET_FILE:...>;
# the previous value `sunshine_test` matched no target and would be looked up on PATH
add_test(NAME ${PROJECT_NAME} COMMAND ${PROJECT_NAME})

36
tests/ci/test_docs.cpp Normal file
View File

@@ -0,0 +1,36 @@
#include <tests/conftest.cpp>
// Parameterized docs-build tests. DocsTestFixture provides the Python venv
// (with docs requirements installed) and chdirs into the docs directory.
// Each parameter is a tuple of (make target/format, expected output filename).
class DocsTests: public DocsTestFixture, public ::testing::WithParamInterface<std::tuple<const char *, const char *>> {};
INSTANTIATE_TEST_SUITE_P(
DocFormats,
DocsTests,
::testing::Values(
std::make_tuple("html", "index.html"),
std::make_tuple("epub", "Sunshine.epub")));
// Runs `make <format>` (cwd is the docs directory, set by the fixture) and checks
// both a zero exit status and that the expected artifact exists on disk.
TEST_P(DocsTests, MakeDocs) {
auto params = GetParam();
std::string format = std::get<0>(params);
std::string expected_filename = std::get<1>(params);
// NOTE(review): assumes the docs Makefile writes into ./build/<format>/ — confirm against docs/Makefile
std::filesystem::path expected_file = std::filesystem::current_path() / "build" / format / expected_filename;
std::string command = "make " + format;
int status = BaseTest::exec(command.c_str());
EXPECT_EQ(status, 0);
EXPECT_TRUE(std::filesystem::exists(expected_file));
}
// Parameterized rstcheck lint tests; each parameter is a path (directory or file)
// to lint. DocsPythonVenvTest prepends the venv bin dir to PATH, so `rstcheck`
// resolves to the venv-installed tool.
class DocsRstTests: public DocsPythonVenvTest, public ::testing::WithParamInterface<std::filesystem::path> {};
INSTANTIATE_TEST_SUITE_P(
RstFiles,
DocsRstTests,
::testing::Values(
std::filesystem::path(TESTS_DOCS_DIR),
std::filesystem::path(TESTS_SOURCE_DIR) / "README.rst"));
// Runs `rstcheck -r <path>` and expects a clean (zero) exit status.
TEST_P(DocsRstTests, RstCheckDocs) {
std::filesystem::path docs_dir = GetParam();
// NOTE(review): path is not quoted; would break if the checkout path contains spaces
std::string command = "rstcheck -r " + docs_dir.string();
int status = BaseTest::exec(command.c_str());
EXPECT_EQ(status, 0);
}

385
tests/conftest.cpp Normal file
View File

@@ -0,0 +1,385 @@
#include <filesystem>
#include <gtest/gtest.h>
#include <boost/log/core.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/sinks/sync_frontend.hpp>
#include <boost/log/sinks/text_ostream_backend.hpp>
#include <boost/log/trivial.hpp>
#include <boost/shared_ptr.hpp>
#include <src/globals.h>
#include <src/platform/common.h>
#include <tests/utils.h>
// Short aliases for the Boost.Log namespaces used below.
namespace logging = boost::log;
namespace sinks = logging::sinks;
// Undefine the original TEST macro
#undef TEST
// Redefine TEST to use our BaseTest class, to automatically use our BaseTest fixture
// NOTE(review): GTEST_TEST_ and GetTypeId are googletest *internal* APIs, not public
// contract — this redefinition may break on a googletest upgrade.
#define TEST(test_case_name, test_name) \
GTEST_TEST_(test_case_name, test_name, ::BaseTest, \
::testing::internal::GetTypeId<::BaseTest>())
/**
 * @brief Base class for tests.
 *
 * This class provides a base test fixture for all tests.
 *
 * ``cout``, ``stderr``, and ``stdout`` are redirected to a buffer, and the buffer is printed if the test fails.
 *
 * @todo Retain the color of the original output.
 */
class BaseTest: public ::testing::Test {
protected:
  // https://stackoverflow.com/a/58369622/11214013
  // we can possibly use some internal googletest functions to capture stdout and stderr, but I have not tested this
  // https://stackoverflow.com/a/33186201/11214013

  // Boost.Log sink that mirrors log records into boost_log_buffer
  boost::shared_ptr<sinks::synchronous_sink<sinks::text_ostream_backend>> test_sink;

  BaseTest():
      sbuf { nullptr }, pipe_stdout { nullptr }, pipe_stderr { nullptr } {
    // intentionally empty
  }

  ~BaseTest() override = default;

  void
  SetUp() override {
    // todo: only run this one time, instead of every time a test is run
    // see: https://stackoverflow.com/questions/2435277/googletest-accessing-the-environment-from-a-test

    // get command line args from the test executable
    testArgs = ::testing::internal::GetArgvs();

    // then get the directory of the test executable
    testBinary = testArgs[0];

    // get the directory of the test executable
    testBinaryDir = std::filesystem::path(testBinary).parent_path();

    // If testBinaryDir is empty or `.` then set it to the current directory
    // maybe some better options here: https://stackoverflow.com/questions/875249/how-to-get-current-directory
    if (testBinaryDir.empty() || testBinaryDir.string() == ".") {
      testBinaryDir = std::filesystem::current_path();
    }

    // Create a sink that writes to our stringstream (BOOST_LOG)
    typedef sinks::synchronous_sink<sinks::text_ostream_backend> test_text_sink;
    test_sink = boost::make_shared<test_text_sink>();

    // Set the stringstream as the target of the sink (BOOST_LOG);
    // the no-op deleter prevents the sink from freeing a member variable
    boost::shared_ptr<std::ostream> stream(&boost_log_buffer, [](std::ostream *) {});
    test_sink->locked_backend()->add_stream(stream);

    // Register the sink in the logging core (BOOST_LOG)
    logging::core::get()->add_sink(test_sink);

    sbuf = std::cout.rdbuf();  // save cout buffer (std::cout)
    std::cout.rdbuf(cout_buffer.rdbuf());  // redirect cout to buffer (std::cout)

    // todo: do this only once
    // setup a mail object
    mail::man = std::make_shared<safe::mail_raw_t>();
  }

  void
  TearDown() override {
    std::cout.rdbuf(sbuf);  // restore cout buffer

    // if the test failed, replay everything that was captured during the test
    const ::testing::TestInfo *const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
    if (test_info->result()->Failed()) {
      std::cout << std::endl
                << "Test failed: " << test_info->name() << std::endl
                << std::endl
                << "Captured boost log:" << std::endl
                << boost_log_buffer.str() << std::endl
                << "Captured cout:" << std::endl
                << cout_buffer.str() << std::endl
                << "Captured stdout:" << std::endl
                << stdout_buffer.str() << std::endl
                << "Captured stderr:" << std::endl
                << stderr_buffer.str() << std::endl;
    }

    sbuf = nullptr;  // clear sbuf

    // close any pipe left open by a test that failed mid-exec()
    if (pipe_stdout) {
      pclose(pipe_stdout);
      pipe_stdout = nullptr;
    }
    if (pipe_stderr) {
      pclose(pipe_stderr);
      pipe_stderr = nullptr;
    }

    // Remove the sink from the logging core (BOOST_LOG)
    logging::core::get()->remove_sink(test_sink);
    test_sink.reset();
  }

  // functions and variables
  std::vector<std::string> testArgs;  // CLI arguments used
  std::filesystem::path testBinary;  // full path of this binary
  std::filesystem::path testBinaryDir;  // full directory of this binary
  std::stringstream boost_log_buffer;  // captured boost log output
  std::stringstream cout_buffer;  // captured std::cout output
  std::stringstream stdout_buffer;  // captured child-process output (stdout+stderr merged)
  std::stringstream stderr_buffer;  // retained for failure report; no longer written by exec()
  std::streambuf *sbuf;
  FILE *pipe_stdout;
  FILE *pipe_stderr;

  /**
   * @brief Run a shell command, capturing its merged stdout/stderr into stdout_buffer.
   * @param cmd The command line to execute.
   * @return The exit status as reported by pclose().
   */
  int
  exec(const char *cmd) {
    std::array<char, 128> buffer {};
    // Launch the command exactly once, with stderr merged into stdout.
    // (Previously popen() was called twice with the same command line, which ran
    // the command — and all of its side effects — twice.)
    pipe_stdout = popen((std::string(cmd) + " 2>&1").c_str(), "r");
    if (!pipe_stdout) {
      throw std::runtime_error("popen() failed!");
    }
    while (fgets(buffer.data(), buffer.size(), pipe_stdout) != nullptr) {
      stdout_buffer << buffer.data();
    }
    int returnCode = pclose(pipe_stdout);
    pipe_stdout = nullptr;
    if (returnCode != 0) {
      std::cout << "Error: " << stdout_buffer.str() << std::endl
                << "Return code: " << returnCode << std::endl;
    }
    return returnCode;
  }
};
class PlatformInitBase: public virtual BaseTest {
protected:
void
SetUp() override {
std::cout << "PlatformInitTest:: starting Fixture SetUp" << std::endl;
// initialize the platform
auto deinit_guard = platf::init();
if (!deinit_guard) {
FAIL() << "Platform failed to initialize";
}
std::cout << "PlatformInitTest:: finished Fixture SetUp" << std::endl;
}
void
TearDown() override {
std::cout << "PlatformInitTest:: starting Fixture TearDown" << std::endl;
std::cout << "PlatformInitTest:: finished Fixture TearDown" << std::endl;
}
};
class DocsPythonVenvBase: public virtual BaseTest {
protected:
void
SetUp() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 0
GTEST_SKIP_("TESTS_ENABLE_VENV_TESTS is disabled by CMake");
#else
std::cout << "DocsPythonVenvTest:: starting Fixture SetUp" << std::endl;
std::string pythonBinDirArray[] = { "bin", "Scripts" };
std::filesystem::path pythonPath = "python";
std::string binPath;
std::string command;
int exit_code;
std::filesystem::path venvPath = ".venv";
std::filesystem::path fullVenvPath = BaseTest::testBinaryDir / venvPath;
// check for existence of venv, and create it if necessary
std::cout << "DocsPythonVenvTest:: checking for venv" << std::endl;
if (!std::filesystem::exists(fullVenvPath)) {
std::cout << "DocsPythonVenvTest:: venv not found" << std::endl;
// create the venv
command = "\"" TESTS_PYTHON_EXECUTABLE "\" -m venv " + fullVenvPath.string();
std::cout << "DocsPythonVenvTest:: trying to create venv with command: " << command << std::endl;
exit_code = BaseTest::exec(command.c_str());
if (exit_code != 0) {
if (!std::filesystem::exists(fullVenvPath)) {
FAIL() << "Command failed: " << command << " with exit code: " << exit_code;
}
else {
// venv command will randomly complain that some files already exist...
std::cout << "DocsPythonVenvTest:: exit code (" << exit_code << ") indicates venv creation failed, but venv exists" << std::endl;
}
}
}
// determine if bin directory is `bin` (Unix) or `Scripts` (Windows)
// cannot assume `Scripts` on Windows, as it could be `bin` if using MSYS2, cygwin, etc.
std::cout << "DocsPythonVenvTest:: checking structure of venv" << std::endl;
for (const std::string &binDir : pythonBinDirArray) {
// check if bin directory exists
if (std::filesystem::exists(fullVenvPath / binDir)) {
binPath = binDir;
std::cout << "DocsPythonVenvTest:: found binPath: " << binPath << std::endl;
break;
}
}
if (binPath.empty()) {
FAIL() << "Python venv not found";
}
// set fullPythonPath and fullPythonBinPath
fullPythonPath = fullVenvPath / binPath / pythonPath;
fullPythonBinPath = fullVenvPath / binPath;
std::cout << "DocsPythonVenvTest:: fullPythonPath: " << fullPythonPath << std::endl;
std::cout << "DocsPythonVenvTest:: fullPythonBinPath: " << fullPythonBinPath << std::endl;
std::filesystem::path requirements_path = std::filesystem::path(TESTS_DOCS_DIR) / "requirements.txt";
// array of commands to run
std::string CommandArray[] = {
"\"" + fullPythonPath.string() + "\" -m pip install -r " + requirements_path.string(),
};
for (const std::string &_command : CommandArray) {
std::cout << "DocsPythonVenvTest:: running command: " << _command << std::endl;
exit_code = BaseTest::exec(_command.c_str());
if (exit_code != 0) {
FAIL() << "Command failed: " << command << " with exit code: " << exit_code;
}
}
// Save the original PATH
originalEnvPath = std::getenv("PATH") ? std::getenv("PATH") : "";
std::cout << "DocsPythonVenvTest:: originalEnvPath: " << originalEnvPath << std::endl;
// Set the temporary PATH
std::string tempPath;
std::string envPathSep;
#ifdef _WIN32
envPathSep = ";";
#else
envPathSep = ":";
#endif
tempPath = fullPythonBinPath.string() + envPathSep + originalEnvPath;
std::cout << "DocsPythonVenvTest:: tempPath: " << tempPath << std::endl;
setEnv("PATH", tempPath);
std::cout << "DocsPythonVenvTest:: finished Fixture SetUp" << std::endl;
#endif
}
void
TearDown() override {
std::cout << "DocsPythonVenvTest:: starting Fixture TearDown" << std::endl;
// Restore the original PATH
if (!originalEnvPath.empty()) {
std::cout << "DocsPythonVenvTest:: restoring originalEnvPath: " << originalEnvPath << std::endl;
setEnv("PATH", originalEnvPath);
}
std::cout << "DocsPythonVenvTest:: finished Fixture TearDown" << std::endl;
}
// functions and variables
std::filesystem::path fullPythonPath;
std::filesystem::path fullPythonBinPath;
std::string originalEnvPath;
};
// Concrete fixture combining BaseTest with the Python venv setup.
// SetUp runs base-first, TearDown in reverse order, mirroring
// constructor/destructor ordering.
class DocsPythonVenvTest: public virtual BaseTest, public DocsPythonVenvBase {
protected:
void
SetUp() override {
BaseTest::SetUp();
DocsPythonVenvBase::SetUp();
}
void
TearDown() override {
DocsPythonVenvBase::TearDown();
BaseTest::TearDown();
}
};
// Fixture that changes the working directory to the docs directory for the
// duration of a test and restores it afterwards.
// Compiled to a no-op unless TESTS_ENABLE_VENV_TESTS == 1.
class DocsWorkingDirectoryBase: public virtual BaseTest {
protected:
void
SetUp() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 1
std::cout << "DocsWorkingDirectoryTest:: starting Fixture SetUp" << std::endl;
temp_dir = TESTS_DOCS_DIR;
std::cout << "DocsWorkingDirectoryTest:: temp_dir: " << temp_dir << std::endl;
// change directory to `docs`
original_dir = std::filesystem::current_path(); // save original directory
std::cout << "DocsWorkingDirectoryTest:: original_dir: " << original_dir << std::endl;
std::filesystem::current_path(temp_dir);
std::cout << "DocsWorkingDirectoryTest:: working directory set to: " << std::filesystem::current_path() << std::endl;
std::cout << "DocsWorkingDirectoryTest:: finished Fixture SetUp" << std::endl;
#endif
}
void
TearDown() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 1
std::cout << "DocsWorkingDirectoryTest:: starting Fixture TearDown" << std::endl;
// change directory back to original
std::filesystem::current_path(original_dir);
std::cout << "DocsWorkingDirectoryTest:: working directory set to: " << std::filesystem::current_path() << std::endl;
std::cout << "DocsWorkingDirectoryTest:: finished Fixture TearDown" << std::endl;
#endif
}
// functions and variables
std::filesystem::path original_dir; // cwd saved by SetUp, restored by TearDown
std::filesystem::path temp_dir; // docs directory (TESTS_DOCS_DIR)
};
// Concrete fixture combining BaseTest with the docs working-directory switch.
// SetUp runs base-first, TearDown in reverse order.
class DocsWorkingDirectoryTest: public virtual BaseTest, public DocsWorkingDirectoryBase {
protected:
void
SetUp() override {
BaseTest::SetUp();
DocsWorkingDirectoryBase::SetUp();
}
void
TearDown() override {
DocsWorkingDirectoryBase::TearDown();
BaseTest::TearDown();
}
};
// Full docs-test fixture: Python venv + working directory switched to docs.
// SetUp order: BaseTest, venv, working directory; TearDown reverses it so the
// cwd is restored before the venv PATH changes are undone.
class DocsTestFixture: public virtual BaseTest, public DocsPythonVenvBase, public DocsWorkingDirectoryBase {
protected:
void
SetUp() override {
BaseTest::SetUp();
DocsPythonVenvBase::SetUp();
DocsWorkingDirectoryBase::SetUp();
}
void
TearDown() override {
DocsWorkingDirectoryBase::TearDown();
DocsPythonVenvBase::TearDown();
BaseTest::TearDown();
}
};

View File

@@ -0,0 +1,19 @@
/**
* @file tests/test_file_handler.cpp
* @brief Test src/file_handler.*.
*/
#include <src/file_handler.h>
#include <tests/conftest.cpp>
// Writes "test" to write_file_test.txt; expects a 0 (success) return.
// The file is intentionally left on disk for ReadFileTest below.
TEST(FileHandlerTests, WriteFileTest) {
EXPECT_EQ(file_handler::write_file("write_file_test.txt", "test"), 0);
}
// NOTE(review): depends on WriteFileTest having run first (shared file on disk),
// i.e. there is test-order coupling between these two cases.
TEST(FileHandlerTests, ReadFileTest) {
// read file from WriteFileTest
EXPECT_EQ(file_handler::read_file("write_file_test.txt"), "test\n"); // sunshine adds a newline
// read missing file
EXPECT_EQ(file_handler::read_file("non-existing-file.txt"), "");
}

68
tests/unit/test_video.cpp Normal file
View File

@@ -0,0 +1,68 @@
/**
* @file tests/test_video.cpp
* @brief Test src/video.*.
*/
#include <src/video.h>
#include <tests/conftest.cpp>
// Parameterized fixture validating a video encoder. Each parameter is a tuple of
// (encoder name, pointer to the encoder descriptor). Requires platform init.
class EncoderTest: public virtual BaseTest, public PlatformInitBase, public ::testing::WithParamInterface<std::tuple<std::basic_string_view<char>, video::encoder_t *>> {
protected:
void
SetUp() override {
BaseTest::SetUp();
PlatformInitBase::SetUp();
std::string_view p_name = std::get<0>(GetParam());
std::cout << "EncoderTest(" << p_name << "):: starting Fixture SetUp" << std::endl;
std::cout << "EncoderTest(" << p_name << "):: validating encoder" << std::endl;
video::encoder_t *encoder = std::get<1>(GetParam());
bool isEncoderValid;
isEncoderValid = video::validate_encoder(*encoder, false);
// todo: av logging is not redirected to boost so it will be visible whether the test passes or fails
// move this code to logging
// https://github.com/LizardByte/Sunshine/blob/5606840c8983b714a0e442c42d887a49807715e1/src/main.cpp#L118
if (!isEncoderValid) {
// if encoder is software fail, otherwise skip
// (TESTS_SOFTWARE_ENCODER_UNAVAILABLE is "fail" or "skip", chosen by CMake)
if (encoder == &video::software && std::string(TESTS_SOFTWARE_ENCODER_UNAVAILABLE) == "fail") {
FAIL() << "EncoderTest(" << p_name << "):: software encoder not available";
}
else {
// hardware encoders may legitimately be absent on CI machines — skip, not fail
GTEST_SKIP_((std::string("EncoderTest(") + std::string(p_name) + "):: encoder not available").c_str());
}
}
else {
std::cout << "EncoderTest(" << p_name << "):: encoder available" << std::endl;
}
}
void
TearDown() override {
PlatformInitBase::TearDown();
BaseTest::TearDown();
}
};
// Instantiates EncoderTest for each encoder available on the current platform.
// NOTE(review): preprocessor directives inside macro arguments are formally
// undefined behavior per the C++ standard; this compiles on major compilers
// but is fragile — consider building the value list outside the macro.
INSTANTIATE_TEST_SUITE_P(
EncoderVariants,
EncoderTest,
::testing::Values(
// todo: all encoders crash on windows, probably due to platf not being initialized (which also crashes)
#if !defined(__APPLE__)
std::make_tuple(video::nvenc.name, &video::nvenc),
#endif
#ifdef _WIN32
std::make_tuple(video::amdvce.name, &video::amdvce), std::make_tuple(video::quicksync.name, &video::quicksync),
#endif
#ifdef __linux__
std::make_tuple(video::vaapi.name, &video::vaapi),
#endif
#ifdef __APPLE__
std::make_tuple(video::videotoolbox.name, &video::videotoolbox),
#endif
std::make_tuple(video::software.name, &video::software)));
// The assertion lives entirely in the fixture's SetUp (validate/skip/fail).
TEST_P(EncoderTest, ValidateEncoder) {
// todo:: test something besides fixture setup
}

21
tests/utils.cpp Normal file
View File

@@ -0,0 +1,21 @@
/**
* @file utils.cpp
* @brief Utility functions
*/
#include "utils.h"
/**
 * @brief Set an environment variable.
 * @param name Name of the environment variable
 * @param value Value of the environment variable
 * @return 0 on success, non-zero error code on failure
 */
int
setEnv(const std::string &name, const std::string &value) {
  int result;
#ifdef _WIN32
  // Windows CRT: _putenv_s copies both strings into the process environment
  result = _putenv_s(name.c_str(), value.c_str());
#else
  // POSIX: overwrite flag is 1 so an existing value is replaced
  result = setenv(name.c_str(), value.c_str(), 1);
#endif
  return result;
}

11
tests/utils.h Normal file
View File

@@ -0,0 +1,11 @@
/**
 * @file utils.h
 * @brief Reusable functions for tests.
 */
#pragma once
#include <string>
// Cross-platform environment-variable setter (implemented in utils.cpp).
// Returns 0 on success, non-zero error code on failure.
int
setEnv(const std::string &name, const std::string &value);