remove mac support

pigeatgarlic 2023-12-20 17:16:15 +07:00
parent b0239a673c
commit 84c0d950dd
14 changed files with 23 additions and 1237 deletions

View File

@@ -19,10 +19,8 @@
#include "platform/common.h"
#ifndef __APPLE__
// For NVENC legacy constants
#include <ffnvcodec/nvEncodeAPI.h>
#endif
// For NVENC legacy constants
#include <ffnvcodec/nvEncodeAPI.h>
namespace fs = std::filesystem;
using namespace std::literals;
@@ -55,49 +53,9 @@ namespace config {
} // namespace nv
namespace amd {
#ifdef __APPLE__
// values accurate as of 27/12/2022, but aren't strictly necessary for the macOS build
#define AMF_VIDEO_ENCODER_AV1_QUALITY_PRESET_SPEED 100
#define AMF_VIDEO_ENCODER_AV1_QUALITY_PRESET_QUALITY 30
#define AMF_VIDEO_ENCODER_AV1_QUALITY_PRESET_BALANCED 70
#define AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_SPEED 10
#define AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_QUALITY 0
#define AMF_VIDEO_ENCODER_HEVC_QUALITY_PRESET_BALANCED 5
#define AMF_VIDEO_ENCODER_QUALITY_PRESET_SPEED 1
#define AMF_VIDEO_ENCODER_QUALITY_PRESET_QUALITY 2
#define AMF_VIDEO_ENCODER_QUALITY_PRESET_BALANCED 0
#define AMF_VIDEO_ENCODER_AV1_RATE_CONTROL_METHOD_CONSTANT_QP 0
#define AMF_VIDEO_ENCODER_AV1_RATE_CONTROL_METHOD_CBR 3
#define AMF_VIDEO_ENCODER_AV1_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR 2
#define AMF_VIDEO_ENCODER_AV1_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR 1
#define AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_CONSTANT_QP 0
#define AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_CBR 3
#define AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR 2
#define AMF_VIDEO_ENCODER_HEVC_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR 1
#define AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_CONSTANT_QP 0
#define AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_CBR 1
#define AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR 2
#define AMF_VIDEO_ENCODER_RATE_CONTROL_METHOD_LATENCY_CONSTRAINED_VBR 3
#define AMF_VIDEO_ENCODER_AV1_USAGE_TRANSCODING 0
#define AMF_VIDEO_ENCODER_AV1_USAGE_LOW_LATENCY 1
#define AMF_VIDEO_ENCODER_AV1_USAGE_ULTRA_LOW_LATENCY 2
#define AMF_VIDEO_ENCODER_AV1_USAGE_WEBCAM 3
#define AMF_VIDEO_ENCODER_HEVC_USAGE_TRANSCONDING 0
#define AMF_VIDEO_ENCODER_HEVC_USAGE_ULTRA_LOW_LATENCY 1
#define AMF_VIDEO_ENCODER_HEVC_USAGE_LOW_LATENCY 2
#define AMF_VIDEO_ENCODER_HEVC_USAGE_WEBCAM 3
#define AMF_VIDEO_ENCODER_USAGE_TRANSCONDING 0
#define AMF_VIDEO_ENCODER_USAGE_ULTRA_LOW_LATENCY 1
#define AMF_VIDEO_ENCODER_USAGE_LOW_LATENCY 2
#define AMF_VIDEO_ENCODER_USAGE_WEBCAM 3
#define AMF_VIDEO_ENCODER_UNDEFINED 0
#define AMF_VIDEO_ENCODER_CABAC 1
#define AMF_VIDEO_ENCODER_CALV 2
#else
#include <AMF/components/VideoEncoderAV1.h>
#include <AMF/components/VideoEncoderHEVC.h>
#include <AMF/components/VideoEncoderVCE.h>
#endif
enum class quality_av1_e : int {
speed = AMF_VIDEO_ENCODER_AV1_QUALITY_PRESET_SPEED,
@@ -832,13 +790,11 @@ namespace config {
bool_f(vars, "nvenc_h264_cavlc", video.nv.h264_cavlc);
bool_f(vars, "nvenc_realtime_hags", video.nv_realtime_hags);
#ifndef __APPLE__
video.nv_legacy.preset = video.nv.quality_preset + 11;
video.nv_legacy.multipass = video.nv.two_pass == nvenc::nvenc_two_pass::quarter_resolution ? NV_ENC_TWO_PASS_QUARTER_RESOLUTION :
video.nv.two_pass == nvenc::nvenc_two_pass::full_resolution ? NV_ENC_TWO_PASS_FULL_RESOLUTION :
NV_ENC_MULTI_PASS_DISABLED;
video.nv_legacy.h264_coder = video.nv.h264_cavlc ? NV_ENC_H264_ENTROPY_CODING_MODE_CAVLC : NV_ENC_H264_ENTROPY_CODING_MODE_CABAC;
#endif
int_f(vars, "qsv_preset", video.qsv.qsv_preset, qsv::preset_from_view);
int_f(vars, "qsv_coder", video.qsv.qsv_cavlc, qsv::coder_from_view);

View File

@@ -82,6 +82,8 @@ extern VideoPipeline *__cdecl StartQueue(int video_codec,
}};
thread.detach();
RaiseEventS(&pipeline, CHANGE_DISPLAY, display_name);
return &pipeline;
}

View File

@@ -407,27 +407,6 @@ namespace nvenc {
}
void
nvenc_base::update_bitrate(int bitrate) {
auto get_encoder_cap = [&](NV_ENC_CAPS cap) {
NV_ENC_CAPS_PARAM param = { NV_ENC_CAPS_PARAM_VER, cap };
int value = 0;
nvenc->nvEncGetEncodeCaps(encoder, init_params.encodeGUID, &param, &value);
return value;
};
NV_ENC_RECONFIGURE_PARAMS reconfigure_params = { NV_ENC_RECONFIGURE_PARAMS_VER };
/* reset rate control state and start from IDR */
init_params.encodeConfig->rcParams.averageBitRate = bitrate * 1000;
if (get_encoder_cap(NV_ENC_CAPS_SUPPORT_CUSTOM_VBV_BUF_SIZE)) {
init_params.encodeConfig->rcParams.vbvBufferSize = bitrate * 1000 / init_params.frameRateNum;
}
reconfigure_params.reInitEncodeParams = init_params;
reconfigure_params.resetEncoder = TRUE;
reconfigure_params.forceIDR = TRUE;
nvenc->nvEncReconfigureEncoder(encoder, &reconfigure_params);
}
nvenc_encoded_frame
nvenc_base::encode_frame(uint64_t frame_index, bool force_idr) {

View File

@@ -25,8 +25,6 @@ namespace nvenc {
void
destroy_encoder();
void
update_bitrate(int bitrate);
nvenc_encoded_frame
encode_frame(uint64_t frame_index, bool force_idr);

View File

@@ -1,52 +0,0 @@
/**
* @file src/platform/macos/av_img_t.h
* @brief todo
*/
#pragma once
#include "src/platform/common.h"
#include <CoreMedia/CoreMedia.h>
#include <CoreVideo/CoreVideo.h>
namespace platf {
struct av_sample_buf_t {
av_sample_buf_t(CMSampleBufferRef buf):
buf((CMSampleBufferRef) CFRetain(buf)) {}
~av_sample_buf_t() {
CFRelease(buf);
}
CMSampleBufferRef buf;
};
struct av_pixel_buf_t {
av_pixel_buf_t(CVPixelBufferRef buf):
buf((CVPixelBufferRef) CFRetain(buf)),
locked(false) {}
uint8_t *
lock() {
if (!locked) {
CVPixelBufferLockBaseAddress(buf, kCVPixelBufferLock_ReadOnly);
locked = true;  // remember the lock so the destructor can release it
}
return (uint8_t *) CVPixelBufferGetBaseAddress(buf);
}
~av_pixel_buf_t() {
if (locked) {
CVPixelBufferUnlockBaseAddress(buf, kCVPixelBufferLock_ReadOnly);
}
CFRelease(buf);
}
CVPixelBufferRef buf;
bool locked;
};
struct av_img_t: public img_t {
std::shared_ptr<av_sample_buf_t> sample_buffer;
std::shared_ptr<av_pixel_buf_t> pixel_buffer;
};
} // namespace platf

View File

@@ -1,38 +0,0 @@
/**
* @file src/platform/macos/av_video.h
* @brief todo
*/
#pragma once
#import <AVFoundation/AVFoundation.h>
struct CaptureSession {
AVCaptureVideoDataOutput *output;
NSCondition *captureStopped;
};
@interface AVVideo: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
#define kMaxDisplays 32
@property (nonatomic, assign) CGDirectDisplayID displayID;
@property (nonatomic, assign) CMTime minFrameDuration;
@property (nonatomic, assign) OSType pixelFormat;
@property (nonatomic, assign) int frameWidth;
@property (nonatomic, assign) int frameHeight;
typedef bool (^FrameCallbackBlock)(CMSampleBufferRef);
@property (nonatomic, assign) AVCaptureSession *session;
@property (nonatomic, assign) NSMapTable<AVCaptureConnection *, AVCaptureVideoDataOutput *> *videoOutputs;
@property (nonatomic, assign) NSMapTable<AVCaptureConnection *, FrameCallbackBlock> *captureCallbacks;
@property (nonatomic, assign) NSMapTable<AVCaptureConnection *, dispatch_semaphore_t> *captureSignals;
+ (NSArray<NSDictionary *> *)displayNames;
- (id)initWithDisplay:(CGDirectDisplayID)displayID frameRate:(int)frameRate;
- (void)setFrameWidth:(int)frameWidth frameHeight:(int)frameHeight;
- (dispatch_semaphore_t)capture:(FrameCallbackBlock)frameCallback;
@end
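A hedged usage sketch of this interface in Objective-C++ (the display choice, frame rate, and handling below are illustrative, not taken from the project):

// Capture the main display at 60 fps; returning false from the block tells
// AVVideo to tear the connection down and signal the returned semaphore.
AVVideo *video = [[AVVideo alloc] initWithDisplay:CGMainDisplayID() frameRate:60];
if (video) {
  dispatch_semaphore_t done = [video capture:^(CMSampleBufferRef sampleBuffer) {
    // ... consume sampleBuffer ...
    return true;  // keep capturing; return false to stop and signal `done`
  }];
  dispatch_semaphore_wait(done, DISPATCH_TIME_FOREVER);
}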

View File

@@ -1,139 +0,0 @@
/**
* @file src/platform/macos/av_video.m
* @brief todo
*/
#import "av_video.h"
@implementation AVVideo
// XXX: Currently, this function only returns the screen IDs as names,
// which is not very helpful to the user. The API to retrieve display names
// was deprecated starting with macOS 10.9.
// However, there is a solution that requires only a little external code:
// https://stackoverflow.com/questions/20025868/cgdisplayioserviceport-is-deprecated-in-os-x-10-9-how-to-replace
+ (NSArray<NSDictionary *> *)displayNames {
CGDirectDisplayID displays[kMaxDisplays];
uint32_t count;
if (CGGetActiveDisplayList(kMaxDisplays, displays, &count) != kCGErrorSuccess) {
return [NSArray array];
}
NSMutableArray *result = [NSMutableArray array];
for (uint32_t i = 0; i < count; i++) {
[result addObject:@{
@"id": [NSNumber numberWithUnsignedInt:displays[i]],
@"name": [NSString stringWithFormat:@"%d", displays[i]]
}];
}
return [NSArray arrayWithArray:result];
}
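Following the StackOverflow reference above, a sketch of one possible modern replacement (an assumption, not part of this codebase): on macOS 10.15+, NSScreen exposes localizedName, and its deviceDescription stores the CGDirectDisplayID under the "NSScreenNumber" key.

#import <AppKit/AppKit.h>

// Pair each display ID with a human-readable name via NSScreen (10.15+).
static NSArray<NSDictionary *> *
displayNamesViaNSScreen(void) {
  NSMutableArray *result = [NSMutableArray array];
  for (NSScreen *screen in [NSScreen screens]) {
    [result addObject:@{
      @"id": screen.deviceDescription[@"NSScreenNumber"],
      @"name": screen.localizedName  // guard with @available in real code
    }];
  }
  return result;
}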
- (id)initWithDisplay:(CGDirectDisplayID)displayID frameRate:(int)frameRate {
self = [super init];
CGDisplayModeRef mode = CGDisplayCopyDisplayMode(displayID);
self.displayID = displayID;
self.pixelFormat = kCVPixelFormatType_32BGRA;
self.frameWidth = CGDisplayModeGetPixelWidth(mode);
self.frameHeight = CGDisplayModeGetPixelHeight(mode);
self.minFrameDuration = CMTimeMake(1, frameRate);
self.session = [[AVCaptureSession alloc] init];
self.videoOutputs = [[NSMapTable alloc] init];
self.captureCallbacks = [[NSMapTable alloc] init];
self.captureSignals = [[NSMapTable alloc] init];
CFRelease(mode);
AVCaptureScreenInput *screenInput = [[AVCaptureScreenInput alloc] initWithDisplayID:self.displayID];
[screenInput setMinFrameDuration:self.minFrameDuration];
if ([self.session canAddInput:screenInput]) {
[self.session addInput:screenInput];
}
else {
[screenInput release];
return nil;
}
[self.session startRunning];
return self;
}
- (void)dealloc {
[self.videoOutputs release];
[self.captureCallbacks release];
[self.captureSignals release];
[self.session stopRunning];
[super dealloc];
}
- (void)setFrameWidth:(int)frameWidth frameHeight:(int)frameHeight {
self.frameWidth = frameWidth;
self.frameHeight = frameHeight;
}
- (dispatch_semaphore_t)capture:(FrameCallbackBlock)frameCallback {
@synchronized(self) {
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setVideoSettings:@{
(NSString *) kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:self.pixelFormat],
(NSString *) kCVPixelBufferWidthKey: [NSNumber numberWithInt:self.frameWidth],
(NSString *) kCVPixelBufferHeightKey: [NSNumber numberWithInt:self.frameHeight],
(NSString *) AVVideoScalingModeKey: AVVideoScalingModeResizeAspect,
}];
dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL,
QOS_CLASS_USER_INITIATED,
DISPATCH_QUEUE_PRIORITY_HIGH);
dispatch_queue_t recordingQueue = dispatch_queue_create("videoCaptureQueue", qos);
[videoOutput setSampleBufferDelegate:self queue:recordingQueue];
[self.session stopRunning];
if ([self.session canAddOutput:videoOutput]) {
[self.session addOutput:videoOutput];
}
else {
[videoOutput release];
return nil;
}
AVCaptureConnection *videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
dispatch_semaphore_t signal = dispatch_semaphore_create(0);
[self.videoOutputs setObject:videoOutput forKey:videoConnection];
[self.captureCallbacks setObject:frameCallback forKey:videoConnection];
[self.captureSignals setObject:signal forKey:videoConnection];
[self.session startRunning];
return signal;
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
FrameCallbackBlock callback = [self.captureCallbacks objectForKey:connection];
if (callback != nil) {
if (!callback(sampleBuffer)) {
@synchronized(self) {
[self.session stopRunning];
[self.captureCallbacks removeObjectForKey:connection];
[self.session removeOutput:[self.videoOutputs objectForKey:connection]];
[self.videoOutputs removeObjectForKey:connection];
dispatch_semaphore_signal([self.captureSignals objectForKey:connection]);
[self.captureSignals removeObjectForKey:connection];
[self.session startRunning];
}
}
}
}
@end

View File

@@ -1,185 +0,0 @@
/**
* @file src/platform/macos/display.mm
* @brief todo
*/
#include "src/platform/common.h"
#include "src/platform/macos/av_img_t.h"
#include "src/platform/macos/av_video.h"
#include "src/platform/macos/nv12_zero_device.h"
#include "src/config.h"
#include "src/main.h"
// Avoid conflict between AVFoundation and libavutil both defining AVMediaType
#define AVMediaType AVMediaType_FFmpeg
#include "src/video.h"
#undef AVMediaType
namespace fs = std::filesystem;
namespace platf {
using namespace std::literals;
struct av_display_t: public display_t {
AVVideo *av_capture;
CGDirectDisplayID display_id;
~av_display_t() {
[av_capture release];
}
capture_e
capture(const push_captured_image_cb_t &push_captured_image_cb, const pull_free_image_cb_t &pull_free_image_cb, bool *cursor) override {
auto signal = [av_capture capture:^(CMSampleBufferRef sampleBuffer) {
std::shared_ptr<img_t> img_out;
if (!pull_free_image_cb(img_out)) {
// got interrupt signal
// returning false here stops capture backend
return false;
}
auto av_img = std::static_pointer_cast<av_img_t>(img_out);
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
av_img->sample_buffer = std::make_shared<av_sample_buf_t>(sampleBuffer);
av_img->pixel_buffer = std::make_shared<av_pixel_buf_t>(pixelBuffer);
img_out->data = av_img->pixel_buffer->lock();
img_out->width = CVPixelBufferGetWidth(pixelBuffer);
img_out->height = CVPixelBufferGetHeight(pixelBuffer);
img_out->row_pitch = CVPixelBufferGetBytesPerRow(pixelBuffer);
img_out->pixel_pitch = img_out->row_pitch / img_out->width;
if (!push_captured_image_cb(std::move(img_out), true)) {
// got interrupt signal
// returning false here stops capture backend
return false;
}
return true;
}];
// FIXME: We should time out if an image isn't returned for a while
dispatch_semaphore_wait(signal, DISPATCH_TIME_FOREVER);
return capture_e::ok;
}
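One way to address the FIXME above is dispatch's timed wait; a minimal sketch for the last lines of capture(), assuming a 500 ms deadline (an illustrative value) and that returning capture_e::timeout is acceptable to callers:

// Wait with a deadline instead of DISPATCH_TIME_FOREVER and surface a
// timeout; dispatch_semaphore_wait() returns non-zero when the deadline
// passes without a signal.
auto deadline = dispatch_time(DISPATCH_TIME_NOW, 500 * NSEC_PER_MSEC);
if (dispatch_semaphore_wait(signal, deadline) != 0) {
  return capture_e::timeout;
}
return capture_e::ok;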
std::shared_ptr<img_t>
alloc_img() override {
return std::make_shared<av_img_t>();
}
std::unique_ptr<avcodec_encode_device_t>
make_avcodec_encode_device(pix_fmt_e pix_fmt) override {
if (pix_fmt == pix_fmt_e::yuv420p) {
av_capture.pixelFormat = kCVPixelFormatType_32BGRA;
return std::make_unique<avcodec_encode_device_t>();
}
else if (pix_fmt == pix_fmt_e::nv12 || pix_fmt == pix_fmt_e::p010) {
auto device = std::make_unique<nv12_zero_device>();
device->init(static_cast<void *>(av_capture), pix_fmt, setResolution, setPixelFormat);
return device;
}
else {
BOOST_LOG(error) << "Unsupported Pixel Format."sv;
return nullptr;
}
}
int
dummy_img(img_t *img) override {
auto signal = [av_capture capture:^(CMSampleBufferRef sampleBuffer) {
auto av_img = (av_img_t *) img;
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
av_img->sample_buffer = std::make_shared<av_sample_buf_t>(sampleBuffer);
av_img->pixel_buffer = std::make_shared<av_pixel_buf_t>(pixelBuffer);
img->data = av_img->pixel_buffer->lock();
img->width = CVPixelBufferGetWidth(pixelBuffer);
img->height = CVPixelBufferGetHeight(pixelBuffer);
img->row_pitch = CVPixelBufferGetBytesPerRow(pixelBuffer);
img->pixel_pitch = img->row_pitch / img->width;
// returning false here stops capture backend
return false;
}];
dispatch_semaphore_wait(signal, DISPATCH_TIME_FOREVER);
return 0;
}
/**
* A bridge from the pure C++ code of the hwdevice_t class to the pure Objective C code.
*
* display --> an opaque pointer to an object of this class
* width --> the intended capture width
* height --> the intended capture height
*/
static void
setResolution(void *display, int width, int height) {
[static_cast<AVVideo *>(display) setFrameWidth:width frameHeight:height];
}
static void
setPixelFormat(void *display, OSType pixelFormat) {
static_cast<AVVideo *>(display).pixelFormat = pixelFormat;
}
};
std::shared_ptr<display_t>
display(platf::mem_type_e hwdevice_type, const std::string &display_name, const video::config_t &config) {
if (hwdevice_type != platf::mem_type_e::system && hwdevice_type != platf::mem_type_e::videotoolbox) {
BOOST_LOG(error) << "Could not initialize display with the given hw device type."sv;
return nullptr;
}
auto display = std::make_shared<av_display_t>();
display->display_id = CGMainDisplayID();
if (!display_name.empty()) {
auto display_array = [AVVideo displayNames];
for (NSDictionary *item in display_array) {
NSString *name = item[@"name"];
if (name.UTF8String == display_name) {
NSNumber *display_id = item[@"id"];
display->display_id = [display_id unsignedIntValue];
}
}
}
display->av_capture = [[AVVideo alloc] initWithDisplay:display->display_id frameRate:config.framerate];
if (!display->av_capture) {
BOOST_LOG(error) << "Video setup failed."sv;
return nullptr;
}
display->width = display->av_capture.frameWidth;
display->height = display->av_capture.frameHeight;
return display;
}
std::vector<std::string>
display_names(mem_type_e hwdevice_type) {
__block std::vector<std::string> display_names;
auto display_array = [AVVideo displayNames];
display_names.reserve([display_array count]);
[display_array enumerateObjectsUsingBlock:^(NSDictionary *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
NSString *name = obj[@"name"];
display_names.push_back(name.UTF8String);
}];
return display_names;
}
} // namespace platf

View File

@@ -1,19 +0,0 @@
/**
* @file src/platform/macos/misc.h
* @brief todo
*/
#pragma once
#include <vector>
#include <CoreGraphics/CoreGraphics.h>
namespace dyn {
typedef void (*apiproc)(void);
int
load(void *handle, const std::vector<std::tuple<apiproc *, const char *>> &funcs, bool strict = true);
void *
handle(const std::vector<const char *> &libs);
} // namespace dyn
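A hedged usage sketch of these helpers; the library names and symbol below are placeholders, not actual Sunshine dependencies:

#include <vector>

typedef int (*example_fn_t)(void);

int
load_example() {
  // Open the first library that resolves, then bind one symbol into the
  // paired function pointer; dyn::load() logs and fails in strict mode if
  // the symbol is missing.
  void *lib = dyn::handle({ "libexample.1.dylib", "libexample.dylib" });
  if (!lib) {
    return -1;
  }
  example_fn_t example_entry = nullptr;
  if (dyn::load(lib, { { (dyn::apiproc *) &example_entry, "example_entry" } })) {
    return -1;
  }
  return example_entry();
}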

View File

@@ -1,178 +0,0 @@
/**
* @file src/platform/macos/misc.mm
* @brief todo
*/
// Required for IPV6_PKTINFO with Darwin headers
#ifndef __APPLE_USE_RFC_3542
#define __APPLE_USE_RFC_3542 1
#endif
#include <Foundation/Foundation.h>
#include <arpa/inet.h>
#include <dlfcn.h>
#include <fcntl.h>
#include <ifaddrs.h>
#include <mach-o/dyld.h>
#include <net/if_dl.h>
#include <pwd.h>
#include "misc.h"
#include "src/main.h"
#include "src/platform/common.h"
using namespace std::literals;
namespace fs = std::filesystem;
namespace platf {
// Even though the following two functions are available starting in macOS 10.15, they weren't
// actually in the Mac SDK until Xcode 12.2, the first to include the SDK for macOS 11
#if __MAC_OS_X_VERSION_MAX_ALLOWED < 110000 // __MAC_11_0
// If they're not in the SDK then we can use our own function definitions.
// Need to use weak import so that this will link in macOS 10.14 and earlier
extern "C" bool
CGPreflightScreenCaptureAccess(void) __attribute__((weak_import));
extern "C" bool
CGRequestScreenCaptureAccess(void) __attribute__((weak_import));
#endif
std::unique_ptr<deinit_t>
init() {
// This will generate a warning about CGPreflightScreenCaptureAccess and
// CGRequestScreenCaptureAccess being unavailable before macOS 10.15, but
// we have a guard to prevent it from being called on those earlier systems.
// Unfortunately the supported way to silence this warning, using @available,
// produces linker errors for __isPlatformVersionAtLeast, so we have to use
// a different method.
// We also ignore "tautological-pointer-compare" because when compiling with
// Xcode 12.2 and later, these functions are not weakly linked and will never
// be null, and therefore generate this warning. Since we are weakly linking
// when compiling with earlier Xcode versions, the check for null is
// necessary and so we ignore the warning.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
#pragma clang diagnostic ignored "-Wtautological-pointer-compare"
if ([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:((NSOperatingSystemVersion) { 10, 15, 0 })] &&
// Double check that these weakly-linked symbols have been loaded:
CGPreflightScreenCaptureAccess != nullptr && CGRequestScreenCaptureAccess != nullptr &&
!CGPreflightScreenCaptureAccess()) {
BOOST_LOG(error) << "No screen capture permission!"sv;
BOOST_LOG(error) << "Please activate it in 'System Preferences' -> 'Privacy' -> 'Screen Recording'"sv;
CGRequestScreenCaptureAccess();
return nullptr;
}
#pragma clang diagnostic pop
return std::make_unique<deinit_t>();
}
fs::path
appdata() {
const char *homedir;
if ((homedir = getenv("HOME")) == nullptr) {
homedir = getpwuid(geteuid())->pw_dir;
}
return fs::path { homedir } / ".config/sunshine"sv;
}
using ifaddr_t = util::safe_ptr<ifaddrs, freeifaddrs>;
ifaddr_t
get_ifaddrs() {
ifaddrs *p { nullptr };
getifaddrs(&p);
return ifaddr_t { p };
}
std::string
from_sockaddr(const sockaddr *const ip_addr) {
char data[INET6_ADDRSTRLEN] = {};
auto family = ip_addr->sa_family;
if (family == AF_INET6) {
inet_ntop(AF_INET6, &((sockaddr_in6 *) ip_addr)->sin6_addr, data,
INET6_ADDRSTRLEN);
}
else if (family == AF_INET) {
inet_ntop(AF_INET, &((sockaddr_in *) ip_addr)->sin_addr, data,
INET_ADDRSTRLEN);
}
return std::string { data };
}
std::pair<std::uint16_t, std::string>
from_sockaddr_ex(const sockaddr *const ip_addr) {
char data[INET6_ADDRSTRLEN] = {};
auto family = ip_addr->sa_family;
std::uint16_t port = 0;
if (family == AF_INET6) {
inet_ntop(AF_INET6, &((sockaddr_in6 *) ip_addr)->sin6_addr, data,
INET6_ADDRSTRLEN);
port = ((sockaddr_in6 *) ip_addr)->sin6_port;
}
else if (family == AF_INET) {
inet_ntop(AF_INET, &((sockaddr_in *) ip_addr)->sin_addr, data,
INET_ADDRSTRLEN);
port = ((sockaddr_in *) ip_addr)->sin_port;
}
return { port, std::string { data } };
}
void
adjust_thread_priority(thread_priority_e priority) {
// Unimplemented
}
} // namespace platf
namespace dyn {
void *
handle(const std::vector<const char *> &libs) {
void *handle;
for (auto lib : libs) {
handle = dlopen(lib, RTLD_LAZY | RTLD_LOCAL);
if (handle) {
return handle;
}
}
std::stringstream ss;
ss << "Couldn't find any of the following libraries: ["sv << libs.front();
std::for_each(std::begin(libs) + 1, std::end(libs), [&](auto lib) {
ss << ", "sv << lib;
});
ss << ']';
BOOST_LOG(error) << ss.str();
return nullptr;
}
int
load(void *handle, const std::vector<std::tuple<apiproc *, const char *>> &funcs, bool strict) {
int err = 0;
for (auto &func : funcs) {
TUPLE_2D_REF(fn, name, func);
*fn = (void (*)()) dlsym(handle, name);
if (!*fn && strict) {
BOOST_LOG(error) << "Couldn't find function: "sv << name;
err = -1;
}
}
return err;
}
} // namespace dyn

View File

@@ -1,72 +0,0 @@
/**
* @file src/platform/macos/nv12_zero_device.cpp
* @brief todo
*/
#include "src/platform/macos/nv12_zero_device.h"
#include "src/video.h"
extern "C" {
#include "libavutil/imgutils.h"
}
namespace platf {
void
free_frame(AVFrame *frame) {
av_frame_free(&frame);
}
void
free_buffer(void *opaque, uint8_t *data) {
CVPixelBufferRelease((CVPixelBufferRef) data);
}
int
nv12_zero_device::convert(platf::img_t &img) {
av_img_t *av_img = (av_img_t *) &img;
// Release any existing CVPixelBuffer previously retained for encoding
av_buffer_unref(&av_frame->buf[0]);
// Attach an AVBufferRef to this frame which will retain ownership of the CVPixelBuffer
// until av_buffer_unref() is called (above) or the frame is freed with av_frame_free().
//
// The presence of the AVBufferRef allows FFmpeg to simply add a reference to the buffer
// rather than having to perform a deep copy of the data buffers in avcodec_send_frame().
av_frame->buf[0] = av_buffer_create((uint8_t *) CFRetain(av_img->pixel_buffer->buf), 0, free_buffer, NULL, 0);
// Place a CVPixelBufferRef at data[3] as required by AV_PIX_FMT_VIDEOTOOLBOX
av_frame->data[3] = (uint8_t *) av_img->pixel_buffer->buf;
return 0;
}
int
nv12_zero_device::set_frame(AVFrame *frame, AVBufferRef *hw_frames_ctx) {
this->frame = frame;
av_frame.reset(frame);
resolution_fn(this->display, frame->width, frame->height);
return 0;
}
int
nv12_zero_device::init(void *display, pix_fmt_e pix_fmt, resolution_fn_t resolution_fn, pixel_format_fn_t pixel_format_fn) {
pixel_format_fn(display, pix_fmt == pix_fmt_e::nv12 ?
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange :
kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange);
this->display = display;
this->resolution_fn = resolution_fn;
// we never use this pointer, but its existence is checked/used
// by the platform-independent code
data = this;
return 0;
}
} // namespace platf

View File

@@ -1,40 +0,0 @@
/**
* @file src/platform/macos/nv12_zero_device.h
* @brief todo
*/
#pragma once
#include "src/platform/common.h"
#include "src/platform/macos/av_img_t.h"
struct AVFrame;
namespace platf {
void
free_frame(AVFrame *frame);
class nv12_zero_device: public avcodec_encode_device_t {
// display holds a pointer to an av_video object. Since the namespaces of AVFoundation
// and FFMPEG collide, we need this opaque pointer and cannot use the definition
void *display;
public:
// this function is used to set the resolution on an av_video object that we cannot
// call directly because of namespace collisions between AVFoundation and FFMPEG
using resolution_fn_t = std::function<void(void *display, int width, int height)>;
resolution_fn_t resolution_fn;
using pixel_format_fn_t = std::function<void(void *display, int pixelFormat)>;
int
init(void *display, pix_fmt_e pix_fmt, resolution_fn_t resolution_fn, pixel_format_fn_t pixel_format_fn);
int
convert(img_t &img);
int
set_frame(AVFrame *frame, AVBufferRef *hw_frames_ctx);
private:
util::safe_ptr<AVFrame, free_frame> av_frame;
};
} // namespace platf

View File

@@ -392,8 +392,6 @@ namespace video {
virtual void
request_idr_frame() = 0;
virtual void
update_bitrate (int bitrate) = 0;
virtual void
request_normal_frame() = 0;
@@ -444,13 +442,6 @@ namespace video {
}
}
void
update_bitrate(int bitrate) override {
bitrate = bitrate * 1000;
avcodec_ctx->rc_max_rate = bitrate;
avcodec_ctx->bit_rate = bitrate;
avcodec_ctx->rc_min_rate = bitrate;
}
void
request_normal_frame() override {
@@ -501,12 +492,6 @@ namespace video {
force_idr = false;
}
void
update_bitrate(int bitrate) override {
if (!device || !device->nvenc) return;
device->nvenc->update_bitrate(bitrate);
}
void
@@ -532,34 +517,15 @@ namespace video {
bool force_idr = false;
};
struct sync_session_ctx_t {
safe::signal_t *join_event;
safe::mail_raw_t::event_t<bool> shutdown_event;
safe::mail_raw_t::queue_t<packet_t> packets;
safe::mail_raw_t::event_t<bool> idr_events;
safe::mail_raw_t::event_t<int> bitrate_events;
safe::mail_raw_t::event_t<std::string> switch_display;
safe::mail_raw_t::event_t<bool> toggle_cursor;
safe::mail_raw_t::event_t<hdr_info_t> hdr_events;
config_t config;
int frame_nr;
void *channel_data;
};
struct sync_session_t {
sync_session_ctx_t *ctx;
std::unique_ptr<encode_session_t> session;
};
using encode_session_ctx_queue_t = safe::queue_t<sync_session_ctx_t>;
using encode_e = platf::capture_e;
struct capture_ctx_t {
safe::mail_raw_t::event_t<std::string> switch_display;
safe::mail_raw_t::event_t<bool> toggle_cursor;
img_event_t images;
config_t config;
config_t *config;
};
struct capture_thread_async_ctx_t {
@@ -571,14 +537,6 @@ namespace video {
sync_util::sync_t<std::weak_ptr<platf::display_t>> display_wp;
};
struct capture_thread_sync_ctx_t {
encode_session_ctx_queue_t encode_session_ctx_queue { 30 };
};
int
start_capture_sync(capture_thread_sync_ctx_t &ctx);
void
end_capture_sync(capture_thread_sync_ctx_t &ctx);
int
start_capture_async(capture_thread_async_ctx_t &ctx);
void
@@ -586,7 +544,6 @@ namespace video {
// Keep a reference counter to ensure the capture thread only runs when other threads have a reference to the capture thread
auto capture_thread_async = safe::make_shared<capture_thread_async_ctx_t>(start_capture_async, end_capture_async);
auto capture_thread_sync = safe::make_shared<capture_thread_sync_ctx_t>(start_capture_sync, end_capture_sync);
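The comment above is the only description of this mechanism, so here is a self-contained C++ sketch of the idea (illustrative, not Sunshine's actual safe::make_shared, and with thread-safety simplified): the start function runs when the first reference is taken, the stop function when the last one is dropped, so the capture thread lives exactly as long as someone holds a ref().

#include <memory>
#include <mutex>

template <class Ctx>
class shared_ctx_t {
public:
  shared_ctx_t(int (*start_fn)(Ctx &), void (*stop_fn)(Ctx &)):
      start_fn { start_fn }, stop_fn { stop_fn } {}

  // First caller starts the context; the deleter stops it once the last
  // shared_ptr goes away.
  std::shared_ptr<Ctx> ref() {
    std::lock_guard lg { mtx };
    auto sp = wp.lock();
    if (!sp) {
      auto ctx = std::make_unique<Ctx>();
      if (start_fn(*ctx)) {
        return nullptr;  // start failed
      }
      sp = std::shared_ptr<Ctx>(ctx.release(), [this](Ctx *p) {
        stop_fn(*p);
        delete p;
      });
      wp = sp;
    }
    return sp;
  }

private:
  int (*start_fn)(Ctx &);
  void (*stop_fn)(Ctx &);
  std::weak_ptr<Ctx> wp;
  std::mutex mtx;
};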
#ifdef _WIN32
static encoder_t nvenc {
@@ -626,85 +583,6 @@ namespace video {
},
PARALLEL_ENCODING | REF_FRAMES_INVALIDATION // flags
};
#elif !defined(__APPLE__)
static encoder_t nvenc {
"nvenc"sv,
std::make_unique<encoder_platform_formats_avcodec>(
#ifdef _WIN32
AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_NONE,
AV_PIX_FMT_D3D11,
#else
AV_HWDEVICE_TYPE_CUDA, AV_HWDEVICE_TYPE_NONE,
AV_PIX_FMT_CUDA,
#endif
AV_PIX_FMT_NV12, AV_PIX_FMT_P010,
#ifdef _WIN32
dxgi_init_avcodec_hardware_input_buffer
#else
cuda_init_avcodec_hardware_input_buffer
#endif
),
{
// Common options
{
{ "delay"s, 0 },
{ "forced-idr"s, 1 },
{ "zerolatency"s, 1 },
{ "preset"s, &config::video.nv_legacy.preset },
{ "tune"s, NV_ENC_TUNING_INFO_ULTRA_LOW_LATENCY },
{ "rc"s, NV_ENC_PARAMS_RC_CBR },
{ "multipass"s, &config::video.nv_legacy.multipass },
},
// SDR-specific options
{},
// HDR-specific options
{},
std::nullopt,
"av1_nvenc"s,
},
{
// Common options
{
{ "delay"s, 0 },
{ "forced-idr"s, 1 },
{ "zerolatency"s, 1 },
{ "preset"s, &config::video.nv_legacy.preset },
{ "tune"s, NV_ENC_TUNING_INFO_ULTRA_LOW_LATENCY },
{ "rc"s, NV_ENC_PARAMS_RC_CBR },
{ "multipass"s, &config::video.nv_legacy.multipass },
},
// SDR-specific options
{
{ "profile"s, (int) nv::profile_hevc_e::main },
},
// HDR-specific options
{
{ "profile"s, (int) nv::profile_hevc_e::main_10 },
},
std::nullopt,
"hevc_nvenc"s,
},
{
{
{ "delay"s, 0 },
{ "forced-idr"s, 1 },
{ "zerolatency"s, 1 },
{ "preset"s, &config::video.nv_legacy.preset },
{ "tune"s, NV_ENC_TUNING_INFO_ULTRA_LOW_LATENCY },
{ "rc"s, NV_ENC_PARAMS_RC_CBR },
{ "coder"s, &config::video.nv_legacy.h264_coder },
{ "multipass"s, &config::video.nv_legacy.multipass },
},
// SDR-specific options
{
{ "profile"s, (int) nv::profile_h264_e::high },
},
{}, // HDR-specific options
std::make_optional<encoder_t::option_t>({ "qp"s, &config::video.qp }),
"h264_nvenc"s,
},
PARALLEL_ENCODING
};
#endif
#ifdef _WIN32
@@ -948,70 +826,15 @@ namespace video {
};
#endif
#ifdef __APPLE__
static encoder_t videotoolbox {
"videotoolbox"sv,
std::make_unique<encoder_platform_formats_avcodec>(
AV_HWDEVICE_TYPE_VIDEOTOOLBOX, AV_HWDEVICE_TYPE_NONE,
AV_PIX_FMT_VIDEOTOOLBOX,
AV_PIX_FMT_NV12, AV_PIX_FMT_P010,
vt_init_avcodec_hardware_input_buffer),
{
// Common options
{
{ "allow_sw"s, &config::video.vt.vt_allow_sw },
{ "require_sw"s, &config::video.vt.vt_require_sw },
{ "realtime"s, &config::video.vt.vt_realtime },
{ "prio_speed"s, 1 },
},
{}, // SDR-specific options
{}, // HDR-specific options
std::nullopt,
"av1_videotoolbox"s,
},
{
// Common options
{
{ "allow_sw"s, &config::video.vt.vt_allow_sw },
{ "require_sw"s, &config::video.vt.vt_require_sw },
{ "realtime"s, &config::video.vt.vt_realtime },
{ "prio_speed"s, 1 },
},
{}, // SDR-specific options
{}, // HDR-specific options
std::nullopt,
"hevc_videotoolbox"s,
},
{
// Common options
{
{ "allow_sw"s, &config::video.vt.vt_allow_sw },
{ "require_sw"s, &config::video.vt.vt_require_sw },
{ "realtime"s, &config::video.vt.vt_realtime },
{ "prio_speed"s, 1 },
},
{}, // SDR-specific options
{}, // HDR-specific options
std::nullopt,
"h264_videotoolbox"s,
},
DEFAULT
};
#endif
static const std::vector<encoder_t *> encoders {
#ifndef __APPLE__
&nvenc,
#endif
#ifdef _WIN32
&quicksync,
&amdvce,
#endif
#ifdef __linux__
&vaapi,
#endif
#ifdef __APPLE__
&videotoolbox,
#endif
&software
};
@@ -1022,12 +845,12 @@ namespace video {
bool last_encoder_probe_supported_ref_frames_invalidation = false;
void
reset_display(std::shared_ptr<platf::display_t> &disp, const platf::mem_type_e &type, const std::string &display_name, config_t config) {
reset_display(std::shared_ptr<platf::display_t> &disp, const platf::mem_type_e &type, const std::string &display_name, config_t *config) {
// We try this twice, in case we still get an error on reinitialization
update_resolution(config, display_name);
for (int x = 0; x < 2; ++x) {
disp.reset();
disp = platf::display(type, display_name, config);
disp = platf::display(type, display_name, *config);
if (disp) {
break;
}
@@ -1092,7 +915,7 @@ namespace video {
}
capture_ctxs.emplace_back(std::move(*initial_capture_ctx));
auto disp = platf::display(encoder.platform_formats->dev_type, display_name, capture_ctxs.front().config);
auto disp = platf::display(encoder.platform_formats->dev_type, display_name, *capture_ctxs.front().config);
if (!disp) {
return;
}
@@ -1771,13 +1594,13 @@ namespace video {
int &frame_nr, // Store progress of the frame number
safe::mail_t mail,
img_event_t images,
config_t config,
config_t *config,
std::shared_ptr<platf::display_t> disp,
std::unique_ptr<platf::encode_device_t> encode_device,
safe::signal_t &reinit_event,
const encoder_t &encoder,
void *channel_data) {
auto session = make_encode_session(disp.get(), encoder, config, disp->width, disp->height, std::move(encode_device));
auto session = make_encode_session(disp.get(), encoder, *config, disp->width, disp->height, std::move(encode_device));
if (!session) {
return;
}
@@ -1821,8 +1644,9 @@ namespace video {
if (bitrate_events->peek()) {
auto bitrate = bitrate_events->pop().value();
BOOST_LOG(info) << "bitrate changed"sv;
session->update_bitrate(bitrate);
BOOST_LOG(info) << "bitrate changed to "sv << bitrate;
config->bitrate = bitrate;
break;
}
if (requested_idr_frame) {
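For context, the hunk above replaces the removed update_bitrate() path: rather than reconfiguring the live encoder, the handler writes the new bitrate into the shared config (now held by pointer) and breaks out of the encode loop so the session is rebuilt with the new settings. A minimal self-contained sketch of that flow, with stand-in types (config_t and the event holder here are simplified, not the project's real definitions):

#include <optional>

struct config_t { int bitrate; };

struct bitrate_event_t {
  std::optional<int> pending;
};

// Returns false when the session must be torn down and recreated.
bool
encode_iteration(config_t *config, bitrate_event_t &bitrate_events) {
  if (bitrate_events.pending) {
    config->bitrate = *bitrate_events.pending;  // visible to the restarted session
    bitrate_events.pending.reset();
    return false;  // caller breaks out and rebuilds the encode session
  }
  // ... convert and encode the next frame ...
  return true;
}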
@@ -1877,225 +1701,6 @@ namespace video {
return result;
}
std::optional<sync_session_t>
make_synced_session(platf::display_t *disp, const encoder_t &encoder, platf::img_t &img, sync_session_ctx_t &ctx) {
sync_session_t encode_session;
encode_session.ctx = &ctx;
auto encode_device = make_encode_device(*disp, encoder, ctx.config);
if (!encode_device) {
return std::nullopt;
}
// Update client with our current HDR display state
hdr_info_t hdr_info = std::make_unique<hdr_info_raw_t>(false);
if (colorspace_is_hdr(encode_device->colorspace)) {
if (disp->get_hdr_metadata(hdr_info->metadata)) {
hdr_info->enabled = true;
}
else {
BOOST_LOG(error) << "Couldn't get display hdr metadata when colorspace selection indicates it should have one";
}
}
ctx.hdr_events->raise(std::move(hdr_info));
auto session = make_encode_session(disp, encoder, ctx.config, img.width, img.height, std::move(encode_device));
if (!session) {
return std::nullopt;
}
// Load the initial image to prepare for encoding
if (session->convert(img)) {
BOOST_LOG(error) << "Could not convert initial image"sv;
return std::nullopt;
}
encode_session.session = std::move(session);
return encode_session;
}
encode_e
encode_run_sync(
std::vector<std::unique_ptr<sync_session_ctx_t>> &synced_session_ctxs,
encode_session_ctx_queue_t &encode_session_ctx_queue) {
const auto &encoder = *chosen_encoder;
auto display_names = platf::display_names(encoder.platform_formats->dev_type);
if (display_names.empty()) {
BOOST_LOG(error) << "No available display";
return encode_e::error;
}
auto display_name = display_names[0];
std::shared_ptr<platf::display_t> disp;
if (synced_session_ctxs.empty()) {
auto ctx = encode_session_ctx_queue.pop();
if (!ctx) {
return encode_e::ok;
}
synced_session_ctxs.emplace_back(std::make_unique<sync_session_ctx_t>(std::move(*ctx)));
}
while (encode_session_ctx_queue.running()) {
// reset_display() will sleep between retries
reset_display(disp, encoder.platform_formats->dev_type, display_name, synced_session_ctxs.front()->config);
if (disp) {
break;
}
}
if (!disp) {
return encode_e::error;
}
auto img = disp->alloc_img();
if (!img || disp->dummy_img(img.get())) {
return encode_e::error;
}
std::vector<sync_session_t> synced_sessions;
for (auto &ctx : synced_session_ctxs) {
auto synced_session = make_synced_session(disp.get(), encoder, *img, *ctx);
if (!synced_session) {
return encode_e::error;
}
synced_sessions.emplace_back(std::move(*synced_session));
}
bool display_cursor = false;
auto ec = platf::capture_e::ok;
while (encode_session_ctx_queue.running()) {
auto push_captured_image_callback = [&](std::shared_ptr<platf::img_t> &&img, bool frame_captured) -> bool {
while (encode_session_ctx_queue.peek()) {
auto encode_session_ctx = encode_session_ctx_queue.pop();
if (!encode_session_ctx) {
return false;
}
synced_session_ctxs.emplace_back(std::make_unique<sync_session_ctx_t>(std::move(*encode_session_ctx)));
auto encode_session = make_synced_session(disp.get(), encoder, *img, *synced_session_ctxs.back());
if (!encode_session) {
ec = platf::capture_e::error;
return false;
}
synced_sessions.emplace_back(std::move(*encode_session));
}
KITTY_WHILE_LOOP(auto pos = std::begin(synced_sessions), pos != std::end(synced_sessions), {
auto ctx = pos->ctx;
if (ctx->shutdown_event->peek()) {
BOOST_LOG(info) << "Sync shutdown event raised";
// Let waiting thread know it can delete shutdown_event
ctx->join_event->raise(true);
pos = synced_sessions.erase(pos);
synced_session_ctxs.erase(std::find_if(std::begin(synced_session_ctxs), std::end(synced_session_ctxs), [&ctx_p = ctx](auto &ctx) {
return ctx.get() == ctx_p;
}));
continue;
} else if (ctx->switch_display->peek()) {
ec = platf::capture_e::reinit;
display_name = validate_display_name(display_names, ctx->switch_display->pop().value());
BOOST_LOG(info) << "capturing screen on display " << display_name;
return false;
} else if (ctx->toggle_cursor->peek()) {
display_cursor = ctx->toggle_cursor->pop();
BOOST_LOG(info) << "cursor set to " << display_cursor;
}
if (ctx->bitrate_events->peek()) {
auto bitrate = ctx->bitrate_events->pop().value();
BOOST_LOG(info) << "bitrate changed"sv;
pos->session->update_bitrate(bitrate);
}
if (ctx->idr_events->peek()) {
BOOST_LOG(info) << "IDR frame generated"sv;
pos->session->request_idr_frame();
ctx->idr_events->pop();
}
if (frame_captured && pos->session->convert(*img)) {
BOOST_LOG(error) << "Could not convert image"sv;
ctx->shutdown_event->raise(true);
continue;
}
std::optional<std::chrono::steady_clock::time_point> frame_timestamp;
if (img) {
frame_timestamp = img->frame_timestamp;
}
if (encode(ctx->frame_nr++, *pos->session, ctx->packets, ctx->channel_data, frame_timestamp)) {
BOOST_LOG(error) << "Could not encode video packet"sv;
ctx->shutdown_event->raise(true);
continue;
}
pos->session->request_normal_frame();
++pos;
})
return true;
};
auto pull_free_image_callback = [&img](std::shared_ptr<platf::img_t> &img_out) -> bool {
img_out = img;
img_out->frame_timestamp.reset();
return true;
};
auto status = disp->capture(push_captured_image_callback, pull_free_image_callback, &display_cursor);
switch (status) {
case platf::capture_e::reinit:
case platf::capture_e::error:
case platf::capture_e::ok:
case platf::capture_e::timeout:
case platf::capture_e::interrupted:
return ec != platf::capture_e::ok ? ec : status;
}
}
return encode_e::ok;
}
void
captureThreadSync() {
auto ref = capture_thread_sync.ref();
std::vector<std::unique_ptr<sync_session_ctx_t>> synced_session_ctxs;
auto &ctx = ref->encode_session_ctx_queue;
auto lg = util::fail_guard([&]() {
ctx.stop();
for (auto &ctx : synced_session_ctxs) {
ctx->shutdown_event->raise(true);
ctx->join_event->raise(true);
}
for (auto &ctx : ctx.unsafe()) {
ctx.shutdown_event->raise(true);
ctx.join_event->raise(true);
}
});
// Encoding and capture takes place on this thread
platf::adjust_thread_priority(platf::thread_priority_e::high);
while (encode_run_sync(synced_session_ctxs, ctx) == encode_e::reinit) {}
}
void
capture_async(
@@ -2119,7 +1724,7 @@ namespace video {
ref->capture_ctx_queue->raise(capture_ctx_t {
mail->event<std::string>(mail::switch_display),
mail->event<bool>(mail::toggle_cursor),
images, config });
images, &config });
if (!ref->capture_ctx_queue->running()) {
return;
@@ -2175,7 +1780,7 @@ namespace video {
frame_nr,
mail,
images,
config,
&config,
display,
std::move(encode_device),
ref->reinit_event,
@@ -2186,7 +1791,7 @@ namespace video {
void
update_resolution(
config_t config,
config_t *config,
const std::string &display_name) {
HRESULT result = 1;
@@ -2206,8 +1811,8 @@ namespace video {
PDEVMODE dm = (PDEVMODE) calloc(1, sizeof(DEVMODE));  // zero-init so dmDriverExtra is 0
dm->dmSize = sizeof(DEVMODE);  // EnumDisplaySettings requires dmSize to be set
if (EnumDisplaySettings(displayDevice->DeviceName, ENUM_CURRENT_SETTINGS, dm)) {
config.width = dm->dmPelsWidth;
config.height = dm->dmPelsHeight;
config->width = dm->dmPelsWidth;
config->height = dm->dmPelsHeight;
}
}
} while (result);
@@ -2231,30 +1836,7 @@ namespace video {
idr_events->raise(true);
BOOST_LOG(info) << "Start capturing";
if (chosen_encoder->flags & PARALLEL_ENCODING) {
BOOST_LOG(info) << "Capturing async";
capture_async(mail, config, channel_data);
} else {
BOOST_LOG(info) << "Capturing sync";
safe::signal_t join_event;
auto ref = capture_thread_sync.ref();
ref->encode_session_ctx_queue.raise(sync_session_ctx_t {
&join_event,
std::move(shutdown_event),
std::move(packets),
std::move(idr_events),
std::move(bitrate_events),
std::move(display_events),
std::move(ptr_events),
mail->event<hdr_info_t>(mail::hdr),
config,
1,
channel_data,
});
// Wait for join signal
join_event.view();
}
capture_async(mail, config, channel_data);
}
enum validate_flag_e {
@@ -2262,8 +1844,8 @@ namespace video {
};
int
validate_config(std::shared_ptr<platf::display_t> &disp, const encoder_t &encoder, const config_t &config) {
reset_display(disp, encoder.platform_formats->dev_type, config::video.output_name, config);
validate_config(std::shared_ptr<platf::display_t> &disp, const encoder_t &encoder, config_t config) {
reset_display(disp, encoder.platform_formats->dev_type, config::video.output_name, &config);
if (!disp) {
return -1;
}
@@ -2342,7 +1924,7 @@ namespace video {
config_t config_autoselect { 1920, 1080, 60, 1000, 1, 0, 1, 0, 0 };
// If the encoder isn't supported at all (not even H.264), bail early
reset_display(disp, encoder.platform_formats->dev_type, config::video.output_name, config_autoselect);
reset_display(disp, encoder.platform_formats->dev_type, config::video.output_name, &config_autoselect);
if (!disp) {
return false;
}
@@ -2778,14 +2360,6 @@ namespace video {
capture_thread_ctx.capture_thread.join();
}
int
start_capture_sync(capture_thread_sync_ctx_t &ctx) {
std::thread { &captureThreadSync }.detach();
return 0;
}
void
end_capture_sync(capture_thread_sync_ctx_t &ctx) {}
platf::mem_type_e
map_base_dev_type(AVHWDeviceType type) {
switch (type) {

View File

@@ -155,7 +155,7 @@ namespace video {
void
update_resolution(
config_t config,
config_t *config,
const std::string &display_name
);