Mirror of https://gitlab.uni-freiburg.de/opensourcevdi/spice (synced 2025-12-29 17:07:15 +00:00)
streaming: Always delegate bit rate control to the video encoder
The video encoders already have sophisticated bit rate control code that can react to both stream reports sent by the client and server frame drop notifications. Furthermore they can adjust both the frame rate and the image quality to best match the network conditions.

But if the client cannot send stream reports all this is bypassed, and instead the streaming code performs its own primitive bit rate control that can only adjust the frame rate. So this patch removes the code duplication and lets the video encoders do their job.

Signed-off-by: Francois Gouget <fgouget@codeweavers.com>
Acked-by: Frediano Ziglio <fziglio@redhat.com>
This commit is contained in: parent 5554b1ec95, commit 97fcad82eb
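For readers unfamiliar with the mechanism the commit message refers to: when a stream is created, the display channel hands the video encoder a set of rate-control callbacks (get_roundtrip_ms, get_source_fps, update_client_playback_delay, as seen in dcc_create_stream in the diff below), and the encoder then decides on its own whether each frame is encoded or dropped. The following is a minimal, self-contained sketch of that division of labour; the struct layout, helper functions and numbers are simplified assumptions for illustration, not the actual SPICE server code.

/* Sketch only: the display-channel side fills a callback table and the
 * encoder alone applies rate control (drop vs. encode). */
#include <stdio.h>
#include <stdint.h>

typedef struct {
    void *opaque;
    uint32_t (*get_roundtrip_ms)(void *opaque);            /* network RTT estimate */
    uint32_t (*get_source_fps)(void *opaque);               /* how fast the guest produces frames */
    void (*update_client_playback_delay)(void *opaque, uint32_t delay_ms);
} VideoEncoderRateControlCbs;                               /* simplified stand-in */

typedef struct {
    VideoEncoderRateControlCbs cbs;
    uint32_t next_frame_mm_time;   /* earliest time the next frame may go out */
    uint32_t frame_period_ms;      /* derived from the current bit rate budget */
} Encoder;

enum { FRAME_ENCODE, FRAME_DROP };

/* The encoder, not the streaming code, decides whether to drop a frame:
 * anything arriving before the time budget allows another frame is dropped. */
static int encoder_encode_frame(Encoder *enc, uint32_t frame_mm_time)
{
    if (frame_mm_time < enc->next_frame_mm_time) {
        return FRAME_DROP;
    }
    enc->next_frame_mm_time = frame_mm_time + enc->frame_period_ms;
    return FRAME_ENCODE;
}

/* Hypothetical stand-ins for the display-channel callbacks. */
static uint32_t get_roundtrip_ms(void *opaque)  { (void)opaque; return 40; }
static uint32_t get_source_fps(void *opaque)    { (void)opaque; return 30; }
static void update_client_playback_delay(void *opaque, uint32_t d) { (void)opaque; (void)d; }

int main(void)
{
    Encoder enc = {
        .cbs = { NULL, get_roundtrip_ms, get_source_fps, update_client_playback_delay },
        .next_frame_mm_time = 0,
        .frame_period_ms = 1000 / 10,   /* pretend the bit rate only allows ~10 fps */
    };

    /* The guest produces frames at 30 fps; the encoder keeps roughly every third one. */
    for (uint32_t t = 0; t < 1000; t += 1000 / enc.cbs.get_source_fps(enc.cbs.opaque)) {
        int r = encoder_encode_frame(&enc, t);
        printf("t=%3ums rtt=%ums -> %s\n", (unsigned)t,
               (unsigned)enc.cbs.get_roundtrip_ms(enc.cbs.opaque),
               r == FRAME_DROP ? "drop" : "encode");
    }
    return 0;
}

With the capability check gone from the streaming code, this single encoder-side path applies whether or not the client sends stream reports; the encoder simply has fewer inputs to react to when reports are unavailable.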
@@ -55,7 +55,6 @@ struct DisplayChannelClientPrivate
     QRegion surface_client_lossy_region[NUM_SURFACES];

     StreamAgent stream_agents[NUM_STREAMS];
-    int use_video_encoder_rate_control;
     uint32_t streams_max_latency;
     uint64_t streams_max_bit_rate;
     bool gl_draw_ongoing;
@@ -1689,18 +1689,6 @@ static int red_marshall_stream_data(RedChannelClient *rcc,
     }

-    StreamAgent *agent = &dcc->priv->stream_agents[display_channel_get_stream_id(display, stream)];
-    uint64_t time_now = spice_get_monotonic_time_ns();
-
-    if (!dcc->priv->use_video_encoder_rate_control) {
-        if (time_now - agent->last_send_time < (1000 * 1000 * 1000) / agent->fps) {
-            agent->frames--;
-#ifdef STREAM_STATS
-            agent->stats.num_drops_fps++;
-#endif
-            return TRUE;
-        }
-    }

     VideoBuffer *outbuf;
     /* workaround for vga streams */
     frame_mm_time = drawable->red_drawable->mm_time ?
@@ -1715,7 +1703,6 @@ static int red_marshall_stream_data(RedChannelClient *rcc,
                                    &outbuf);
     switch (ret) {
     case VIDEO_ENCODER_FRAME_DROP:
-        spice_assert(dcc->priv->use_video_encoder_rate_control);
#ifdef STREAM_STATS
        agent->stats.num_drops_fps++;
#endif
@@ -1757,7 +1744,6 @@ static int red_marshall_stream_data(RedChannelClient *rcc,
     }
     spice_marshaller_add_ref_full(base_marshaller, outbuf->data, outbuf->size,
                                   &red_release_video_encoder_buffer, outbuf);
-    agent->last_send_time = time_now;
#ifdef STREAM_STATS
    agent->stats.num_frames_sent++;
    agent->stats.size_sent += outbuf->size;
@@ -2142,7 +2128,6 @@ static void marshall_stream_start(RedChannelClient *rcc,
     DisplayChannelClient *dcc = DISPLAY_CHANNEL_CLIENT(rcc);
     Stream *stream = agent->stream;

-    agent->last_send_time = 0;
     spice_assert(stream);
     if (!agent->video_encoder) {
         /* Without a video encoder nothing will be streamed */
@@ -478,8 +478,6 @@ static void dcc_init_stream_agents(DisplayChannelClient *dcc)
         region_init(&agent->vis_region);
         region_init(&agent->clip);
     }
-    dcc->priv->use_video_encoder_rate_control =
-        red_channel_client_test_remote_cap(RED_CHANNEL_CLIENT(dcc), SPICE_DISPLAY_CAP_STREAM_REPORT);
 }

 DisplayChannelClient *dcc_new(DisplayChannel *display,
@@ -1283,11 +1281,6 @@ spice_wan_compression_t dcc_get_zlib_glz_state(DisplayChannelClient *dcc)
     return dcc->priv->zlib_glz_state;
 }

-gboolean dcc_use_video_encoder_rate_control(DisplayChannelClient *dcc)
-{
-    return dcc->priv->use_video_encoder_rate_control;
-}
-
 uint32_t dcc_get_max_stream_latency(DisplayChannelClient *dcc)
 {
     return dcc->priv->streams_max_latency;
@@ -198,7 +198,6 @@ StreamAgent * dcc_get_stream_agent (DisplayCha
 ImageEncoders *dcc_get_encoders(DisplayChannelClient *dcc);
 spice_wan_compression_t dcc_get_jpeg_state (DisplayChannelClient *dcc);
 spice_wan_compression_t dcc_get_zlib_glz_state (DisplayChannelClient *dcc);
-gboolean dcc_use_video_encoder_rate_control (DisplayChannelClient *dcc);
 uint32_t dcc_get_max_stream_latency(DisplayChannelClient *dcc);
 void dcc_set_max_stream_latency(DisplayChannelClient *dcc, uint32_t latency);
 uint64_t dcc_get_max_stream_bit_rate(DisplayChannelClient *dcc);
@@ -1500,9 +1500,8 @@ static int spice_gst_encoder_encode_frame(VideoEncoder *video_encoder,
         return VIDEO_ENCODER_FRAME_UNSUPPORTED;
     }

-    if (rate_control_is_active(encoder) &&
-        (handle_server_drops(encoder, frame_mm_time) ||
-         frame_mm_time < encoder->next_frame_mm_time)) {
+    if (handle_server_drops(encoder, frame_mm_time) ||
+        frame_mm_time < encoder->next_frame_mm_time) {
         /* Drop the frame to limit the outgoing bit rate. */
         return VIDEO_ENCODER_FRAME_DROP;
     }
@@ -1716,10 +1715,8 @@ VideoEncoder *gstreamer_encoder_new(SpiceVideoCodecType codec_type,
     encoder->unused_bitmap_opaques = g_async_queue_new();
#endif

-    if (cbs) {
-        encoder->cbs = *cbs;
-    }
     encoder->starting_bit_rate = starting_bit_rate;
+    encoder->cbs = *cbs;
     encoder->bitmap_ref = bitmap_ref;
     encoder->bitmap_unref = bitmap_unref;
     encoder->format = GSTREAMER_FORMAT_INVALID;
@@ -32,8 +32,6 @@
 #define MJPEG_QUALITY_SAMPLE_NUM 7
 static const int mjpeg_quality_samples[MJPEG_QUALITY_SAMPLE_NUM] = {20, 30, 40, 50, 60, 70, 80};

-#define MJPEG_LEGACY_STATIC_QUALITY_ID 5 // jpeg quality 70
-
 #define MJPEG_IMPROVE_QUALITY_FPS_STRICT_TH 10
 #define MJPEG_IMPROVE_QUALITY_FPS_PERMISSIVE_TH 5
@@ -208,11 +206,6 @@ static MJpegVideoBuffer* create_mjpeg_video_buffer(void)
     return buffer;
 }

-static inline int rate_control_is_active(MJpegEncoder* encoder)
-{
-    return encoder->cbs.get_roundtrip_ms != NULL;
-}
-
 static void mjpeg_encoder_destroy(VideoEncoder *video_encoder)
 {
     MJpegEncoder *encoder = (MJpegEncoder*)video_encoder;
@@ -592,8 +585,6 @@ static void mjpeg_encoder_adjust_params_to_bit_rate(MJpegEncoder *encoder)
     uint32_t latency = 0;
     uint32_t src_fps;

-    spice_assert(rate_control_is_active(encoder));
-
     rate_control = &encoder->rate_control;
     quality_eval = &rate_control->quality_eval_data;
@@ -677,8 +668,6 @@ static void mjpeg_encoder_adjust_fps(MJpegEncoder *encoder, uint64_t now)
     MJpegEncoderRateControl *rate_control = &encoder->rate_control;
     uint64_t adjusted_fps_time_passed;

-    spice_assert(rate_control_is_active(encoder));
-
     adjusted_fps_time_passed = (now - rate_control->adjusted_fps_start_time) / NSEC_PER_MILLISEC;

     if (!rate_control->during_quality_eval &&
@@ -731,37 +720,35 @@ static int mjpeg_encoder_start_frame(MJpegEncoder *encoder,
 {
     uint32_t quality;

-    if (rate_control_is_active(encoder)) {
-        MJpegEncoderRateControl *rate_control = &encoder->rate_control;
-        uint64_t now;
-        uint64_t interval;
-
-        now = spice_get_monotonic_time_ns();
-
-        if (!rate_control->adjusted_fps_start_time) {
-            rate_control->adjusted_fps_start_time = now;
-        }
-        mjpeg_encoder_adjust_fps(encoder, now);
-        interval = (now - rate_control->bit_rate_info.last_frame_time);
-
-        if (interval < NSEC_PER_SEC / rate_control->adjusted_fps) {
-            return VIDEO_ENCODER_FRAME_DROP;
-        }
-
-        mjpeg_encoder_adjust_params_to_bit_rate(encoder);
-
-        if (!rate_control->during_quality_eval ||
-            rate_control->quality_eval_data.reason == MJPEG_QUALITY_EVAL_REASON_SIZE_CHANGE) {
-            MJpegEncoderBitRateInfo *bit_rate_info;
-
-            bit_rate_info = &encoder->rate_control.bit_rate_info;
-
-            if (!bit_rate_info->change_start_time) {
-                bit_rate_info->change_start_time = now;
-                bit_rate_info->change_start_mm_time = frame_mm_time;
-            }
-            bit_rate_info->last_frame_time = now;
-        }
+    MJpegEncoderRateControl *rate_control = &encoder->rate_control;
+    uint64_t now;
+    uint64_t interval;
+
+    now = spice_get_monotonic_time_ns();
+
+    if (!rate_control->adjusted_fps_start_time) {
+        rate_control->adjusted_fps_start_time = now;
+    }
+    mjpeg_encoder_adjust_fps(encoder, now);
+    interval = (now - rate_control->bit_rate_info.last_frame_time);
+
+    if (interval < NSEC_PER_SEC / rate_control->adjusted_fps) {
+        return VIDEO_ENCODER_FRAME_DROP;
+    }
+
+    mjpeg_encoder_adjust_params_to_bit_rate(encoder);
+
+    if (!rate_control->during_quality_eval ||
+        rate_control->quality_eval_data.reason == MJPEG_QUALITY_EVAL_REASON_SIZE_CHANGE) {
+        MJpegEncoderBitRateInfo *bit_rate_info;
+
+        bit_rate_info = &encoder->rate_control.bit_rate_info;
+
+        if (!bit_rate_info->change_start_time) {
+            bit_rate_info->change_start_time = now;
+            bit_rate_info->change_start_mm_time = frame_mm_time;
+        }
+        bit_rate_info->last_frame_time = now;
     }

     encoder->cinfo.in_color_space = JCS_RGB;
@@ -1222,10 +1209,6 @@ static void mjpeg_encoder_client_stream_report(VideoEncoder *video_encoder,
                 end_frame_mm_time - start_frame_mm_time,
                 end_frame_delay, audio_delay);

-    if (!rate_control_is_active(encoder)) {
-        spice_debug("rate control was not activated: ignoring");
-        return;
-    }
     if (rate_control->during_quality_eval) {
         if (rate_control->quality_eval_data.type == MJPEG_QUALITY_EVAL_TYPE_DOWNGRADE &&
             rate_control->quality_eval_data.reason == MJPEG_QUALITY_EVAL_REASON_RATE_CHANGE) {
@@ -1388,17 +1371,12 @@ VideoEncoder *mjpeg_encoder_new(SpiceVideoCodecType codec_type,
     encoder->rate_control.byte_rate = starting_bit_rate / 8;
     encoder->starting_bit_rate = starting_bit_rate;

-    if (cbs) {
-        encoder->cbs = *cbs;
-        mjpeg_encoder_reset_quality(encoder, MJPEG_QUALITY_SAMPLE_NUM / 2, 5, 0);
-        encoder->rate_control.during_quality_eval = TRUE;
-        encoder->rate_control.quality_eval_data.type = MJPEG_QUALITY_EVAL_TYPE_SET;
-        encoder->rate_control.quality_eval_data.reason = MJPEG_QUALITY_EVAL_REASON_RATE_CHANGE;
-        encoder->rate_control.warmup_start_time = spice_get_monotonic_time_ns();
-    } else {
-        encoder->cbs.get_roundtrip_ms = NULL;
-        mjpeg_encoder_reset_quality(encoder, MJPEG_LEGACY_STATIC_QUALITY_ID, MJPEG_MAX_FPS, 0);
-    }
+    encoder->cbs = *cbs;
+    mjpeg_encoder_reset_quality(encoder, MJPEG_QUALITY_SAMPLE_NUM / 2, 5, 0);
+    encoder->rate_control.during_quality_eval = TRUE;
+    encoder->rate_control.quality_eval_data.type = MJPEG_QUALITY_EVAL_TYPE_SET;
+    encoder->rate_control.quality_eval_data.reason = MJPEG_QUALITY_EVAL_REASON_RATE_CHANGE;
+    encoder->rate_control.warmup_start_time = spice_get_monotonic_time_ns();

     encoder->cinfo.err = jpeg_std_error(&encoder->jerr);
     jpeg_create_compress(&encoder->cinfo);
@@ -110,7 +110,7 @@ void stream_stop(DisplayChannel *display, Stream *stream)
         stream_agent = dcc_get_stream_agent(dcc, display_channel_get_stream_id(display, stream));
         region_clear(&stream_agent->vis_region);
         region_clear(&stream_agent->clip);
-        if (stream_agent->video_encoder && dcc_use_video_encoder_rate_control(dcc)) {
+        if (stream_agent->video_encoder) {
             uint64_t stream_bit_rate = stream_agent->video_encoder->get_bit_rate(stream_agent->video_encoder);

             if (stream_bit_rate > dcc_get_max_stream_bit_rate(dcc)) {
@@ -337,7 +337,6 @@ static void before_reattach_stream(DisplayChannel *display,
     int index;
     StreamAgent *agent;
     GList *dpi_link, *dpi_next;
-    GListIter iter;

     spice_return_if_fail(stream->current);

@@ -357,54 +356,14 @@ static void before_reattach_stream(DisplayChannel *display,
         dcc = dpi->dcc;
         agent = dcc_get_stream_agent(dcc, index);

-        if (!dcc_use_video_encoder_rate_control(dcc) &&
-            !dcc_is_low_bandwidth(dcc)) {
-            continue;
-        }
-
         if (red_channel_client_pipe_item_is_linked(RED_CHANNEL_CLIENT(dcc),
                                                    &dpi->dpi_pipe_item)) {
 #ifdef STREAM_STATS
             agent->stats.num_drops_pipe++;
 #endif
-            if (dcc_use_video_encoder_rate_control(dcc)) {
-                agent->video_encoder->notify_server_frame_drop(agent->video_encoder);
-            } else {
-                ++agent->drops;
-            }
+            agent->video_encoder->notify_server_frame_drop(agent->video_encoder);
         }
     }
-
-
-    FOREACH_DCC(display, iter, dcc) {
-        double drop_factor;
-
-        agent = dcc_get_stream_agent(dcc, index);
-
-        if (dcc_use_video_encoder_rate_control(dcc)) {
-            continue;
-        }
-        if (agent->frames / agent->fps < FPS_TEST_INTERVAL) {
-            agent->frames++;
-            continue;
-        }
-        drop_factor = ((double)agent->frames - (double)agent->drops) /
-                      (double)agent->frames;
-        spice_debug("stream %d: #frames %u #drops %u", index, agent->frames, agent->drops);
-        if (drop_factor == 1) {
-            if (agent->fps < MAX_FPS) {
-                agent->fps++;
-                spice_debug("stream %d: fps++ %u", index, agent->fps);
-            }
-        } else if (drop_factor < 0.9) {
-            if (agent->fps > 1) {
-                agent->fps--;
-                spice_debug("stream %d: fps--%u", index, agent->fps);
-            }
-        }
-        agent->frames = 1;
-        agent->drops = 0;
-    }
 }

 static Stream *display_channel_stream_try_new(DisplayChannel *display)
@@ -768,30 +727,20 @@ void dcc_create_stream(DisplayChannelClient *dcc, Stream *stream)
     spice_return_if_fail(region_is_empty(&agent->vis_region));

     if (stream->current) {
         agent->frames = 1;
         region_clone(&agent->vis_region, &stream->current->tree_item.base.rgn);
         region_clone(&agent->clip, &agent->vis_region);
     } else {
         agent->frames = 0;
     }
-    agent->drops = 0;
-    agent->fps = MAX_FPS;
     agent->dcc = dcc;
-
-    if (dcc_use_video_encoder_rate_control(dcc)) {
-        VideoEncoderRateControlCbs video_cbs;
-        uint64_t initial_bit_rate;
-
-        video_cbs.opaque = agent;
-        video_cbs.get_roundtrip_ms = get_roundtrip_ms;
-        video_cbs.get_source_fps = get_source_fps;
-        video_cbs.update_client_playback_delay = update_client_playback_delay;
-
-        initial_bit_rate = get_initial_bit_rate(dcc, stream);
-        agent->video_encoder = dcc_create_video_encoder(dcc, initial_bit_rate, &video_cbs);
-    } else {
-        agent->video_encoder = dcc_create_video_encoder(dcc, 0, NULL);
-    }
+    VideoEncoderRateControlCbs video_cbs;
+
+    video_cbs.opaque = agent;
+    video_cbs.get_roundtrip_ms = get_roundtrip_ms;
+    video_cbs.get_source_fps = get_source_fps;
+    video_cbs.update_client_playback_delay = update_client_playback_delay;
+
+    uint64_t initial_bit_rate = get_initial_bit_rate(dcc, stream);
+    agent->video_encoder = dcc_create_video_encoder(dcc, initial_bit_rate, &video_cbs);
     red_channel_client_pipe_add(RED_CHANNEL_CLIENT(dcc), stream_create_item_new(agent));

     if (red_channel_client_test_remote_cap(RED_CHANNEL_CLIENT(dcc), SPICE_DISPLAY_CAP_STREAM_REPORT)) {
@@ -73,12 +73,9 @@ typedef struct StreamAgent {
                           displays fragments of the video */

     Stream *stream;
-    uint64_t last_send_time;
     VideoEncoder *video_encoder;
     DisplayChannelClient *dcc;

     int frames;
-    int drops;
-    int fps;

     uint32_t report_id;