[spice-gtk,v1,09/11] channel-display: don't debug latency for each frame

Submitted by Victor Toso on March 13, 2018, 11:25 a.m.

Details

Message ID 20180313112542.32238-10-victortoso@redhat.com
State New
Series "misc improvements in channel-display" ( rev: 2 1 ) in Spice


Commit Message

Victor Toso March 13, 2018, 11:25 a.m.
From: Victor Toso <me@victortoso.com>

It becomes quite hard to find meaning in something that is printed for
every frame. Only print the latency value if it is a new min/max or if
the average latency is 10% higher/lower than before.

This is not aiming for perfect latency statistics, just to avoid overly
verbose logging. Removing the latency debug entirely isn't desirable,
as we can still infer streaming issues from it.

Signed-off-by: Victor Toso <victortoso@redhat.com>
---
 src/channel-display-priv.h |  3 +++
 src/channel-display.c      | 18 +++++++++++++++++-
 2 files changed, 20 insertions(+), 1 deletion(-)

Patch

diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index 95ad7d8..e7758cc 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -136,6 +136,9 @@  struct display_stream {
     drops_sequence_stats cur_drops_seq_stats;
     GArray               *drops_seqs_stats_arr;
     uint32_t             num_drops_seqs;
+    uint32_t             latency_min;
+    uint32_t             latency_max;
+    uint32_t             latency_avg;
 
     uint32_t             playback_sync_drops_seq_len;
 
diff --git a/src/channel-display.c b/src/channel-display.c
index 4757aa6..3901cd1 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -1547,6 +1547,10 @@  static void display_stream_stats_save(display_stream *st,
                                       guint32 client_mmtime)
 {
     gint32 latency = server_mmtime - client_mmtime;
+    gint32 min_latency = st->latency_min == 0 ? latency : MIN(st->latency_min, latency);
+    gint32 max_latency = MAX(st->latency_max, latency);
+    gdouble avg_latency = (st->latency_avg * st->num_input_frames + latency) /
+                          ((double) (st->num_input_frames + 1));
 
     if (!st->num_input_frames) {
         st->first_frame_mm_time = server_mmtime;
@@ -1567,7 +1571,19 @@  static void display_stream_stats_save(display_stream *st,
         return;
     }
 
-    CHANNEL_DEBUG(st->channel, "video latency: %d", latency);
+    /* Only debug latency value if it matters otherwise it can be too verbose */
+    if (min_latency != st->latency_min ||
+        max_latency != st->latency_max ||
+        avg_latency < 0.90 * st->latency_avg ||
+        avg_latency > 1.10 * st->latency_avg) {
+        CHANNEL_DEBUG(st->channel,
+                      "video latency: %d | (%d , %0.2f , %d)",
+                      latency, min_latency, avg_latency, max_latency);
+        st->latency_min = min_latency;
+        st->latency_max = max_latency;
+    }
+    st->latency_avg = avg_latency;
+
     if (st->cur_drops_seq_stats.len) {
         st->cur_drops_seq_stats.duration = server_mmtime -
                                            st->cur_drops_seq_stats.start_mm_time;

Comments

On Tue, Mar 13, 2018 at 12:25:40PM +0100, Victor Toso wrote:
> From: Victor Toso <me@victortoso.com>
> 
> Becomes quite hard to find meaning on something that is printed every
> time. Only print latency value if it is a new min/max or if average
> latency is 10% bigger/lower then usual.
> 
> Not aiming to perfect statistics in latency here, just to avoid too
> verbose logging. Removing latency debug isn't cool as we could infer
> issues with streaming based on it.
> 
> Signed-off-by: Victor Toso <victortoso@redhat.com>
> ---
>  src/channel-display-priv.h |  3 +++
>  src/channel-display.c      | 18 +++++++++++++++++-
>  2 files changed, 20 insertions(+), 1 deletion(-)
> 
> diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
> index 95ad7d8..e7758cc 100644
> --- a/src/channel-display-priv.h
> +++ b/src/channel-display-priv.h
> @@ -136,6 +136,9 @@ struct display_stream {
>      drops_sequence_stats cur_drops_seq_stats;
>      GArray               *drops_seqs_stats_arr;
>      uint32_t             num_drops_seqs;
> +    uint32_t             latency_min;
> +    uint32_t             latency_max;
> +    uint32_t             latency_avg;

Btw, the latency_avg should be double.
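
For the record, the issue with keeping it as uint32_t: the running
average is computed as a gdouble but gets truncated every time it is
stored back into the field, so the fractional part is lost on each
frame and the +/-10% comparison works on a drifted value. A tiny
illustration with made-up numbers:

    gdouble avg_latency = (33 * 9 + 41) / 10.0;  /* true running average: 33.8 */
    uint32_t latency_avg = avg_latency;          /* stored as 33: the .8 is discarded,
                                                    and the next frame's average is
                                                    computed from the truncated value */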

>  
>      uint32_t             playback_sync_drops_seq_len;
>  
> diff --git a/src/channel-display.c b/src/channel-display.c
> index 4757aa6..3901cd1 100644
> --- a/src/channel-display.c
> +++ b/src/channel-display.c
> @@ -1547,6 +1547,10 @@ static void display_stream_stats_save(display_stream *st,
>                                        guint32 client_mmtime)
>  {
>      gint32 latency = server_mmtime - client_mmtime;
> +    gint32 min_latency = st->latency_min == 0 ? latency : MIN(st->latency_min, latency);
> +    gint32 max_latency = MAX(st->latency_max, latency);
> +    gdouble avg_latency = (st->latency_avg * st->num_input_frames + latency) /
> +                          ((double) (st->num_input_frames + 1));
>  
>      if (!st->num_input_frames) {
>          st->first_frame_mm_time = server_mmtime;
> @@ -1567,7 +1571,19 @@ static void display_stream_stats_save(display_stream *st,
>          return;
>      }
>  
> -    CHANNEL_DEBUG(st->channel, "video latency: %d", latency);
> +    /* Only debug latency value if it matters otherwise it can be too verbose */
> +    if (min_latency != st->latency_min ||
> +        max_latency != st->latency_max ||
> +        avg_latency < 0.90 * st->latency_avg ||
> +        avg_latency > 1.10 * st->latency_avg) {
> +        CHANNEL_DEBUG(st->channel,
> +                      "video latency: %d | (%d , %0.2f , %d)",
> +                      latency, min_latency, avg_latency, max_latency);
> +        st->latency_min = min_latency;
> +        st->latency_max = max_latency;
> +    }
> +    st->latency_avg = avg_latency;
> +
>      if (st->cur_drops_seq_stats.len) {
>          st->cur_drops_seq_stats.duration = server_mmtime -
>                                             st->cur_drops_seq_stats.start_mm_time;
> -- 
> 2.16.2
> 
> _______________________________________________
> Spice-devel mailing list
> Spice-devel@lists.freedesktop.org
> https://lists.freedesktop.org/mailman/listinfo/spice-devel
> 
> From: Victor Toso <me@victortoso.com>
> 
> Becomes quite hard to find meaning on something that is printed every
> time. Only print latency value if it is a new min/max or if average
> latency is 10% bigger/lower then usual.
> 
> Not aiming to perfect statistics in latency here, just to avoid too
> verbose logging. Removing latency debug isn't cool as we could infer
> issues with streaming based on it.
> 
> Signed-off-by: Victor Toso <victortoso@redhat.com>
> ---
>  src/channel-display-priv.h |  3 +++
>  src/channel-display.c      | 18 +++++++++++++++++-
>  2 files changed, 20 insertions(+), 1 deletion(-)
> 
> diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
> index 95ad7d8..e7758cc 100644
> --- a/src/channel-display-priv.h
> +++ b/src/channel-display-priv.h
> @@ -136,6 +136,9 @@ struct display_stream {
>      drops_sequence_stats cur_drops_seq_stats;
>      GArray               *drops_seqs_stats_arr;
>      uint32_t             num_drops_seqs;
> +    uint32_t             latency_min;
> +    uint32_t             latency_max;
> +    uint32_t             latency_avg;
>  

The style documentation explicitly states that we should not
column-align.

>      uint32_t             playback_sync_drops_seq_len;
>  
> diff --git a/src/channel-display.c b/src/channel-display.c
> index 4757aa6..3901cd1 100644
> --- a/src/channel-display.c
> +++ b/src/channel-display.c
> @@ -1547,6 +1547,10 @@ static void display_stream_stats_save(display_stream *st,
>                                        guint32 client_mmtime)
>  {
>      gint32 latency = server_mmtime - client_mmtime;
> +    gint32 min_latency = st->latency_min == 0 ? latency : MIN(st->latency_min, latency);

Why not initialize latency_min to INT32_MAX?

> +    gint32 max_latency = MAX(st->latency_max, latency);

As latency can be < 0, latency_max should be initialized to INT32_MIN, not 0.
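
Combined with the INT32_MAX suggestion above, a rough sketch (the two
fields would also have to become signed, and the initialization would
go wherever the stream stats are first set up, which is outside this
diff):

    /* at stream-stats initialization */
    st->latency_min = INT32_MAX;  /* any real sample becomes the first minimum */
    st->latency_max = INT32_MIN;  /* latency can be negative, so 0 is not a safe start */

    /* in display_stream_stats_save() the == 0 special case then disappears */
    gint32 min_latency = MIN(st->latency_min, latency);
    gint32 max_latency = MAX(st->latency_max, latency);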

> +    gdouble avg_latency = (st->latency_avg * st->num_input_frames + latency) /
> +                          ((double) (st->num_input_frames + 1));
>  

I would use a latency_total in the display_stream structure. I think int64_t is
safe.
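
Something along these lines, perhaps (untested sketch; it keeps the
patch's assumption that num_input_frames is incremented later in the
function):

    /* in struct display_stream, instead of storing a truncated average */
    int64_t latency_total;  /* sum of all sampled latencies so far */

    /* in display_stream_stats_save() */
    st->latency_total += latency;
    gdouble avg_latency = st->latency_total / (double) (st->num_input_frames + 1);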

>      if (!st->num_input_frames) {
>          st->first_frame_mm_time = server_mmtime;
> @@ -1567,7 +1571,19 @@ static void display_stream_stats_save(display_stream *st,
>          return;
>      }
>  
> -    CHANNEL_DEBUG(st->channel, "video latency: %d", latency);
> +    /* Only debug latency value if it matters otherwise it can be too verbose */
> +    if (min_latency != st->latency_min ||
> +        max_latency != st->latency_max ||
> +        avg_latency < 0.90 * st->latency_avg ||
> +        avg_latency > 1.10 * st->latency_avg) {
> +        CHANNEL_DEBUG(st->channel,
> +                      "video latency: %d | (%d , %0.2f , %d)",
> +                      latency, min_latency, avg_latency, max_latency);
> +        st->latency_min = min_latency;
> +        st->latency_max = max_latency;
> +    }
> +    st->latency_avg = avg_latency;
> +
>      if (st->cur_drops_seq_stats.len) {
>          st->cur_drops_seq_stats.duration = server_mmtime -
>                                             st->cur_drops_seq_stats.start_mm_time;

Frediano
On Tue, Apr 17, 2018 at 07:46:42AM -0400, Frediano Ziglio wrote:
> > 
> > From: Victor Toso <me@victortoso.com>
> > 
> > Becomes quite hard to find meaning on something that is printed every
> > time. Only print latency value if it is a new min/max or if average
> > latency is 10% bigger/lower then usual.
> > 
> > Not aiming to perfect statistics in latency here, just to avoid too
> > verbose logging. Removing latency debug isn't cool as we could infer
> > issues with streaming based on it.
> > 
> > Signed-off-by: Victor Toso <victortoso@redhat.com>
> > ---
> >  src/channel-display-priv.h |  3 +++
> >  src/channel-display.c      | 18 +++++++++++++++++-
> >  2 files changed, 20 insertions(+), 1 deletion(-)
> > 
> > diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
> > index 95ad7d8..e7758cc 100644
> > --- a/src/channel-display-priv.h
> > +++ b/src/channel-display-priv.h
> > @@ -136,6 +136,9 @@ struct display_stream {
> >      drops_sequence_stats cur_drops_seq_stats;
> >      GArray               *drops_seqs_stats_arr;
> >      uint32_t             num_drops_seqs;
> > +    uint32_t             latency_min;
> > +    uint32_t             latency_max;
> > +    uint32_t             latency_avg;
> >  
> 
> In the style documentation is explicitly state that we should not
> column align.

But I'm keeping the local coding style. Should that take priority
over what the documentation says?


> >      uint32_t             playback_sync_drops_seq_len;
> >  
> > diff --git a/src/channel-display.c b/src/channel-display.c
> > index 4757aa6..3901cd1 100644
> > --- a/src/channel-display.c
> > +++ b/src/channel-display.c
> > @@ -1547,6 +1547,10 @@ static void display_stream_stats_save(display_stream *st,
> >                                        guint32 client_mmtime)
> >  {
> >      gint32 latency = server_mmtime - client_mmtime;
> > +    gint32 min_latency = st->latency_min == 0 ? latency : MIN(st->latency_min, latency);
> 
> why not initializing latency_min with INT32_MAX?

Why not indeed. I'll change it.

> 
> > +    gint32 max_latency = MAX(st->latency_max, latency);
> 
> as latency can be <0 latency_max should be initialized to INT32_MIN, not 0.

Thanks!

> 
> > +    gdouble avg_latency = (st->latency_avg * st->num_input_frames + latency) /
> > +                          ((double) (st->num_input_frames + 1));
> >  
> 
> I would use a latency_total in the display_stream structure. I think int64_t is
> safe.

Okay, I'll change this too.

> 
> >      if (!st->num_input_frames) {
> >          st->first_frame_mm_time = server_mmtime;
> > @@ -1567,7 +1571,19 @@ static void display_stream_stats_save(display_stream *st,
> >          return;
> >      }
> >  
> > -    CHANNEL_DEBUG(st->channel, "video latency: %d", latency);
> > +    /* Only debug latency value if it matters otherwise it can be too verbose */
> > +    if (min_latency != st->latency_min ||
> > +        max_latency != st->latency_max ||
> > +        avg_latency < 0.90 * st->latency_avg ||
> > +        avg_latency > 1.10 * st->latency_avg) {
> > +        CHANNEL_DEBUG(st->channel,
> > +                      "video latency: %d | (%d , %0.2f , %d)",
> > +                      latency, min_latency, avg_latency, max_latency);
> > +        st->latency_min = min_latency;
> > +        st->latency_max = max_latency;
> > +    }
> > +    st->latency_avg = avg_latency;
> > +
> >      if (st->cur_drops_seq_stats.len) {
> >          st->cur_drops_seq_stats.duration = server_mmtime -
> >                                             st->cur_drops_seq_stats.start_mm_time;
> 
> Frediano

Thanks,
        toso