# Coverage-report header (annotated source of
# xpra/server/window/window_stats.py, reported at 62% coverage;
# the hot-key help from the HTML page has been removed).
1# -*- coding: utf-8 -*-
2# This file is part of Xpra.
3# Copyright (C) 2011 Serviware (Arthur Huillet, <ahuillet@serviware.com>)
4# Copyright (C) 2010-2019 Antoine Martin <antoine@xpra.org>
5# Copyright (C) 2008 Nathaniel Smith <njs@pobox.com>
6# Xpra is released under the terms of the GNU GPL v2, or, at your option, any
7# later version. See the file COPYING for details.
9from math import sqrt
11from collections import deque
12from xpra.simple_stats import get_list_stats, get_weighted_list_stats
13from xpra.os_util import monotonic_time
14from xpra.util import engs, csv, envint
15from xpra.server.cystats import (logp, #@UnresolvedImport
16 calculate_time_weighted_average, #@UnresolvedImport
17 calculate_size_weighted_average, #@UnresolvedImport
18 calculate_timesize_weighted_average, #@UnresolvedImport
19 calculate_for_average, #@UnresolvedImport
20 )
22from xpra.log import Logger
23log = Logger("stats")
25#how many historical records to keep
26#for the various statistics we collect:
27#(cannot be lower than DamageBatchConfig.MAX_EVENTS)
28NRECS = 100
30TARGET_LATENCY_TOLERANCE = envint("XPRA_TARGET_LATENCY_TOLERANCE", 20)/1000.0
class WindowPerformanceStatistics:
    """
    Statistics which belong to a specific WindowSource
    """
    def __init__(self):
        self.reset()

    #assume 100ms until we get some data to compute the real values
    DEFAULT_DAMAGE_LATENCY = 0.1
    DEFAULT_NETWORK_LATENCY = 0.1
    DEFAULT_TARGET_LATENCY = 0.1

    def reset(self):
        """ (Re-)initialize all the recorded samples and derived averages. """
        self.init_time = monotonic_time()
        self.client_decode_time = deque(maxlen=NRECS)   #records how long it took the client to decode frames:
                                                        #(ack_time, no of pixels, decoding_time*1000*1000)
        self.encoding_stats = deque(maxlen=NRECS)       #encoding: (time, coding, pixels, bpp, compressed_size, encoding_time)
        # statistics:
        self.damage_in_latency = deque(maxlen=NRECS)    #records how long it took for a damage request to be sent
                                                        #last NRECS: (sent_time, no of pixels, actual batch delay, damage_latency)
        self.damage_out_latency = deque(maxlen=NRECS)   #records how long it took for a damage request to be processed
                                                        #last NRECS: (processed_time, no of pixels, actual batch delay, damage_latency)
        self.damage_ack_pending = {}                    #records when damage packets are sent
                                                        #so we can calculate the "client_latency" when the client sends
                                                        #the corresponding ack ("damage-sequence" packet - see "client_ack_damage")
        self.encoding_totals = {}                       #for each encoding, how many frames we sent and how many pixels in total
        self.encoding_pending = {}                      #damage regions waiting to be picked up by the encoding thread:
                                                        #for each sequence no: (damage_time, w, h)
        self.last_damage_events = deque(maxlen=4*NRECS) #every time we get a damage event, we record: time,x,y,w,h
        self.last_damage_event_time = 0
        self.last_recalculate = 0
        self.damage_events_count = 0
        self.packet_count = 0

        self.last_resized = 0
        self.last_packet_time = 0

        #these values are calculated from the values above (see update_averages)
        self.target_latency = self.DEFAULT_TARGET_LATENCY
        self.avg_damage_in_latency = self.DEFAULT_DAMAGE_LATENCY
        self.recent_damage_in_latency = self.DEFAULT_DAMAGE_LATENCY
        self.avg_damage_out_latency = self.DEFAULT_DAMAGE_LATENCY + self.DEFAULT_NETWORK_LATENCY
        self.recent_damage_out_latency = self.DEFAULT_DAMAGE_LATENCY + self.DEFAULT_NETWORK_LATENCY
        self.max_latency = self.DEFAULT_DAMAGE_LATENCY + self.DEFAULT_NETWORK_LATENCY
        self.avg_decode_speed = -1
        self.recent_decode_speed = -1

    def reset_backlog(self):
        """ Forget all the damage packets awaiting an ack. """
        #this should be a last resort..
        self.damage_ack_pending = {}

    def update_averages(self):
        """ Recalculate the derived averages from the recorded samples. """
        #damage "in" latency: (the time it takes for damage requests to be processed only)
        dil = tuple(self.damage_in_latency)
        if dil:
            data = tuple((when, latency) for when, _, _, latency in dil)
            self.avg_damage_in_latency, self.recent_damage_in_latency = calculate_time_weighted_average(data)
        #damage "out" latency: (the time it takes for damage requests to be processed and sent out)
        dol = tuple(self.damage_out_latency)
        if dol:
            data = tuple((when, latency) for when, _, _, latency in dol)
            self.avg_damage_out_latency, self.recent_damage_out_latency = calculate_time_weighted_average(data)
        #client decode speed:
        cdt = tuple(self.client_decode_time)
        if cdt:
            #the elapsed time recorded is in microseconds:
            decode_speed = tuple((event_time, size, int(size*1000*1000/elapsed)) for event_time, size, elapsed in cdt)
            r = calculate_size_weighted_average(decode_speed)
            self.avg_decode_speed = int(r[0])
            self.recent_decode_speed = int(r[1])
        #network send speed:
        #(keep a sane 100ms floor so max_latency never collapses to zero)
        all_l = [0.1,
                 self.avg_damage_in_latency, self.recent_damage_in_latency,
                 self.avg_damage_out_latency, self.recent_damage_out_latency]
        self.max_latency = max(all_l)

    def get_factors(self, bandwidth_limit=0):
        """
        Compute the batch delay factors from the statistics.
        Each factor is a tuple: (metric, info, factor, weight).
        `bandwidth_limit` is in bits per second, 0 disables the bandwidth factor.
        """
        factors = []
        def mayaddfac(metric, info, factor, weight):
            #only keep factors with a significant weight:
            if weight>0.01:
                factors.append((metric, info, factor, weight))
        #ratio of "in" and "out" latency indicates network bottleneck:
        #(the difference between the two is the time it takes to send)
        if self.damage_in_latency and self.damage_out_latency:
            #prevent jitter from skewing the values too much
            ad = max(0.010, 0.040+self.avg_damage_out_latency-self.avg_damage_in_latency)
            rd = max(0.010, 0.040+self.recent_damage_out_latency-self.recent_damage_in_latency)
            metric = "damage-network-delay"
            #info: avg delay=%.3f recent delay=%.3f" % (ad, rd)
            mayaddfac(*calculate_for_average(metric, ad, rd))
        #client decode time:
        ads = self.avg_decode_speed
        rds = self.recent_decode_speed
        if ads>0 and rds>0:
            metric = "client-decode-speed"
            #info: avg=%.1f, recent=%.1f (MPixels/s)" % (ads/1000/1000, self.recent_decode_speed/1000/1000)
            #our calculate methods aims for lower values, so invert speed
            #this is how long it takes to send 1MB:
            avg1MB = 1.0*1024*1024/ads
            recent1MB = 1.0*1024*1024/rds
            weight_div = max(0.25, rds/(4*1000*1000))
            mayaddfac(*calculate_for_average(metric, avg1MB, recent1MB, weight_offset=0.0, weight_div=weight_div))
        ldet = self.last_damage_event_time
        if ldet:
            #If nothing happens for a while then we can reduce the batch delay,
            #however we must ensure this is not caused by a high system latency
            #so we ignore short elapsed times.
            elapsed = monotonic_time()-ldet
            mtime = max(0, elapsed-self.max_latency*2)
            #the longer the time, the more we slash:
            weight = sqrt(mtime)
            target = max(0, 1.0-mtime)
            metric = "damage-rate"
            info = {"elapsed"       : int(1000.0*elapsed),
                    "max_latency"   : int(1000.0*self.max_latency)}
            mayaddfac(metric, info, target, weight)
        if bandwidth_limit>0:
            #calculate how much bandwith we have used in the last second (in bps):
            #encoding_stats.append((end, coding, w*h, bpp, len(data), end-start))
            cutoff = monotonic_time()-1
            used = sum(v[4] for v in tuple(self.encoding_stats) if v[0]>cutoff) * 8
            info = {
                "budget"    : bandwidth_limit,
                "used"      : used,
                }
            #aim for 10% below the limit:
            target = used*110.0/100.0/bandwidth_limit
            #if we are getting close to or above the limit,
            #the certainty of this factor goes up:
            weight = max(0, target-1)*(5+logp(target))
            mayaddfac("bandwidth-limit", info, target, weight)
        return factors

    def get_info(self) -> dict:
        """ Returns a dictionary of statistics for this window,
            suitable for the "info" request. """
        info = {"damage"    : {"events"         : self.damage_events_count,
                               "packets_sent"   : self.packet_count,
                               "target-latency" : int(1000*self.target_latency),
                               }
                }
        #encoding stats:
        estats = tuple(self.encoding_stats)
        if estats:
            #dedupe the encodings (preserving order) so we only
            #calculate the per-encoding stats once for each encoding,
            #instead of once per recorded frame:
            encodings_used = tuple(dict.fromkeys(x[1] for x in estats))
            def add_compression_stats(enc_stats, encoding=None):
                #compression ratio and time, weighted by pixel count:
                comp_ratios_pct = []
                comp_times_ns = []
                total_pixels = 0
                total_time = 0.0
                for _, _, pixels, bpp, compressed_size, compression_time in enc_stats:
                    if compressed_size>0 and pixels>0:
                        osize = pixels*bpp/8
                        comp_ratios_pct.append((100.0*compressed_size/osize, pixels))
                        comp_times_ns.append((1000.0*1000*1000*compression_time/pixels, pixels))
                        total_pixels += pixels
                        total_time += compression_time
                einfo = info.setdefault("encoding", {})
                if encoding:
                    einfo = einfo.setdefault(encoding, {})
                einfo["ratio_pct"] = get_weighted_list_stats(comp_ratios_pct)
                einfo["pixels_per_ns"] = get_weighted_list_stats(comp_times_ns)
                if total_time>0:
                    einfo["pixels_encoded_per_second"] = int(total_pixels / total_time)
            #overall stats first, then one entry per encoding:
            add_compression_stats(estats)
            for encoding in encodings_used:
                enc_stats = [x for x in estats if x[1]==encoding]
                add_compression_stats(enc_stats, encoding)

        dinfo = info.setdefault("damage", {})
        #latencies are recorded in seconds, report them in milliseconds:
        latencies = tuple(x[-1]*1000 for x in tuple(self.damage_in_latency))
        dinfo["in_latency"] = get_list_stats(latencies, show_percentile=[9])
        latencies = tuple(x[-1]*1000 for x in tuple(self.damage_out_latency))
        dinfo["out_latency"] = get_list_stats(latencies, show_percentile=[9])
        #per encoding totals:
        if self.encoding_totals:
            tf = info.setdefault("total_frames", {})
            tp = info.setdefault("total_pixels", {})
            for encoding, totals in self.encoding_totals.items():
                tf[encoding] = totals[0]
                tp[encoding] = totals[1]
        return info

    def get_target_client_latency(self, min_client_latency, avg_client_latency, abs_min=0.010, jitter=0):
        """ geometric mean of the minimum (+20%) and average latency
            but not higher than four times the minimum (+100ms),
            and not lower than abs_min.
            Then we add the average decoding latency
            and the jitter (given in milliseconds).
        """
        decoding_latency = 0.010
        cdt = tuple(self.client_decode_time)
        if cdt:
            #scale the weighted average down to seconds
            #(client_decode_time stores the decode time in microseconds - see reset):
            decoding_latency = calculate_timesize_weighted_average(cdt)[0]/1000.0
        min_latency = max(abs_min, min_client_latency or abs_min)*1.2
        avg_latency = max(min_latency, avg_client_latency or abs_min)
        max_latency = min(avg_latency, 4.0*min_latency+0.100)
        return max(abs_min, min(max_latency, sqrt(min_latency*avg_latency))) + decoding_latency + jitter/1000.0

    def get_client_backlog(self):
        """
        Returns (packets, pixels, bytes) that have been fully sent
        longer ago than the target latency (plus tolerance)
        but have not been acked by the client yet.
        Entries older than one minute are assumed lost and get expired.
        """
        packets_backlog, pixels_backlog, bytes_backlog = 0, 0, 0
        if self.damage_ack_pending:
            sent_before = monotonic_time()-(self.target_latency+TARGET_LATENCY_TOLERANCE)
            dropped_acks_time = monotonic_time()-60     #1 minute
            drop_missing_acks = []
            for sequence, item in tuple(self.damage_ack_pending.items()):
                start_send_at = item[0]
                end_send_at = item[3]
                #skip packets not fully sent yet, or sent too recently:
                if end_send_at==0 or start_send_at>sent_before:
                    continue
                if start_send_at<dropped_acks_time:
                    drop_missing_acks.append(sequence)
                else:
                    start_bytes = item[2]
                    end_bytes = item[4]
                    pixels = item[5]
                    packets_backlog += 1
                    pixels_backlog += pixels
                    bytes_backlog += (end_bytes - start_bytes)
            log("get_client_backlog missing acks: %s", drop_missing_acks)
            #this should never happen...
            if drop_missing_acks:
                log.error("Error: expiring %i missing damage ACK%s,", len(drop_missing_acks), engs(drop_missing_acks))
                log.error(" connection may be closed or closing,")
                log.error(" sequence numbers missing: %s", csv(drop_missing_acks))
                for sequence in drop_missing_acks:
                    self.damage_ack_pending.pop(sequence, None)
        return packets_backlog, pixels_backlog, bytes_backlog

    def get_acks_pending(self):
        """ Number of damage packets whose sending has started but which are not acked yet. """
        return sum(1 for x in self.damage_ack_pending.values() if x[0]!=0)

    def get_late_acks(self, latency):
        """ Number of fully sent packets still unacked after `latency` seconds. """
        now = monotonic_time()
        sent_before = now-latency
        #start_send_at = item[0]
        #end_send_at = item[3]
        late = sum(1 for item in self.damage_ack_pending.values() if item[3]>0 and item[0]<=sent_before)
        log("get_late_acks(%s)=%i (%i in full pending list)",
            latency, late, len(self.damage_ack_pending))
        return late

    def get_pixels_encoding_backlog(self):
        """ Returns (pixels, regions) still waiting to be picked up by the encoding thread. """
        pixels, count = 0, 0
        for _, w, h in self.encoding_pending.values():
            pixels += w*h
            count += 1
        return pixels, count

    def get_bitrate(self, max_elapsed=1):
        """ Effective send bitrate in bits per second,
            calculated over the last `max_elapsed` seconds of encoding records
            (0 if there are fewer than two records in that window). """
        cutoff = monotonic_time()-max_elapsed
        recs = tuple((v[0], v[4]) for v in tuple(self.encoding_stats) if v[0]>=cutoff)
        if len(recs)<2:
            return 0
        bits = sum(v[1] for v in recs) * 8
        elapsed = recs[-1][0]-recs[0][0]
        if elapsed==0:
            return 0
        return int(bits/elapsed)

    def get_damage_pixels(self, elapsed=1):
        """ Total number of pixels (w*h) from damage events recorded
            in the last `elapsed` seconds. """
        cutoff = monotonic_time()-elapsed
        return sum(v[3]*v[4] for v in tuple(self.last_damage_events) if v[0]>cutoff)