diff --git a/clients/client-linux.py b/clients/client-linux.py index 8fa8bb2..ce15b5a 100755 --- a/clients/client-linux.py +++ b/clients/client-linux.py @@ -15,6 +15,7 @@ PORT = 35601 PASSWORD = "USER_DEFAULT_PASSWORD" INTERVAL = 1 PORBEPORT = 80 +PING_PACKET_HISTORY_LEN = 100 CU = "cu.tz.cloudcpp.com" CT = "ct.tz.cloudcpp.com" CM = "cm.tz.cloudcpp.com" @@ -28,6 +29,10 @@ import sys import json import subprocess import threading +try: + from queue import Queue # python3 +except ImportError: + from Queue import Queue # python2 def get_uptime(): with open('/proc/uptime', 'r') as f: @@ -156,27 +161,23 @@ netSpeed = { def _ping_thread(host, mark, port): lostPacket = 0 - allPacket = 0 - startTime = time.time() + packet_queue = Queue(maxsize=PING_PACKET_HISTORY_LEN) while True: + if packet_queue.full(): + if packet_queue.get() == 0: + lostPacket -= 1 try: b = timeit.default_timer() socket.create_connection((host, port), timeout=1).close() pingTime[mark] = int((timeit.default_timer()-b)*1000) + packet_queue.put(1) except: lostPacket += 1 - finally: - allPacket += 1 + packet_queue.put(0) - if allPacket > 100: - lostRate[mark] = float(lostPacket) / allPacket - - endTime = time.time() - if endTime - startTime > 3600: - lostPacket = 0 - allPacket = 0 - startTime = endTime + if packet_queue.qsize() > 30: + lostRate[mark] = float(lostPacket) / packet_queue.qsize() time.sleep(INTERVAL) diff --git a/clients/client-psutil.py b/clients/client-psutil.py index 799ce56..278f5f8 100755 --- a/clients/client-psutil.py +++ b/clients/client-psutil.py @@ -16,6 +16,7 @@ PORT = 35601 PASSWORD = "USER_DEFAULT_PASSWORD" INTERVAL = 1 PORBEPORT = 80 +PING_PACKET_HISTORY_LEN = 100 CU = "cu.tz.cloudcpp.com" CT = "ct.tz.cloudcpp.com" CM = "cm.tz.cloudcpp.com" @@ -28,6 +29,10 @@ import json import psutil import sys import threading +try: + from queue import Queue # python3 +except ImportError: + from Queue import Queue # python2 def get_uptime(): return int(time.time() - 
psutil.boot_time()) @@ -140,27 +145,23 @@ netSpeed = { def _ping_thread(host, mark, port): lostPacket = 0 - allPacket = 0 - startTime = time.time() + packet_queue = Queue(maxsize=PING_PACKET_HISTORY_LEN) while True: + if packet_queue.full(): + if packet_queue.get() == 0: + lostPacket -= 1 try: b = timeit.default_timer() socket.create_connection((host, port), timeout=1).close() pingTime[mark] = int((timeit.default_timer() - b) * 1000) + packet_queue.put(1) except: lostPacket += 1 - finally: - allPacket += 1 + packet_queue.put(0) - if allPacket > 100: - lostRate[mark] = float(lostPacket) / allPacket - - endTime = time.time() - if endTime - startTime > 3600: - lostPacket = 0 - allPacket = 0 - startTime = endTime + if packet_queue.qsize() > 30: + lostRate[mark] = float(lostPacket) / packet_queue.qsize() time.sleep(INTERVAL)