From 1c1ac70da007e805cbd480e2b8c4e9dc633cdff7 Mon Sep 17 00:00:00 2001 From: Jon Campo Date: Tue, 11 Apr 2023 11:31:11 +1000 Subject: [PATCH] 5GSPEED --- .DS_Store | Bin 0 -> 6148 bytes .../5GSPEED_DETAILED_ASSET.py | 141 ++ 5GSPEED_DETAILED_ASSET/METADATA/MANIFEST.json | 31 + 5GSPEED_DETAILED_ASSET/METADATA/SIGNATURE.DS | 1 + 5GSPEED_DETAILED_ASSET/README.md | 110 + 5GSPEED_DETAILED_ASSET/_csv.py | 208 ++ 5GSPEED_DETAILED_ASSET/csclient.py | 530 +++++ 5GSPEED_DETAILED_ASSET/csv.py | 449 ++++ 5GSPEED_DETAILED_ASSET/package.ini | 12 + 5GSPEED_DETAILED_ASSET/speedtest.py | 2001 +++++++++++++++++ 5GSPEED_DETAILED_ASSET/start.sh | 2 + 5GSPEED_DETAILED_ASSET/timeit.py | 362 +++ 12 files changed, 3847 insertions(+) create mode 100644 .DS_Store create mode 100755 5GSPEED_DETAILED_ASSET/5GSPEED_DETAILED_ASSET.py create mode 100644 5GSPEED_DETAILED_ASSET/METADATA/MANIFEST.json create mode 100644 5GSPEED_DETAILED_ASSET/METADATA/SIGNATURE.DS create mode 100644 5GSPEED_DETAILED_ASSET/README.md create mode 100644 5GSPEED_DETAILED_ASSET/_csv.py create mode 100755 5GSPEED_DETAILED_ASSET/csclient.py create mode 100644 5GSPEED_DETAILED_ASSET/csv.py create mode 100755 5GSPEED_DETAILED_ASSET/package.ini create mode 100644 5GSPEED_DETAILED_ASSET/speedtest.py create mode 100755 5GSPEED_DETAILED_ASSET/start.sh create mode 100644 5GSPEED_DETAILED_ASSET/timeit.py diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..6434e5785fa8d163de1dda04980797df7d46b047 GIT binary patch literal 6148 zcmeHKJxc>o5S-N%0h?4}`Tl~1{Uuxw!9uw7ilz*t5c})=+s=L{9v88*klC<1@9o=p z$4zmY1z?-E{WY)xu%tWU-NW4c+3EWy_dGUNK}*pQa}oPDd69SMtAHA z$He$_FvJKzTreHRb<7gP<_Tg~I3_Yfv!oJ}YBge5(wT2n*A`tV>js0#abAz~V$hinI#GWFTo;)X_zML-0Hb&oQ~&?~ literal 0 HcmV?d00001 diff --git a/5GSPEED_DETAILED_ASSET/5GSPEED_DETAILED_ASSET.py b/5GSPEED_DETAILED_ASSET/5GSPEED_DETAILED_ASSET.py new file mode 100755 index 00000000..4bfbe782 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/5GSPEED_DETAILED_ASSET.py @@ -0,0 +1,141 @@ +''' + +Description: The 5GSPEED_DETAILED SDK application will uses Ookla Speedtest python library and designed to perform Ookla speedtest from a Cradlepoint Endpoint which will enable comprehensive and end-to-end speedtest result. + +Steps to use: +============= +perform any of the following: + +1. Use NCM API PUT router request to clear the asset ID and to run the SDK speedtest. Wait for 1 min, and run NCM API Get router request to get the result. + +2. Make the asset_id blank in NCM > Devices tab + +3. Go to device console and enter put 5GSPEED 1 + + +Installation: +============= +Go to NCM > Tools page and load the script. Afterwards, load the script in the desired NCM Group where the router belongs + + +Results: +======== +All results will be displayed in asset ID column of the router. + +Sample result: +DL:52.54Mbps - UL:16.55Mbps - Ping:9.715ms - Server:Telstra - ISP:Vocus Communications - TimeGMT:2023-04-11T01:06:43.758382Z - URL:http://www.speedtest.net/result/14595594656.png + + +For any questions, please reach out to developer jon.campo@cradlepoint.com + +DISCLAIMER: +========== +Please note: This script is meant for demo purposes only. All tools/ scripts in this repo are released for use "AS IS" without any warranties of any kind, including, but not limited to their installation, use, or performance. Any use of these scripts and tools is at your own risk. 
There is no guarantee that they have been through thorough testing in a comparable environment and we are not responsible for any damage or data loss incurred with their use. You are responsible for reviewing and testing any scripts you run thoroughly before use in any non-testing environment. + + +''' + +from csclient import EventingCSClient +from speedtest import Speedtest +import time + +def asset_id_check(path, asset_id, *args): + if not asset_id: + cp.log('Initiating Speedtest due to asset id empty...') + #cp.put('status/5GSPEED', "1") + speedtest() + return + +#def speedtest(path, value, *args): +def speedtest(): + cp.log('Ongoing Speedtest...') + #cp.put('config/system/asset_id', "Ongoing Speedtest. Please wait 1 minute for the result") + #time.sleep(10) + + #servers = [] + s = Speedtest() + + #Find the best ookla speedtest server based from latency and ping + cp.log("Finding the Best Ookla Speedtest.net Server...") + server = s.get_best_server() + cp.log('Found Best Ookla Speedtest.net Server: {}'.format(server['sponsor'])) + + p = s.results.ping + cp.log('Ping: {}ms'.format(p)) + + #Perform Download ookla download speedtest + cp.log("Performing Ookla Speedtest.net Download Test...") + d = s.download() + cp.log('Ookla Speedtest.net Download: {:.2f} Kb/s'.format(d / 1000)) + + #Perform Upload ookla upload speedtest. Option pre_allocate false prevents memory error + cp.log("Performing Ookla Speedtest.net Upload Test...") + u = s.upload(pre_allocate=False) + cp.log('Ookla Speedtest.net Upload: {:.2f} Kb/s'.format(u / 1000)) + + #Access speedtest result dictionary + res = s.results.dict() + + #share link for ookla test result page + share = s.results.share() + + t = res['timestamp'] + + i = res["client"]["isp"] + + s = server['sponsor'] + + #return res["download"], res["upload"], res["ping"],res['timestamp'],server['sponsor'],res["client"]["isp"], share + + + cp.log('') + cp.log('Test Result') + cp.log('Timestamp GMT: {}'.format(t)) + cp.log('Client ISP: {}'.format(i)) + cp.log('Ookla Speedtest.net Server: {}'.format(s)) + cp.log('Ping: {}ms'.format(p)) + cp.log('Download Speed: {:.2f} Mb/s'.format(d / 1000 / 1000)) + cp.log('Upload Speed: {:.2f} Mb/s'.format(u / 1000 / 1000)) + cp.log('Ookla Speedtest.net URL Result: {}'.format(share)) + + download = '{:.2f}'.format(d / 1000 / 1000) + upload = '{:.2f}'.format(u / 1000 / 1000) + #text = 'DL:{}Mbps UL:{}Mbps - {}'.format(download, upload,share) + text = 'DL:{}Mbps - UL:{}Mbps - Ping:{}ms - Server:{} - ISP:{} - TimeGMT:{} - URL:{}'.format(download, upload, p, s, i, t, share) + cp.put('config/system/asset_id', text) + + cp.log(f'Speedtest Complete! {text}') + return + + + +try: + cp = EventingCSClient('5GSPEEDTEST') + + + cp.log('Starting... To start speedtest: put status/5GSPEED 1 or make asset id blank') + cp.on('put', 'config/system/asset_id', asset_id_check) + cp.on('put', 'status/5GSPEED', speedtest) + asset_id = cp.get('/config/system/asset_id') + #cp.log(asset_id) + + #Performed if asset ID is blank + if asset_id is "" or asset_id is None: + + connected = False + while not connected: + connected = cp.get('status/ecm/state') == 'connected' + time.sleep(1) + + #cp.log('Detected at bootup that the asset id is blank. 
Starting the 5GSpeedtest in 1 minute to allow device finish its bootup...') + cp.log('Detected at bootup that the asset id is blank.') + #time.sleep(60) + cp.log('Starting Initial 5GSpeedtest with asset id blank...') + #cp.put('status/5GSPEED', '1') + speedtest() + + time.sleep(999999) +except Exception as e: + cp.log(e) + + diff --git a/5GSPEED_DETAILED_ASSET/METADATA/MANIFEST.json b/5GSPEED_DETAILED_ASSET/METADATA/MANIFEST.json new file mode 100644 index 00000000..e2d9f342 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/METADATA/MANIFEST.json @@ -0,0 +1,31 @@ +{ + "app": { + "auto_start": true, + "date": "2023-04-11T11:15:01.448246", + "files": { + "5GSPEED_DETAILED_ASSET.py": "25810756c820c1dac38685d41b6c71cd81b89b3551575109449447594f3b1d1d", + "README.md": "5813f2c137692d03895573190561e477c385dbe8a9ad0f01f2010966573299b8", + "_csv.py": "b778711ec11f2d4e9395436fc369710cdacc49fdbfa762e66443a6e24d542bd1", + "csclient.py": "0c6a55c889daf7df554ab1d22e3290cbb45540a081a62b485036d98ef01e681b", + "csv.py": "0f46b397bda6998a4b7083478f22cd02bd6454a3dd1219a5874562eb3784244d", + "package.ini": "54f211020f9081ef1069ea30ce1280701678c9208f7a36cca701171c350a7698", + "speedtest.py": "c9e65ec60ebc450e29a25cee0dfa558c2007922194318cbdb1c9291cffc59cfc", + "start.sh": "a2c8596fbf31ac181633d57e0da44a49dc2280b16e5d5010888a962d7e611a0e", + "timeit.py": "d0884d28561a0dafce6492e8f526ad53b0ad49f38e9f124573c749a4941447f4" + }, + "firmware_major": 7, + "firmware_minor": 2, + "name": "5GSPEED_DETAILED_ASSET", + "notes": "Ookla speedtest from a Cradlepoint Endpoint. To run, make asset id blank", + "reboot": true, + "restart": true, + "uuid": "a8ac42ec-352d-4320-871c-a81f42b2372f", + "vendor": "Cradlepoint", + "version_major": 0, + "version_minor": 2 + }, + "pmf": { + "version_major": 1, + "version_minor": 0 + } +} \ No newline at end of file diff --git a/5GSPEED_DETAILED_ASSET/METADATA/SIGNATURE.DS b/5GSPEED_DETAILED_ASSET/METADATA/SIGNATURE.DS new file mode 100644 index 00000000..ccd58e48 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/METADATA/SIGNATURE.DS @@ -0,0 +1 @@ +75d2f596aeb8c4a5b9c8d7e860eb688724197c3fcb758edb07f3190c2d2a2c3b \ No newline at end of file diff --git a/5GSPEED_DETAILED_ASSET/README.md b/5GSPEED_DETAILED_ASSET/README.md new file mode 100644 index 00000000..b1554df3 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/README.md @@ -0,0 +1,110 @@ +Application Name +================ +5GSPEED_DETAILED_ASSET + +Application Purpose +=================== +5GSPEED_DETAILED is an improved version of the 5GSPEED SDK. It uses Ookla Speedtest python library and designed to perform Ookla speedtest from a Cradlepoint Endpoint which will enable comprehensive and end-to-end speedtest result. +This python SDK application bring the Ookla speedtest.net functionality that is normally performed by users in the LAN behind the Cradlepoint endpoint. +This will provide uniformity of speedtest between the users and the Cradlepoint endpoint devices. + +For any questions, please reach out to developer jon.campo@cradlepoint.com + +Application Version +=================== +0.1 + +NCOS Devices Supported +====================== +ALL + + +External Requirements + +Once downloaded to PC, folder name needs to be renamed to 5GSPEED_DETAILED and in .tar.gz format. + +Installation: +====================== + +1. Download and unzip file - + +https://github.com/joncampo-cradlepoint/5GSPEED_DETAILED/archive/main.zip + +2. Important! Rename unzipped folder to 5GSPEED_DETAILED + +3.a. 
For MAC and linux, open console, cd to downloads, then create .tar.gz using terminal command + +tar -czvf 5GSPEED_DETAILED.tar.gz 5GSPEED_DETAILED/* + +b. For Windows, use the python make.py build found in https://customer.cradlepoint.com/s/article/NCOS-SDK-v3 + +--- +NCM Deployment + +4. In NCM, upload 5GSPEED_DETAILED_ASSET.tar.gz to NCM via TOOLS tab + +5. After that, go to Groups > Commands > Manage SDK applications and add the application + + +Usage: +====================== + +1. Speedtest will automatically run if Asset ID in NCM > Devices is blank. It will populate Asset ID with speedtest result and URL. + +2. Use NCM API to run. +Use NCM API PUT router request to clear the asset ID and to run the SDK speedtest. Wait for 1 min, and run NCM API Get router request to get the result. + + +3. You can also run 5GSPEED_DETAILED through cli console + +a. Go to Devices > select Device > Remote Connect > Console + +b. Enter the following command: + +put status/5GSPEED 1 + +c. Run log command to view output: + +log -f + +4. You can also see the result in NCM > Devices > Asset ID column of the device + + + +Expected Output +=============== +Info level log message of ookla speedtest.net results including Timestamp, Client ISP, Ookla Speedtest.net Server, Ping, Download speed, upload speed and the URL link of the test result. + + +Sample output: + + +DL:52.54Mbps - UL:16.55Mbps - Ping:9.715ms - Server:Telstra - ISP:Vocus Communications - TimeGMT:2023-04-11T01:06:43.758382Z - URL:http://www.speedtest.net/result/14595594656.png + +--- +NCM > Devices > Asset ID: + +DL:26.81Mbps UL:9.09Mbps - https://www.speedtest.net/result/10690043282.png + + + +Changelog: +=============== + +version 1.1 +1. Improved bootup blank asset id detection + +2. Notification in asset id that speedtest is running + +3. @speedtest.py module libary: +#11-April-2021 JWC: added to fix the speedtest cli module issue - https://github.com/sivel/speedtest-cli/pull/769 + ignore_servers = list( + #map(int, server_config['ignoreids'].split(',')) + map(int, [server_no for server_no in server_config['ignoreids'].split(',') if server_no]) #11-April-2021 JWC: added to fix the speedtest cli module issue - https://github.com/sivel/speedtest-cli/pull/769 + ) + + +DISCLAIMER +=============== + +Please note: This script is meant for demo purposes only. All tools/ scripts in this repo are released for use "AS IS" without any warranties of any kind, including, but not limited to their installation, use, or performance. Any use of these scripts and tools is at your own risk. There is no guarantee that they have been through thorough testing in a comparable environment and we are not responsible for any damage or data loss incurred with their use. You are responsible for reviewing and testing any scripts you run thoroughly before use in any non-testing environment. diff --git a/5GSPEED_DETAILED_ASSET/_csv.py b/5GSPEED_DETAILED_ASSET/_csv.py new file mode 100644 index 00000000..01dc7cd3 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/_csv.py @@ -0,0 +1,208 @@ +# encoding: utf-8 +# module _csv +# from (pre-generated) +# by generator 1.147 +""" +CSV parsing and writing. + +This module provides classes that assist in the reading and writing +of Comma Separated Value (CSV) files, and implements the interface +described by PEP 305. Although many CSV files are simple to parse, +the format is not formally defined by a stable specification and +is subtle enough that parsing lines of a CSV file with something +like line.split(",") is bound to fail. 
The module supports three +basic APIs: reading, writing, and registration of dialects. + + +DIALECT REGISTRATION: + +Readers and writers support a dialect argument, which is a convenient +handle on a group of settings. When the dialect argument is a string, +it identifies one of the dialects previously registered with the module. +If it is a class or instance, the attributes of the argument are used as +the settings for the reader or writer: + + class excel: + delimiter = ',' + quotechar = '"' + escapechar = None + doublequote = True + skipinitialspace = False + lineterminator = '\r\n' + quoting = QUOTE_MINIMAL + +SETTINGS: + + * quotechar - specifies a one-character string to use as the + quoting character. It defaults to '"'. + * delimiter - specifies a one-character string to use as the + field separator. It defaults to ','. + * skipinitialspace - specifies how to interpret whitespace which + immediately follows a delimiter. It defaults to False, which + means that whitespace immediately following a delimiter is part + of the following field. + * lineterminator - specifies the character sequence which should + terminate rows. + * quoting - controls when quotes should be generated by the writer. + It can take on any of the following module constants: + + csv.QUOTE_MINIMAL means only when required, for example, when a + field contains either the quotechar or the delimiter + csv.QUOTE_ALL means that quotes are always placed around fields. + csv.QUOTE_NONNUMERIC means that quotes are always placed around + fields which do not parse as integers or floating point + numbers. + csv.QUOTE_NONE means that quotes are never placed around fields. + * escapechar - specifies a one-character string used to escape + the delimiter when quoting is set to QUOTE_NONE. + * doublequote - controls the handling of quotes inside fields. When + True, two consecutive quotes are interpreted as one during read, + and when writing, each quote character embedded in the data is + written as two quotes +""" +# no imports + +# Variables with simple values + +QUOTE_ALL = 1 +QUOTE_MINIMAL = 0 +QUOTE_NONE = 3 +QUOTE_NONNUMERIC = 2 + +__version__ = '1.0' + + +# functions + +def field_size_limit(limit=None): # real signature unknown; restored from __doc__ + """ + Sets an upper limit on parsed fields. + csv.field_size_limit([limit]) + + Returns old limit. If limit is not given, no new limit is set and + the old limit is returned + """ + pass + + +def get_dialect(name): # real signature unknown; restored from __doc__ + """ + Return the dialect instance associated with name. + dialect = csv.get_dialect(name) + """ + pass + + +def list_dialects(): # real signature unknown; restored from __doc__ + """ + Return a list of all know dialect names. + names = csv.list_dialects() + """ + pass + + +def reader(iterable, dialect='excel', *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__ + """ + csv_reader = reader(iterable [, dialect='excel'] + [optional keyword args]) + for row in csv_reader: + process(row) + + The "iterable" argument can be any object that returns a line + of input for each iteration, such as a file object or a list. The + optional "dialect" parameter is discussed below. The function + also accepts optional keyword arguments which override settings + provided by the dialect. + + The returned object is an iterator. Each iteration returns a row + of the CSV file (which can span multiple input lines). 
+ """ + pass + + +def register_dialect(name, dialect=None, **fmtparams): # real signature unknown; restored from __doc__ + """ + Create a mapping from a string name to a dialect class. + dialect = csv.register_dialect(name[, dialect[, **fmtparams]]) + """ + pass + + +def unregister_dialect(name): # real signature unknown; restored from __doc__ + """ + Delete the name/dialect mapping associated with a string name. + csv.unregister_dialect(name) + """ + pass + + +def writer(fileobj, dialect='excel', *args, + **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__ + """ + csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + for row in sequence: + csv_writer.writerow(row) + + [or] + + csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + csv_writer.writerows(rows) + + The "fileobj" argument can be any object that supports the file API. + """ + pass + + +# classes + +class Dialect(object): + """ + CSV dialect + + The Dialect type records CSV parsing and generation options. + """ + + def __init__(self, *args, **kwargs): # real signature unknown + pass + + @staticmethod # known case of __new__ + def __new__(*args, **kwargs): # real signature unknown + """ Create and return a new object. See help(type) for accurate signature. """ + pass + + delimiter = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + doublequote = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + escapechar = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + lineterminator = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + quotechar = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + quoting = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + skipinitialspace = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + strict = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + + +class Error(Exception): + # no doc + def __init__(self, *args, **kwargs): # real signature unknown + pass + + __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default + """list of weak references to the object (if defined)""" + + +# variables with complex values + +_dialects = {} + +__loader__ = None # (!) real value is '<_frozen_importlib_external.ExtensionFileLoader object at 0x101941a20>' + +__spec__ = None # (!) real value is "ModuleSpec(name='_csv', loader=<_frozen_importlib_external.ExtensionFileLoader object at 0x101941a20>, origin='/opt/buildAgent/system/.persistent_cache/pycharm/pythons4skeletons/python36/lib/python3.6/lib-dynload/_csv.cpython-36m-darwin.so')" + diff --git a/5GSPEED_DETAILED_ASSET/csclient.py b/5GSPEED_DETAILED_ASSET/csclient.py new file mode 100755 index 00000000..2fb8feae --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/csclient.py @@ -0,0 +1,530 @@ +""" +NCOS communication module for SDK applications. + +Copyright (c) 2018 Cradlepoint, Inc. . All rights reserved. + +This file contains confidential information of CradlePoint, Inc. and your use of +this file is subject to the CradlePoint Software License Agreement distributed with +this file. Unauthorized reproduction or distribution of this file is subject to civil and +criminal penalties. 
+""" + + +import json +import os +import re +import select +import socket +import threading +import logging.handlers +import signal +import sys + +try: + import traceback +except ImportError: + traceback = None + + +class SdkCSException(Exception): + pass + + +class CSClient(object): + """ + The CSClient class is the NCOS SDK mechanism for communication between apps and the router tree/config store. + Instances of this class communicate with the router using either an explicit socket or with http method calls. + + Apps running locally on the router use a socket on the router to send commands from the app to the router tree + and to receive data (JSON) from the router tree. + + Apps running remotely use the requests library to send HTTP method calls to the router and to receive data from + the router tree. This allows one to use an IDE to run and debug the application on a the computer. Although, + there are limitations with respect to the device hardware access (i.e. serial, USB, etc.). + """ + END_OF_HEADER = b"\r\n\r\n" + STATUS_HEADER_RE = re.compile(b"status: \w*") + CONTENT_LENGTH_HEADER_RE = re.compile(b"content-length: \w*") + MAX_PACKET_SIZE = 8192 + RECV_TIMEOUT = 2.0 + + _instances = {} + + @classmethod + def is_initialized(cls): + return cls in cls._instances + + def __new__(cls, *na, **kwna): + """ Singleton factory (with subclassing support) """ + if not cls.is_initialized(): + cls._instances[cls] = super().__new__(cls) + return cls._instances[cls] + + def __init__(self, app_name, init=False): + self.app_name = app_name + handlers = [logging.StreamHandler()] + if sys.platform == 'linux2': + handlers.append(logging.handlers.SysLogHandler(address='/dev/log')) + logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(name)s: %(message)s', datefmt='%b %d %H:%M:%S', + handlers=handlers) + self.logger = logging.getLogger(app_name) + if not init: + return + + def get(self, base, query='', tree=0): + """ + Constructs and sends a get request to retrieve specified data from a device. + + The behavior of this method is contextual: + - If the app is installed on (and executed from) a device, it directly queries the router tree to retrieve the + specified data. + - If the app running remotely from a computer it calls the HTTP GET method to retrieve the specified data. + + Args: + base: String representing a path to a resource on a router tree, + (i.e. '/config/system/logging/level'). + value: Not required. + query: Not required. + tree: Not required. + + Returns: + A dictionary containing the response (i.e. {"success": True, "data:": {}} + + """ + if sys.platform == 'linux2': + cmd = "get\n{}\n{}\n{}\n".format(base, query, tree) + return self._dispatch(cmd).get('data') + else: + # Running in a computer so use http to send the get to the device. + import requests + device_ip, username, password = self._get_device_access_info() + device_api = 'http://{}/api/{}/{}'.format(device_ip, base, query) + + try: + response = requests.get(device_api, auth=self._get_auth(device_ip, username, password)) + + except (requests.exceptions.Timeout, + requests.exceptions.ConnectionError): + print("Timeout: device at {} did not respond.".format(device_ip)) + return None + + return json.loads(response.text).get('data') + + def put(self, base, value='', query='', tree=0): + """ + Constructs and sends a put request to update or add specified data to the device router tree. 
+ + The behavior of this method is contextual: + - If the app is installed on(and executed from) a device, it directly updates or adds the specified data to + the router tree. + - If the app running remotely from a computer it calls the HTTP PUT method to update or add the specified + data. + + + Args: + base: String representing a path to a resource on a router tree, + (i.e. '/config/system/logging/level'). + value: Not required. + query: Not required. + tree: Not required. + + Returns: + A dictionary containing the response (i.e. {"success": True, "data:": {}} + """ + value = json.dumps(value) + if sys.platform == 'linux2': + cmd = "put\n{}\n{}\n{}\n{}\n".format(base, query, tree, value) + return self._dispatch(cmd) + else: + # Running in a computer so use http to send the put to the device. + import requests + device_ip, username, password = self._get_device_access_info() + device_api = 'http://{}/api/{}/{}'.format(device_ip, base, query) + + try: + response = requests.put(device_api, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + auth=self._get_auth(device_ip, username, password), + data={"data": '{}'.format(value)}) + except (requests.exceptions.Timeout, + requests.exceptions.ConnectionError): + print("Timeout: device at {} did not respond.".format(device_ip)) + return None + + return json.loads(response.text) + + def post(self, base, value='', query=''): + """ + Constructs and sends a post request to update or add specified data to the device router tree. + + The behavior of this method is contextual: + - If the app is installed on(and executed from) a device, it directly updates or adds the specified data to + the router tree. + - If the app running remotely from a computer it calls the HTTP POST method to update or add the specified + data. + + + Args: + base: String representing a path to a resource on a router tree, + (i.e. '/config/system/logging/level'). + value: Not required. + query: Not required. + + Returns: + A dictionary containing the response (i.e. {"success": True, "data:": {}} + """ + value = json.dumps(value) + if sys.platform == 'linux2': + cmd = f"post\n{base}\n{query}\n{value}\n" + return self._dispatch(cmd) + else: + # Running in a computer so use http to send the post to the device. + import requests + device_ip, username, password = self._get_device_access_info() + device_api = 'http://{}/api/{}/{}'.format(device_ip, base, query) + + try: + response = requests.post(device_api, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + auth=self._get_auth(device_ip, username, password), + data={"data": '{}'.format(value)}) + except (requests.exceptions.Timeout, + requests.exceptions.ConnectionError): + print("Timeout: device at {} did not respond.".format(device_ip)) + return None + + return json.loads(response.text) + + def delete(self, base, query=''): + """ + Constructs and sends a delete request to delete specified data to the device router tree. + + The behavior of this method is contextual: + - If the app is installed on(and executed from) a device, it directly deletes the specified data to + the router tree. + - If the app running remotely from a computer it calls the HTTP DELETE method to update or add the specified + data. + + + Args: + base: String representing a path to a resource on a router tree, + (i.e. '/config/system/logging/level'). + query: Not required. + + Returns: + A dictionary containing the response (i.e. 
{"success": True, "data:": {}} + """ + if sys.platform == 'linux2': + cmd = "delete\n{}\n{}\n".format(base, query) + return self._dispatch(cmd) + else: + # Running in a computer so use http to send the delete to the device. + import requests + device_ip, username, password = self._get_device_access_info() + device_api = 'http://{}/api/{}/{}'.format(device_ip, base, query) + + try: + response = requests.delete(device_api, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + auth=self._get_auth(device_ip, username, password), + data={"data": '{}'.format(value)}) + except (requests.exceptions.Timeout, + requests.exceptions.ConnectionError): + print("Timeout: device at {} did not respond.".format(device_ip)) + return None + + return json.loads(response.text) + + def alert(self, value=''): + """ + Constructs and sends a custom alert to NCM for the device. Apps calling this method must be running + on the target device to send the alert. If invoked while running on a computer, then only a log is output. + + Args: + + app_name: String name of your application. + value: String to displayed for the alert. + + Returns: + Success: None + Failure: An error + """ + if sys.platform == 'linux2': + cmd = "alert\n{}\n{}\n".format(self.app_name, value) + return self._dispatch(cmd) + else: + # Running in a computer and can't actually send the alert. + print('Alert is only available when running the app in NCOS.') + print('Alert Text: {}'.format(value)) + + def log(self, value=''): + """ + Adds an INFO log to the device SYSLOG. + + Args: + value: String text for the log. + + Returns: + None + """ + if sys.platform == 'linux2': + self.logger.info(value) + else: + # Running in a computer so just use print for the log. + print(value) + + def _get_auth(self, device_ip, username, password): + # This is only needed when the app is running in a computer. + # Returns the proper HTTP Auth for the global username and password. + # Digest Auth is used for NCOS 6.4 and below while Basic Auth is + # used for NCOS 6.5 and up. + import requests + from http import HTTPStatus + + use_basic = False + device_api = 'http://{}/api/status/product_info'.format(device_ip) + + try: + response = requests.get(device_api, auth=requests.auth.HTTPBasicAuth(username, password)) + if response.status_code == HTTPStatus.OK: + use_basic = True + + except: + use_basic = False + + if use_basic: + return requests.auth.HTTPBasicAuth(username, password) + else: + return requests.auth.HTTPDigestAuth(username, password) + + @staticmethod + def _get_device_access_info(): + # Should only be called when running in a computer. It will return the + # dev_client_ip, dev_client_username, and dev_client_password as defined in + # the sdk section of the sdk_settings.ini file. 
+ device_ip = '' + device_username = '' + device_password = '' + + if sys.platform != 'linux2': + import os + import configparser + + settings_file = os.path.join(os.path.dirname(os.getcwd()), 'sdk_settings.ini') + config = configparser.ConfigParser() + config.read(settings_file) + + # Keys in sdk_settings.ini + sdk_key = 'sdk' + ip_key = 'dev_client_ip' + username_key = 'dev_client_username' + password_key = 'dev_client_password' + + if sdk_key in config: + if ip_key in config[sdk_key]: + device_ip = config[sdk_key][ip_key] + else: + print('ERROR 1: The {} key does not exist in {}'.format(ip_key, settings_file)) + + if username_key in config[sdk_key]: + device_username = config[sdk_key][username_key] + else: + print('ERROR 2: The {} key does not exist in {}'.format(username_key, settings_file)) + + if password_key in config[sdk_key]: + device_password = config[sdk_key][password_key] + else: + print('ERROR 3: The {} key does not exist in {}'.format(password_key, settings_file)) + else: + print('ERROR 4: The {} section does not exist in {}'.format(sdk_key, settings_file)) + + return device_ip, device_username, device_password + + def _safe_dispatch(self, cmd): + """Send the command and return the response.""" + with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: + sock.connect('/var/tmp/cs.sock') + sock.sendall(bytes(cmd, 'ascii')) + return self._receive(sock) + + def _dispatch(self, cmd): + errmsg = None + result = "" + try: + result = self._safe_dispatch(cmd) + except Exception as err: + # ignore the command error, continue on to next command + errmsg = "dispatch failed with exception={} err={}".format(type(err), str(err)) + if errmsg is not None: + self.log(self.app_name, errmsg) + pass + return result + + def _safe_receive(self, sock): + sock.settimeout(self.RECV_TIMEOUT) + data = b"" + eoh = -1 + while eoh < 0: + # In the event that the config store times out in returning data, lib returns + # an empty result. Then again, if the config store hangs for 2+ seconds, + # the app's behavior is the least of our worries. + try: + buf = sock.recv(self.MAX_PACKET_SIZE) + except socket.timeout: + return {"status": "timeout", "data": None} + if len(buf) == 0: + break + data += buf + eoh = data.find(self.END_OF_HEADER) + + status_hdr = self.STATUS_HEADER_RE.search(data).group(0)[8:] + content_len = self.CONTENT_LENGTH_HEADER_RE.search(data).group(0)[16:] + remaining = int(content_len) - (len(data) - eoh - len(self.END_OF_HEADER)) + + # body sent from csevent_xxx.sock will have id, action, path, & cfg + while remaining > 0: + buf = sock.recv(self.MAX_PACKET_SIZE) # TODO: This will hang things as well. 
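+                # An empty read means the peer closed the socket before the full
+                # body arrived; bail out of the loop and parse whatever has arrived.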
+ if len(buf) == 0: + break + data += buf + remaining -= len(buf) + body = data[eoh:].decode() + try: + result = json.loads(body) + except json.JSONDecodeError as e: + # config store receiver doesn't give back + # proper json for 'put' ops, body + # contains verbose error message + # so putting the error msg in result + result = body.strip() + return {"status": status_hdr.decode(), "data": result} + + def _receive(self, sock): + errmsg = None + result = "" + try: + result = self._safe_receive(sock) + except Exception as err: + # ignore the command error, continue on to next command + errmsg = "_receive failed with exception={} err={}".format(type(err), str(err)) + if errmsg is not None: + self.log(self.app_name, errmsg) + return result + + +class EventingCSClient(CSClient): + running = False + registry = {} + eids = 1 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.on = self.register + self.un = self.unregister + + def start(self): + if self.running: + self.log(f"Eventing Config Store {self.pid} already running") + return + self.running = True + self.pid = os.getpid() + self.f = '/var/tmp/csevent_%d.sock' % self.pid + try: + os.unlink(self.f) + except FileNotFoundError: + pass + self.event_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.event_sock.bind(self.f) + self.event_sock.listen() # backlog is optional. already set on value found in /proc + self.event_sock.setblocking(False) + self.eloop = threading.Thread(target=self._handle_events) + self.eloop.start() + + def stop(self): + if not self.running: + return + self.log(f"Stopping {self.app_name}") + for k in list(self.registry.keys()): + self.unregister(k) + self.event_sock.close() + os.unlink(self.f) + self.running = False + + def _handle_events(self): + poller = select.poll() + poller.register(self.event_sock, + select.POLLIN | select.POLLERR | select.POLLHUP) # I don't unregsiter this in cleaning up! + while self.running: + try: + events = poller.poll(1000) + for f, ev in events: + if ev & (select.POLLERR | select.POLLHUP): + self.log("Hangup/error received. Stopping") + self.stop() # TODO: restart w/ cached registrations. Will no longer be an error case + + if ev & select.POLLIN: + conn, addr = self.event_sock.accept() + result = self._receive(conn) + eid = int(result['data']['id']) + try: + cb = self.registry[eid]['cb'] + args = self.registry[eid]['args'] + try: + # PUTting just a string to config store results in a json encoded string returned. + # e.g. set /config/system/logging/level "debug", result['data']['cfg'] is '"debug"' + cfg = json.loads(result['data']['cfg']) + except TypeError as e: + # Non-string path + cfg = result['data']['cfg'] + try: + cb_return = cb(result['data']['path'], cfg, args) + except: + if traceback: + traceback.print_exc() + self.log(f"Exception during callback for {str(self.registry[eid])}") + if result['data']['action'] == 'get': # We've something to send back. + # config_store_receiver expects json + cb_return = json.JSONEncoder().encode(cb_return) + conn.sendall( + cb_return.encode()) # No dispatch. Config store receiver will put to config store. + except (NameError, ValueError) as e: + self.log(f"Could not find register data for eid {eid}") + except OSError as e: + self.log(f"OSError: {e}") + raise + + def register(self, action: object, path: object, callback: object, *args: object) -> object: + if not self.running: + self.start() + # what about multiple registration? 
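+        # Typical use, mirroring 5GSPEED_DETAILED_ASSET.py (callbacks receive the
+        # changed path and its new value):
+        #     cp = EventingCSClient('5GSPEEDTEST')
+        #     cp.on('put', 'config/system/asset_id', asset_id_check)
+        #     cp.on('put', 'status/5GSPEED', speedtest)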
+ eid = self.eids + self.eids += 1 + self.registry[eid] = {'cb': callback, 'action': action, 'path': path, 'args': args} + cmd = "register\n{}\n{}\n{}\n{}\n".format(self.pid, eid, action, path) + return self._dispatch(cmd) + + def unregister(self, eid): + ret = "" + try: + e = self.registry[eid] + except KeyError: + pass + else: + if self.running: + cmd = "unregister\n{}\n{}\n{}\n{}\n".format(self.pid, eid, e['action'], e['path']) + ret = self._dispatch(cmd) + del self.registry[eid] + return ret + + +def clean_up_reg(signal, frame): + """ + When 'cppython remote_port_forward.py' gets a SIGTERM, config_store_receiver.py doesn't + clean up registrations. Even if it did, the comm module can't rely on an external service + to clean up. + """ + EventingCSClient('CSClient').stop() + sys.exit(0) + + +signal.signal(signal.SIGTERM, clean_up_reg) diff --git a/5GSPEED_DETAILED_ASSET/csv.py b/5GSPEED_DETAILED_ASSET/csv.py new file mode 100644 index 00000000..a05c8da1 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/csv.py @@ -0,0 +1,449 @@ + +""" +csv.py - read/write/investigate CSV files +""" + +import re +from _csv import Error, __version__, writer, reader, register_dialect, \ + unregister_dialect, get_dialect, list_dialects, \ + field_size_limit, \ + QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \ + __doc__ +from _csv import Dialect as _Dialect + +from collections import OrderedDict +from io import StringIO + +__all__ = ["QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE", + "Error", "Dialect", "__doc__", "excel", "excel_tab", + "field_size_limit", "reader", "writer", + "register_dialect", "get_dialect", "list_dialects", "Sniffer", + "unregister_dialect", "__version__", "DictReader", "DictWriter", + "unix_dialect"] + +class Dialect: + """Describe a CSV dialect. + + This must be subclassed (see csv.excel). Valid attributes are: + delimiter, quotechar, escapechar, doublequote, skipinitialspace, + lineterminator, quoting. 
+ + """ + _name = "" + _valid = False + # placeholders + delimiter = None + quotechar = None + escapechar = None + doublequote = None + skipinitialspace = None + lineterminator = None + quoting = None + + def __init__(self): + if self.__class__ != Dialect: + self._valid = True + self._validate() + + def _validate(self): + try: + _Dialect(self) + except TypeError as e: + # We do this for compatibility with py2.3 + raise Error(str(e)) + +class excel(Dialect): + """Describe the usual properties of Excel-generated CSV files.""" + delimiter = ',' + quotechar = '"' + doublequote = True + skipinitialspace = False + lineterminator = '\r\n' + quoting = QUOTE_MINIMAL +register_dialect("excel", excel) + +class excel_tab(excel): + """Describe the usual properties of Excel-generated TAB-delimited files.""" + delimiter = '\t' +register_dialect("excel-tab", excel_tab) + +class unix_dialect(Dialect): + """Describe the usual properties of Unix-generated CSV files.""" + delimiter = ',' + quotechar = '"' + doublequote = True + skipinitialspace = False + lineterminator = '\n' + quoting = QUOTE_ALL +register_dialect("unix", unix_dialect) + + +class DictReader: + def __init__(self, f, fieldnames=None, restkey=None, restval=None, + dialect="excel", *args, **kwds): + self._fieldnames = fieldnames # list of keys for the dict + self.restkey = restkey # key to catch long rows + self.restval = restval # default value for short rows + self.reader = reader(f, dialect, *args, **kwds) + self.dialect = dialect + self.line_num = 0 + + def __iter__(self): + return self + + @property + def fieldnames(self): + if self._fieldnames is None: + try: + self._fieldnames = next(self.reader) + except StopIteration: + pass + self.line_num = self.reader.line_num + return self._fieldnames + + @fieldnames.setter + def fieldnames(self, value): + self._fieldnames = value + + def __next__(self): + if self.line_num == 0: + # Used only for its side effect. 
+ self.fieldnames + row = next(self.reader) + self.line_num = self.reader.line_num + + # unlike the basic reader, we prefer not to return blanks, + # because we will typically wind up with a dict full of None + # values + while row == []: + row = next(self.reader) + d = OrderedDict(zip(self.fieldnames, row)) + lf = len(self.fieldnames) + lr = len(row) + if lf < lr: + d[self.restkey] = row[lf:] + elif lf > lr: + for key in self.fieldnames[lr:]: + d[key] = self.restval + return d + + +class DictWriter: + def __init__(self, f, fieldnames, restval="", extrasaction="raise", + dialect="excel", *args, **kwds): + self.fieldnames = fieldnames # list of keys for the dict + self.restval = restval # for writing short dicts + if extrasaction.lower() not in ("raise", "ignore"): + raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'" + % extrasaction) + self.extrasaction = extrasaction + self.writer = writer(f, dialect, *args, **kwds) + + def writeheader(self): + header = dict(zip(self.fieldnames, self.fieldnames)) + self.writerow(header) + + def _dict_to_list(self, rowdict): + if self.extrasaction == "raise": + wrong_fields = rowdict.keys() - self.fieldnames + if wrong_fields: + raise ValueError("dict contains fields not in fieldnames: " + + ", ".join([repr(x) for x in wrong_fields])) + return (rowdict.get(key, self.restval) for key in self.fieldnames) + + def writerow(self, rowdict): + return self.writer.writerow(self._dict_to_list(rowdict)) + + def writerows(self, rowdicts): + return self.writer.writerows(map(self._dict_to_list, rowdicts)) + +# Guard Sniffer's type checking against builds that exclude complex() +try: + complex +except NameError: + complex = float + +class Sniffer: + ''' + "Sniffs" the format of a CSV file (i.e. delimiter, quotechar) + Returns a Dialect object. + ''' + def __init__(self): + # in case there is more than one possible delimiter + self.preferred = [',', '\t', ';', ' ', ':'] + + + def sniff(self, sample, delimiters=None): + """ + Returns a dialect (or None) corresponding to the sample + """ + + quotechar, doublequote, delimiter, skipinitialspace = \ + self._guess_quote_and_delimiter(sample, delimiters) + if not delimiter: + delimiter, skipinitialspace = self._guess_delimiter(sample, + delimiters) + + if not delimiter: + raise Error("Could not determine delimiter") + + class dialect(Dialect): + _name = "sniffed" + lineterminator = '\r\n' + quoting = QUOTE_MINIMAL + # escapechar = '' + + dialect.doublequote = doublequote + dialect.delimiter = delimiter + # _csv.reader won't accept a quotechar of '' + dialect.quotechar = quotechar or '"' + dialect.skipinitialspace = skipinitialspace + + return dialect + + + def _guess_quote_and_delimiter(self, data, delimiters): + """ + Looks for text enclosed between two identical quotes + (the probable quotechar) which are preceded and followed + by the same character (the probable delimiter). + For example: + ,'some text', + The quote with the most wins, same with the delimiter. + If there is no quotechar the delimiter can't be determined + this way. + """ + + matches = [] + for restr in (r'(?P[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?P=delim)', # ,".*?", + r'(?:^|\n)(?P["\']).*?(?P=quote)(?P[^\w\n"\'])(?P ?)', # ".*?", + r'(?P[^\w\n"\'])(?P ?)(?P["\']).*?(?P=quote)(?:$|\n)', # ,".*?" + r'(?:^|\n)(?P["\']).*?(?P=quote)(?:$|\n)'): # ".*?" 
(no delim, no space) + regexp = re.compile(restr, re.DOTALL | re.MULTILINE) + matches = regexp.findall(data) + if matches: + break + + if not matches: + # (quotechar, doublequote, delimiter, skipinitialspace) + return ('', False, None, 0) + quotes = {} + delims = {} + spaces = 0 + groupindex = regexp.groupindex + for m in matches: + n = groupindex['quote'] - 1 + key = m[n] + if key: + quotes[key] = quotes.get(key, 0) + 1 + try: + n = groupindex['delim'] - 1 + key = m[n] + except KeyError: + continue + if key and (delimiters is None or key in delimiters): + delims[key] = delims.get(key, 0) + 1 + try: + n = groupindex['space'] - 1 + except KeyError: + continue + if m[n]: + spaces += 1 + + quotechar = max(quotes, key=quotes.get) + + if delims: + delim = max(delims, key=delims.get) + skipinitialspace = delims[delim] == spaces + if delim == '\n': # most likely a file with a single column + delim = '' + else: + # there is *no* delimiter, it's a single column of quoted data + delim = '' + skipinitialspace = 0 + + # if we see an extra quote between delimiters, we've got a + # double quoted format + dq_regexp = re.compile( + r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % \ + {'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE) + + + + if dq_regexp.search(data): + doublequote = True + else: + doublequote = False + + return (quotechar, doublequote, delim, skipinitialspace) + + + def _guess_delimiter(self, data, delimiters): + """ + The delimiter /should/ occur the same number of times on + each row. However, due to malformed data, it may not. We don't want + an all or nothing approach, so we allow for small variations in this + number. + 1) build a table of the frequency of each character on every line. + 2) build a table of frequencies of this frequency (meta-frequency?), + e.g. 'x occurred 5 times in 10 rows, 6 times in 1000 rows, + 7 times in 2 rows' + 3) use the mode of the meta-frequency to determine the /expected/ + frequency for that character + 4) find out how often the character actually meets that goal + 5) the character that best meets its goal is the delimiter + For performance reasons, the data is evaluated in chunks, so it can + try and evaluate the smallest portion of the data possible, evaluating + additional chunks as necessary. 
+ """ + + data = list(filter(None, data.split('\n'))) + + ascii = [chr(c) for c in range(127)] # 7-bit ASCII + + # build frequency tables + chunkLength = min(10, len(data)) + iteration = 0 + charFrequency = {} + modes = {} + delims = {} + start, end = 0, min(chunkLength, len(data)) + while start < len(data): + iteration += 1 + for line in data[start:end]: + for char in ascii: + metaFrequency = charFrequency.get(char, {}) + # must count even if frequency is 0 + freq = line.count(char) + # value is the mode + metaFrequency[freq] = metaFrequency.get(freq, 0) + 1 + charFrequency[char] = metaFrequency + + for char in charFrequency.keys(): + items = list(charFrequency[char].items()) + if len(items) == 1 and items[0][0] == 0: + continue + # get the mode of the frequencies + if len(items) > 1: + modes[char] = max(items, key=lambda x: x[1]) + # adjust the mode - subtract the sum of all + # other frequencies + items.remove(modes[char]) + modes[char] = (modes[char][0], modes[char][1] + - sum(item[1] for item in items)) + else: + modes[char] = items[0] + + # build a list of possible delimiters + modeList = modes.items() + total = float(chunkLength * iteration) + # (rows of consistent data) / (number of rows) = 100% + consistency = 1.0 + # minimum consistency threshold + threshold = 0.9 + while len(delims) == 0 and consistency >= threshold: + for k, v in modeList: + if v[0] > 0 and v[1] > 0: + if ((v[1]/total) >= consistency and + (delimiters is None or k in delimiters)): + delims[k] = v + consistency -= 0.01 + + if len(delims) == 1: + delim = list(delims.keys())[0] + skipinitialspace = (data[0].count(delim) == + data[0].count("%c " % delim)) + return (delim, skipinitialspace) + + # analyze another chunkLength lines + start = end + end += chunkLength + + if not delims: + return ('', 0) + + # if there's more than one, fall back to a 'preferred' list + if len(delims) > 1: + for d in self.preferred: + if d in delims.keys(): + skipinitialspace = (data[0].count(d) == + data[0].count("%c " % d)) + return (d, skipinitialspace) + + # nothing else indicates a preference, pick the character that + # dominates(?) + items = [(v,k) for (k,v) in delims.items()] + items.sort() + delim = items[-1][1] + + skipinitialspace = (data[0].count(delim) == + data[0].count("%c " % delim)) + return (delim, skipinitialspace) + + + def has_header(self, sample): + # Creates a dictionary of types of data in each column. If any + # column is of a single type (say, integers), *except* for the first + # row, then the first row is presumed to be labels. If the type + # can't be determined, it is assumed to be a string in which case + # the length of the string is the determining factor: if all of the + # rows except for the first are the same length, it's a header. + # Finally, a 'vote' is taken at the end for each column, adding or + # subtracting from the likelihood of the first row being a header. 
+ + rdr = reader(StringIO(sample), self.sniff(sample)) + + header = next(rdr) # assume first row is header + + columns = len(header) + columnTypes = {} + for i in range(columns): columnTypes[i] = None + + checked = 0 + for row in rdr: + # arbitrary number of rows to check, to keep it sane + if checked > 20: + break + checked += 1 + + if len(row) != columns: + continue # skip rows that have irregular number of columns + + for col in list(columnTypes.keys()): + + for thisType in [int, float, complex]: + try: + thisType(row[col]) + break + except (ValueError, OverflowError): + pass + else: + # fallback to length of string + thisType = len(row[col]) + + if thisType != columnTypes[col]: + if columnTypes[col] is None: # add new column type + columnTypes[col] = thisType + else: + # type is inconsistent, remove column from + # consideration + del columnTypes[col] + + # finally, compare results against first row and "vote" + # on whether it's a header + hasHeader = 0 + for col, colType in columnTypes.items(): + if type(colType) == type(0): # it's a length + if len(header[col]) != colType: + hasHeader += 1 + else: + hasHeader -= 1 + else: # attempt typecast + try: + colType(header[col]) + except (ValueError, TypeError): + hasHeader += 1 + else: + hasHeader -= 1 + + return hasHeader > 0 diff --git a/5GSPEED_DETAILED_ASSET/package.ini b/5GSPEED_DETAILED_ASSET/package.ini new file mode 100755 index 00000000..770e311a --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/package.ini @@ -0,0 +1,12 @@ +[5GSPEED_DETAILED_ASSET] +uuid = a8ac42ec-352d-4320-871c-a81f42b2372f +vendor = Cradlepoint +notes = Ookla speedtest from a Cradlepoint Endpoint. To run, make asset id blank +version_major = 0 +version_minor = 2 +auto_start = true +restart = true +reboot = true +firmware_major = 7 +firmware_minor = 2 + diff --git a/5GSPEED_DETAILED_ASSET/speedtest.py b/5GSPEED_DETAILED_ASSET/speedtest.py new file mode 100644 index 00000000..103311d2 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/speedtest.py @@ -0,0 +1,2001 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright 2012 Matt Martz +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
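+# How 5GSPEED_DETAILED_ASSET.py drives this vendored module (a sketch of the
+# calls that app actually makes; see that file for the full flow):
+#
+#     s = Speedtest()
+#     s.get_best_server()           # lowest-latency speedtest.net server
+#     s.download()                  # download speed in bits per second
+#     s.upload(pre_allocate=False)  # pre_allocate=False avoids a memory error
+#     s.results.dict()              # timestamp, client ISP, ping, ...
+#     s.results.share()             # URL of the speedtest.net result image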
+ +import os +import re +import csv +import sys +import math +import errno +import signal +import socket +import timeit +import datetime +import platform +import threading +import xml.parsers.expat + +try: + import gzip + GZIP_BASE = gzip.GzipFile +except ImportError: + gzip = None + GZIP_BASE = object + +__version__ = '2.1.2' + + +class FakeShutdownEvent(object): + """Class to fake a threading.Event.isSet so that users of this module + are not required to register their own threading.Event() + """ + + @staticmethod + def isSet(): + "Dummy method to always return false""" + return False + + +# Some global variables we use +DEBUG = False +_GLOBAL_DEFAULT_TIMEOUT = object() +PY25PLUS = sys.version_info[:2] >= (2, 5) +PY26PLUS = sys.version_info[:2] >= (2, 6) +PY32PLUS = sys.version_info[:2] >= (3, 2) + +# Begin import game to handle Python 2 and Python 3 +try: + import json +except ImportError: + try: + import simplejson as json + except ImportError: + json = None + +try: + import xml.etree.ElementTree as ET + try: + from xml.etree.ElementTree import _Element as ET_Element + except ImportError: + pass +except ImportError: + from xml.dom import minidom as DOM + from xml.parsers.expat import ExpatError + ET = None + +try: + from urllib2 import (urlopen, Request, HTTPError, URLError, + AbstractHTTPHandler, ProxyHandler, + HTTPDefaultErrorHandler, HTTPRedirectHandler, + HTTPErrorProcessor, OpenerDirector) +except ImportError: + from urllib.request import (urlopen, Request, HTTPError, URLError, + AbstractHTTPHandler, ProxyHandler, + HTTPDefaultErrorHandler, HTTPRedirectHandler, + HTTPErrorProcessor, OpenerDirector) + +try: + from httplib import HTTPConnection, BadStatusLine +except ImportError: + from http.client import HTTPConnection, BadStatusLine + +try: + from httplib import HTTPSConnection +except ImportError: + try: + from http.client import HTTPSConnection + except ImportError: + HTTPSConnection = None + +try: + from httplib import FakeSocket +except ImportError: + FakeSocket = None + +try: + from Queue import Queue +except ImportError: + from queue import Queue + +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +try: + from urlparse import parse_qs +except ImportError: + try: + from urllib.parse import parse_qs + except ImportError: + from cgi import parse_qs + +try: + from hashlib import md5 +except ImportError: + from md5 import md5 + +try: + from argparse import ArgumentParser as ArgParser + from argparse import SUPPRESS as ARG_SUPPRESS + PARSER_TYPE_INT = int + PARSER_TYPE_STR = str + PARSER_TYPE_FLOAT = float +except ImportError: + from optparse import OptionParser as ArgParser + from optparse import SUPPRESS_HELP as ARG_SUPPRESS + PARSER_TYPE_INT = 'int' + PARSER_TYPE_STR = 'string' + PARSER_TYPE_FLOAT = 'float' + +try: + from cStringIO import StringIO + BytesIO = None +except ImportError: + try: + from StringIO import StringIO + BytesIO = None + except ImportError: + from io import StringIO, BytesIO + +try: + import __builtin__ +except ImportError: + import builtins + from io import TextIOWrapper, FileIO + + class _Py3Utf8Output(TextIOWrapper): + """UTF-8 encoded wrapper around stdout for py3, to override + ASCII stdout + """ + def __init__(self, f, **kwargs): + buf = FileIO(f.fileno(), 'w') + super(_Py3Utf8Output, self).__init__( + buf, + encoding='utf8', + errors='strict' + ) + + def write(self, s): + super(_Py3Utf8Output, self).write(s) + self.flush() + + _py3_print = getattr(builtins, 'print') + try: + _py3_utf8_stdout = 
_Py3Utf8Output(sys.stdout) + _py3_utf8_stderr = _Py3Utf8Output(sys.stderr) + except OSError: + # sys.stdout/sys.stderr is not a compatible stdout/stderr object + # just use it and hope things go ok + _py3_utf8_stdout = sys.stdout + _py3_utf8_stderr = sys.stderr + + def to_utf8(v): + """No-op encode to utf-8 for py3""" + return v + + def print_(*args, **kwargs): + """Wrapper function for py3 to print, with a utf-8 encoded stdout""" + if kwargs.get('file') == sys.stderr: + kwargs['file'] = _py3_utf8_stderr + else: + kwargs['file'] = kwargs.get('file', _py3_utf8_stdout) + _py3_print(*args, **kwargs) +else: + del __builtin__ + + def to_utf8(v): + """Encode value to utf-8 if possible for py2""" + try: + return v.encode('utf8', 'strict') + except AttributeError: + return v + + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5. + + Taken from https://pypi.python.org/pypi/six/ + + Modified to set encoding to UTF-8 always, and to flush after write + """ + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + encoding = 'utf8' # Always trust UTF-8 for output + if (isinstance(fp, file) and + isinstance(data, unicode) and + encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(encoding, errors) + fp.write(data) + fp.flush() + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + +if PY32PLUS: + etree_iter = ET.Element.iter +elif PY25PLUS: + etree_iter = ET_Element.getiterator + +if PY26PLUS: + thread_is_alive = threading.Thread.is_alive +else: + thread_is_alive = threading.Thread.isAlive + + +# Exception "constants" to support Python 2 through Python 3 +try: + import ssl + try: + CERT_ERROR = (ssl.CertificateError,) + except AttributeError: + CERT_ERROR = tuple() + + HTTP_ERRORS = ( + (HTTPError, URLError, socket.error, ssl.SSLError, BadStatusLine) + + CERT_ERROR + ) +except ImportError: + ssl = None + HTTP_ERRORS = (HTTPError, URLError, socket.error, BadStatusLine) + + +class SpeedtestException(Exception): + """Base exception for this module""" + + +class SpeedtestCLIError(SpeedtestException): + """Generic exception for raising errors during CLI operation""" + + +class SpeedtestHTTPError(SpeedtestException): + """Base HTTP exception for this module""" + + +class SpeedtestConfigError(SpeedtestException): + """Configuration XML is invalid""" + + +class SpeedtestServersError(SpeedtestException): + """Servers XML is invalid""" + + +class ConfigRetrievalError(SpeedtestHTTPError): + """Could not retrieve config.php""" + + +class ServersRetrievalError(SpeedtestHTTPError): + """Could not retrieve 
speedtest-servers.php""" + + +class InvalidServerIDType(SpeedtestException): + """Server ID used for filtering was not an integer""" + + +class NoMatchedServers(SpeedtestException): + """No servers matched when filtering""" + + +class SpeedtestMiniConnectFailure(SpeedtestException): + """Could not connect to the provided speedtest mini server""" + + +class InvalidSpeedtestMiniServer(SpeedtestException): + """Server provided as a speedtest mini server does not actually appear + to be a speedtest mini server + """ + + +class ShareResultsConnectFailure(SpeedtestException): + """Could not connect to speedtest.net API to POST results""" + + +class ShareResultsSubmitFailure(SpeedtestException): + """Unable to successfully POST results to speedtest.net API after + connection + """ + + +class SpeedtestUploadTimeout(SpeedtestException): + """testlength configuration reached during upload + Used to ensure the upload halts when no additional data should be sent + """ + + +class SpeedtestBestServerFailure(SpeedtestException): + """Unable to determine best server""" + + +class SpeedtestMissingBestServer(SpeedtestException): + """get_best_server not called or not able to determine best server""" + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. 
+ + Largely vendored from Python 2.7, modified to work with Python 2.4 + """ + + host, port = address + err = None + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(float(timeout)) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error: + err = get_exception() + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise socket.error("getaddrinfo returns an empty list") + + +class SpeedtestHTTPConnection(HTTPConnection): + """Custom HTTPConnection to support source_address across + Python 2.4 - Python 3 + """ + def __init__(self, *args, **kwargs): + source_address = kwargs.pop('source_address', None) + timeout = kwargs.pop('timeout', 10) + + self._tunnel_host = None + + HTTPConnection.__init__(self, *args, **kwargs) + + self.source_address = source_address + self.timeout = timeout + + def connect(self): + """Connect to the host and port specified in __init__.""" + try: + self.sock = socket.create_connection( + (self.host, self.port), + self.timeout, + self.source_address + ) + except (AttributeError, TypeError): + self.sock = create_connection( + (self.host, self.port), + self.timeout, + self.source_address + ) + + if self._tunnel_host: + self._tunnel() + + +if HTTPSConnection: + class SpeedtestHTTPSConnection(HTTPSConnection): + """Custom HTTPSConnection to support source_address across + Python 2.4 - Python 3 + """ + default_port = 443 + + def __init__(self, *args, **kwargs): + source_address = kwargs.pop('source_address', None) + timeout = kwargs.pop('timeout', 10) + + self._tunnel_host = None + + HTTPSConnection.__init__(self, *args, **kwargs) + + self.timeout = timeout + self.source_address = source_address + + def connect(self): + "Connect to a host on a given (SSL) port." 
+ try: + self.sock = socket.create_connection( + (self.host, self.port), + self.timeout, + self.source_address + ) + except (AttributeError, TypeError): + self.sock = create_connection( + (self.host, self.port), + self.timeout, + self.source_address + ) + + if self._tunnel_host: + self._tunnel() + + if ssl: + try: + kwargs = {} + if hasattr(ssl, 'SSLContext'): + if self._tunnel_host: + kwargs['server_hostname'] = self._tunnel_host + else: + kwargs['server_hostname'] = self.host + self.sock = self._context.wrap_socket(self.sock, **kwargs) + except AttributeError: + self.sock = ssl.wrap_socket(self.sock) + try: + self.sock.server_hostname = self.host + except AttributeError: + pass + elif FakeSocket: + # Python 2.4/2.5 support + try: + self.sock = FakeSocket(self.sock, socket.ssl(self.sock)) + except AttributeError: + raise SpeedtestException( + 'This version of Python does not support HTTPS/SSL ' + 'functionality' + ) + else: + raise SpeedtestException( + 'This version of Python does not support HTTPS/SSL ' + 'functionality' + ) + + +def _build_connection(connection, source_address, timeout, context=None): + """Cross Python 2.4 - Python 3 callable to build an ``HTTPConnection`` or + ``HTTPSConnection`` with the args we need + + Called from ``http(s)_open`` methods of ``SpeedtestHTTPHandler`` or + ``SpeedtestHTTPSHandler`` + """ + def inner(host, **kwargs): + kwargs.update({ + 'source_address': source_address, + 'timeout': timeout + }) + if context: + kwargs['context'] = context + return connection(host, **kwargs) + return inner + + +class SpeedtestHTTPHandler(AbstractHTTPHandler): + """Custom ``HTTPHandler`` that can build a ``HTTPConnection`` with the + args we need for ``source_address`` and ``timeout`` + """ + def __init__(self, debuglevel=0, source_address=None, timeout=10): + AbstractHTTPHandler.__init__(self, debuglevel) + self.source_address = source_address + self.timeout = timeout + + def http_open(self, req): + return self.do_open( + _build_connection( + SpeedtestHTTPConnection, + self.source_address, + self.timeout + ), + req + ) + + http_request = AbstractHTTPHandler.do_request_ + + +class SpeedtestHTTPSHandler(AbstractHTTPHandler): + """Custom ``HTTPSHandler`` that can build a ``HTTPSConnection`` with the + args we need for ``source_address`` and ``timeout`` + """ + def __init__(self, debuglevel=0, context=None, source_address=None, + timeout=10): + AbstractHTTPHandler.__init__(self, debuglevel) + self._context = context + self.source_address = source_address + self.timeout = timeout + + def https_open(self, req): + return self.do_open( + _build_connection( + SpeedtestHTTPSConnection, + self.source_address, + self.timeout, + context=self._context, + ), + req + ) + + https_request = AbstractHTTPHandler.do_request_ + + +def build_opener(source_address=None, timeout=10): + """Function similar to ``urllib2.build_opener`` that will build + an ``OpenerDirector`` with the explicit handlers we want, + ``source_address`` for binding, ``timeout`` and our custom + `User-Agent` + """ + + printer('Timeout set to %d' % timeout, debug=True) + + if source_address: + source_address_tuple = (source_address, 0) + printer('Binding to source address: %r' % (source_address_tuple,), + debug=True) + else: + source_address_tuple = None + + handlers = [ + ProxyHandler(), + SpeedtestHTTPHandler(source_address=source_address_tuple, + timeout=timeout), + SpeedtestHTTPSHandler(source_address=source_address_tuple, + timeout=timeout), + HTTPDefaultErrorHandler(), + HTTPRedirectHandler(), + 
HTTPErrorProcessor() + ] + + opener = OpenerDirector() + opener.addheaders = [('User-agent', build_user_agent())] + + for handler in handlers: + opener.add_handler(handler) + + return opener + + +class GzipDecodedResponse(GZIP_BASE): + """A file-like object to decode a response encoded with the gzip + method, as described in RFC 1952. + + Largely copied from ``xmlrpclib``/``xmlrpc.client`` and modified + to work for py2.4-py3 + """ + def __init__(self, response): + # response doesn't support tell() and read(), required by + # GzipFile + if not gzip: + raise SpeedtestHTTPError('HTTP response body is gzip encoded, ' + 'but gzip support is not available') + IO = BytesIO or StringIO + self.io = IO() + while 1: + chunk = response.read(1024) + if len(chunk) == 0: + break + self.io.write(chunk) + self.io.seek(0) + gzip.GzipFile.__init__(self, mode='rb', fileobj=self.io) + + def close(self): + try: + gzip.GzipFile.close(self) + finally: + self.io.close() + + +def get_exception(): + """Helper function to work with py2.4-py3 for getting the current + exception in a try/except block + """ + return sys.exc_info()[1] + + +def distance(origin, destination): + """Determine distance between 2 sets of [lat,lon] in km""" + + lat1, lon1 = origin + lat2, lon2 = destination + radius = 6371 # km + + dlat = math.radians(lat2 - lat1) + dlon = math.radians(lon2 - lon1) + a = (math.sin(dlat / 2) * math.sin(dlat / 2) + + math.cos(math.radians(lat1)) * + math.cos(math.radians(lat2)) * math.sin(dlon / 2) * + math.sin(dlon / 2)) + c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a)) + d = radius * c + + return d + + +def build_user_agent(): + """Build a Mozilla/5.0 compatible User-Agent string""" + + ua_tuple = ( + 'Mozilla/5.0', + '(%s; U; %s; en-us)' % (platform.platform(), + platform.architecture()[0]), + 'Python/%s' % platform.python_version(), + '(KHTML, like Gecko)', + 'speedtest-cli/%s' % __version__ + ) + user_agent = ' '.join(ua_tuple) + printer('User-Agent: %s' % user_agent, debug=True) + return user_agent + + +def build_request(url, data=None, headers=None, bump='0', secure=False): + """Build a urllib2 request object + + This function automatically adds a User-Agent header to all requests + + """ + + if not headers: + headers = {} + + if url[0] == ':': + scheme = ('http', 'https')[bool(secure)] + schemed_url = '%s%s' % (scheme, url) + else: + schemed_url = url + + if '?' in url: + delim = '&' + else: + delim = '?' + + # WHO YOU GONNA CALL? CACHE BUSTERS! 
+ final_url = '%s%sx=%s.%s' % (schemed_url, delim, + int(timeit.time.time() * 1000), + bump) + + headers.update({ + 'Cache-Control': 'no-cache', + }) + + printer('%s %s' % (('GET', 'POST')[bool(data)], final_url), + debug=True) + + return Request(final_url, data=data, headers=headers) + + +def catch_request(request, opener=None): + """Helper function to catch common exceptions encountered when + establishing a connection with a HTTP/HTTPS request + + """ + + if opener: + _open = opener.open + else: + _open = urlopen + + try: + uh = _open(request) + if request.get_full_url() != uh.geturl(): + printer('Redirected to %s' % uh.geturl(), debug=True) + return uh, False + except HTTP_ERRORS: + e = get_exception() + return None, e + + +def get_response_stream(response): + """Helper function to return either a Gzip reader if + ``Content-Encoding`` is ``gzip`` otherwise the response itself + + """ + + try: + getheader = response.headers.getheader + except AttributeError: + getheader = response.getheader + + if getheader('content-encoding') == 'gzip': + return GzipDecodedResponse(response) + + return response + + +def get_attributes_by_tag_name(dom, tag_name): + """Retrieve an attribute from an XML document and return it in a + consistent format + + Only used with xml.dom.minidom, which is likely only to be used + with python versions older than 2.5 + """ + elem = dom.getElementsByTagName(tag_name)[0] + return dict(list(elem.attributes.items())) + + +def print_dots(shutdown_event): + """Built in callback function used by Thread classes for printing + status + """ + def inner(current, total, start=False, end=False): + if shutdown_event.isSet(): + return + + sys.stdout.write('.') + if current + 1 == total and end is True: + sys.stdout.write('\n') + sys.stdout.flush() + return inner + + +def do_nothing(*args, **kwargs): + pass + + +class HTTPDownloader(threading.Thread): + """Thread class for retrieving a URL""" + + def __init__(self, i, request, start, timeout, opener=None, + shutdown_event=None): + threading.Thread.__init__(self) + self.request = request + self.result = [0] + self.starttime = start + self.timeout = timeout + self.i = i + if opener: + self._opener = opener.open + else: + self._opener = urlopen + + if shutdown_event: + self._shutdown_event = shutdown_event + else: + self._shutdown_event = FakeShutdownEvent() + + def run(self): + try: + if (timeit.default_timer() - self.starttime) <= self.timeout: + f = self._opener(self.request) + while (not self._shutdown_event.isSet() and + (timeit.default_timer() - self.starttime) <= + self.timeout): + self.result.append(len(f.read(10240))) + if self.result[-1] == 0: + break + f.close() + except IOError: + pass + + +class HTTPUploaderData(object): + """File like object to improve cutting off the upload once the timeout + has been reached + """ + + def __init__(self, length, start, timeout, shutdown_event=None): + self.length = length + self.start = start + self.timeout = timeout + + if shutdown_event: + self._shutdown_event = shutdown_event + else: + self._shutdown_event = FakeShutdownEvent() + + self._data = None + + self.total = [0] + + def pre_allocate(self): + chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' + multiplier = int(round(int(self.length) / 36.0)) + IO = BytesIO or StringIO + try: + self._data = IO( + ('content1=%s' % + (chars * multiplier)[0:int(self.length) - 9] + ).encode() + ) + except MemoryError: + raise SpeedtestCLIError( + 'Insufficient memory to pre-allocate upload data. 
Please ' + 'use --no-pre-allocate' + ) + + @property + def data(self): + if not self._data: + self.pre_allocate() + return self._data + + def read(self, n=10240): + if ((timeit.default_timer() - self.start) <= self.timeout and + not self._shutdown_event.isSet()): + chunk = self.data.read(n) + self.total.append(len(chunk)) + return chunk + else: + raise SpeedtestUploadTimeout() + + def __len__(self): + return self.length + + +class HTTPUploader(threading.Thread): + """Thread class for putting a URL""" + + def __init__(self, i, request, start, size, timeout, opener=None, + shutdown_event=None): + threading.Thread.__init__(self) + self.request = request + self.request.data.start = self.starttime = start + self.size = size + self.result = None + self.timeout = timeout + self.i = i + + if opener: + self._opener = opener.open + else: + self._opener = urlopen + + if shutdown_event: + self._shutdown_event = shutdown_event + else: + self._shutdown_event = FakeShutdownEvent() + + def run(self): + request = self.request + try: + if ((timeit.default_timer() - self.starttime) <= self.timeout and + not self._shutdown_event.isSet()): + try: + f = self._opener(request) + except TypeError: + # PY24 expects a string or buffer + # This also causes issues with Ctrl-C, but we will concede + # for the moment that Ctrl-C on PY24 isn't immediate + request = build_request(self.request.get_full_url(), + data=request.data.read(self.size)) + f = self._opener(request) + f.read(11) + f.close() + self.result = sum(self.request.data.total) + else: + self.result = 0 + except (IOError, SpeedtestUploadTimeout): + self.result = sum(self.request.data.total) + + +class SpeedtestResults(object): + """Class for holding the results of a speedtest, including: + + Download speed + Upload speed + Ping/Latency to test server + Data about server that the test was run against + + Additionally this class can return a result data as a dictionary or CSV, + as well as submit a POST of the result data to the speedtest.net API + to get a share results image link. 
+ """ + + def __init__(self, download=0, upload=0, ping=0, server=None, client=None, + opener=None, secure=False): + self.download = download + self.upload = upload + self.ping = ping + if server is None: + self.server = {} + else: + self.server = server + self.client = client or {} + + self._share = None + self.timestamp = '%sZ' % datetime.datetime.utcnow().isoformat() + self.bytes_received = 0 + self.bytes_sent = 0 + + if opener: + self._opener = opener + else: + self._opener = build_opener() + + self._secure = secure + + def __repr__(self): + return repr(self.dict()) + + def share(self): + """POST data to the speedtest.net API to obtain a share results + link + """ + + if self._share: + return self._share + + download = int(round(self.download / 1000.0, 0)) + ping = int(round(self.ping, 0)) + upload = int(round(self.upload / 1000.0, 0)) + + # Build the request to send results back to speedtest.net + # We use a list instead of a dict because the API expects parameters + # in a certain order + api_data = [ + 'recommendedserverid=%s' % self.server['id'], + 'ping=%s' % ping, + 'screenresolution=', + 'promo=', + 'download=%s' % download, + 'screendpi=', + 'upload=%s' % upload, + 'testmethod=http', + 'hash=%s' % md5(('%s-%s-%s-%s' % + (ping, upload, download, '297aae72')) + .encode()).hexdigest(), + 'touchscreen=none', + 'startmode=pingselect', + 'accuracy=1', + 'bytesreceived=%s' % self.bytes_received, + 'bytessent=%s' % self.bytes_sent, + 'serverid=%s' % self.server['id'], + ] + + headers = {'Referer': 'http://c.speedtest.net/flash/speedtest.swf'} + request = build_request('://www.speedtest.net/api/api.php', + data='&'.join(api_data).encode(), + headers=headers, secure=self._secure) + f, e = catch_request(request, opener=self._opener) + if e: + raise ShareResultsConnectFailure(e) + + response = f.read() + code = f.code + f.close() + + if int(code) != 200: + raise ShareResultsSubmitFailure('Could not submit results to ' + 'speedtest.net') + + qsargs = parse_qs(response.decode()) + resultid = qsargs.get('resultid') + if not resultid or len(resultid) != 1: + raise ShareResultsSubmitFailure('Could not submit results to ' + 'speedtest.net') + + self._share = 'http://www.speedtest.net/result/%s.png' % resultid[0] + + return self._share + + def dict(self): + """Return dictionary of result data""" + + return { + 'download': self.download, + 'upload': self.upload, + 'ping': self.ping, + 'server': self.server, + 'timestamp': self.timestamp, + 'bytes_sent': self.bytes_sent, + 'bytes_received': self.bytes_received, + 'share': self._share, + 'client': self.client, + } + + @staticmethod + def csv_header(delimiter=','): + """Return CSV Headers""" + + row = ['Server ID', 'Sponsor', 'Server Name', 'Timestamp', 'Distance', + 'Ping', 'Download', 'Upload', 'Share', 'IP Address'] + out = StringIO() + writer = csv.writer(out, delimiter=delimiter, lineterminator='') + writer.writerow([to_utf8(v) for v in row]) + return out.getvalue() + + def csv(self, delimiter=','): + """Return data in CSV format""" + + data = self.dict() + out = StringIO() + writer = csv.writer(out, delimiter=delimiter, lineterminator='') + row = [data['server']['id'], data['server']['sponsor'], + data['server']['name'], data['timestamp'], + data['server']['d'], data['ping'], data['download'], + data['upload'], self._share or '', self.client['ip']] + writer.writerow([to_utf8(v) for v in row]) + return out.getvalue() + + def json(self, pretty=False): + """Return data in JSON format""" + + kwargs = {} + if pretty: + kwargs.update({ + 'indent': 4, 
+ 'sort_keys': True + }) + return json.dumps(self.dict(), **kwargs) + + +class Speedtest(object): + """Class for performing standard speedtest.net testing operations""" + + def __init__(self, config=None, source_address=None, timeout=10, + secure=False, shutdown_event=None): + self.config = {} + + self._source_address = source_address + self._timeout = timeout + self._opener = build_opener(source_address, timeout) + + self._secure = secure + + if shutdown_event: + self._shutdown_event = shutdown_event + else: + self._shutdown_event = FakeShutdownEvent() + + self.get_config() + if config is not None: + self.config.update(config) + + self.servers = {} + self.closest = [] + self._best = {} + + self.results = SpeedtestResults( + client=self.config['client'], + opener=self._opener, + secure=secure, + ) + + @property + def best(self): + if not self._best: + self.get_best_server() + return self._best + + def get_config(self): + """Download the speedtest.net configuration and return only the data + we are interested in + """ + + headers = {} + if gzip: + headers['Accept-Encoding'] = 'gzip' + request = build_request('://www.speedtest.net/speedtest-config.php', + headers=headers, secure=self._secure) + uh, e = catch_request(request, opener=self._opener) + if e: + raise ConfigRetrievalError(e) + configxml_list = [] + + stream = get_response_stream(uh) + + while 1: + try: + configxml_list.append(stream.read(1024)) + except (OSError, EOFError): + raise ConfigRetrievalError(get_exception()) + if len(configxml_list[-1]) == 0: + break + stream.close() + uh.close() + + if int(uh.code) != 200: + return None + + configxml = ''.encode().join(configxml_list) + + printer('Config XML:\n%s' % configxml, debug=True) + + try: + try: + root = ET.fromstring(configxml) + except ET.ParseError: + e = get_exception() + raise SpeedtestConfigError( + 'Malformed speedtest.net configuration: %s' % e + ) + server_config = root.find('server-config').attrib + download = root.find('download').attrib + upload = root.find('upload').attrib + # times = root.find('times').attrib + client = root.find('client').attrib + + except AttributeError: + try: + root = DOM.parseString(configxml) + except ExpatError: + e = get_exception() + raise SpeedtestConfigError( + 'Malformed speedtest.net configuration: %s' % e + ) + server_config = get_attributes_by_tag_name(root, 'server-config') + download = get_attributes_by_tag_name(root, 'download') + upload = get_attributes_by_tag_name(root, 'upload') + # times = get_attributes_by_tag_name(root, 'times') + client = get_attributes_by_tag_name(root, 'client') + + ignore_servers = list( + #map(int, server_config['ignoreids'].split(',')) + map(int, [server_no for server_no in server_config['ignoreids'].split(',') if server_no]) #11-April-2021 JWC: added to fix the speedtest cli module issue - https://github.com/sivel/speedtest-cli/pull/769 + ) + + ratio = int(upload['ratio']) + upload_max = int(upload['maxchunkcount']) + up_sizes = [32768, 65536, 131072, 262144, 524288, 1048576, 7340032] + sizes = { + 'upload': up_sizes[ratio - 1:], + 'download': [350, 500, 750, 1000, 1500, 2000, 2500, + 3000, 3500, 4000] + } + + size_count = len(sizes['upload']) + + upload_count = int(math.ceil(upload_max / size_count)) + + counts = { + 'upload': upload_count, + 'download': int(download['threadsperurl']) + } + + threads = { + 'upload': int(upload['threads']), + 'download': int(server_config['threadcount']) * 2 + } + + length = { + 'upload': int(upload['testlength']), + 'download': int(download['testlength']) + } + + 
self.config.update({ + 'client': client, + 'ignore_servers': ignore_servers, + 'sizes': sizes, + 'counts': counts, + 'threads': threads, + 'length': length, + 'upload_max': upload_count * size_count + }) + + try: + self.lat_lon = (float(client['lat']), float(client['lon'])) + except ValueError: + raise SpeedtestConfigError( + 'Unknown location: lat=%r lon=%r' % + (client.get('lat'), client.get('lon')) + ) + + printer('Config:\n%r' % self.config, debug=True) + + return self.config + + def get_servers(self, servers=None, exclude=None): + """Retrieve a the list of speedtest.net servers, optionally filtered + to servers matching those specified in the ``servers`` argument + """ + if servers is None: + servers = [] + + if exclude is None: + exclude = [] + + self.servers.clear() + + for server_list in (servers, exclude): + for i, s in enumerate(server_list): + try: + server_list[i] = int(s) + except ValueError: + raise InvalidServerIDType( + '%s is an invalid server type, must be int' % s + ) + + urls = [ + '://www.speedtest.net/speedtest-servers-static.php', + 'http://c.speedtest.net/speedtest-servers-static.php', + '://www.speedtest.net/speedtest-servers.php', + 'http://c.speedtest.net/speedtest-servers.php', + ] + + headers = {} + if gzip: + headers['Accept-Encoding'] = 'gzip' + + errors = [] + for url in urls: + try: + request = build_request( + '%s?threads=%s' % (url, + self.config['threads']['download']), + headers=headers, + secure=self._secure + ) + uh, e = catch_request(request, opener=self._opener) + if e: + errors.append('%s' % e) + raise ServersRetrievalError() + + stream = get_response_stream(uh) + + serversxml_list = [] + while 1: + try: + serversxml_list.append(stream.read(1024)) + except (OSError, EOFError): + raise ServersRetrievalError(get_exception()) + if len(serversxml_list[-1]) == 0: + break + + stream.close() + uh.close() + + if int(uh.code) != 200: + raise ServersRetrievalError() + + serversxml = ''.encode().join(serversxml_list) + + printer('Servers XML:\n%s' % serversxml, debug=True) + + try: + try: + try: + root = ET.fromstring(serversxml) + except ET.ParseError: + e = get_exception() + raise SpeedtestServersError( + 'Malformed speedtest.net server list: %s' % e + ) + elements = etree_iter(root, 'server') + except AttributeError: + try: + root = DOM.parseString(serversxml) + except ExpatError: + e = get_exception() + raise SpeedtestServersError( + 'Malformed speedtest.net server list: %s' % e + ) + elements = root.getElementsByTagName('server') + except (SyntaxError, xml.parsers.expat.ExpatError): + raise ServersRetrievalError() + + for server in elements: + try: + attrib = server.attrib + except AttributeError: + attrib = dict(list(server.attributes.items())) + + if servers and int(attrib.get('id')) not in servers: + continue + + if (int(attrib.get('id')) in self.config['ignore_servers'] + or int(attrib.get('id')) in exclude): + continue + + try: + d = distance(self.lat_lon, + (float(attrib.get('lat')), + float(attrib.get('lon')))) + except Exception: + continue + + attrib['d'] = d + + try: + self.servers[d].append(attrib) + except KeyError: + self.servers[d] = [attrib] + + break + + except ServersRetrievalError: + continue + + if (servers or exclude) and not self.servers: + raise NoMatchedServers() + + return self.servers + + def set_mini_server(self, server): + """Instead of querying for a list of servers, set a link to a + speedtest mini server + """ + + urlparts = urlparse(server) + + name, ext = os.path.splitext(urlparts[2]) + if ext: + url = 
os.path.dirname(server) + else: + url = server + + request = build_request(url) + uh, e = catch_request(request, opener=self._opener) + if e: + raise SpeedtestMiniConnectFailure('Failed to connect to %s' % + server) + else: + text = uh.read() + uh.close() + + extension = re.findall('upload_?[Ee]xtension: "([^"]+)"', + text.decode()) + if not extension: + for ext in ['php', 'asp', 'aspx', 'jsp']: + try: + f = self._opener.open( + '%s/speedtest/upload.%s' % (url, ext) + ) + except Exception: + pass + else: + data = f.read().strip().decode() + if (f.code == 200 and + len(data.splitlines()) == 1 and + re.match('size=[0-9]', data)): + extension = [ext] + break + if not urlparts or not extension: + raise InvalidSpeedtestMiniServer('Invalid Speedtest Mini Server: ' + '%s' % server) + + self.servers = [{ + 'sponsor': 'Speedtest Mini', + 'name': urlparts[1], + 'd': 0, + 'url': '%s/speedtest/upload.%s' % (url.rstrip('/'), extension[0]), + 'latency': 0, + 'id': 0 + }] + + return self.servers + + def get_closest_servers(self, limit=5): + """Limit servers to the closest speedtest.net servers based on + geographic distance + """ + + if not self.servers: + self.get_servers() + + for d in sorted(self.servers.keys()): + for s in self.servers[d]: + self.closest.append(s) + if len(self.closest) == limit: + break + else: + continue + break + + printer('Closest Servers:\n%r' % self.closest, debug=True) + return self.closest + + def get_best_server(self, servers=None): + """Perform a speedtest.net "ping" to determine which speedtest.net + server has the lowest latency + """ + + if not servers: + if not self.closest: + servers = self.get_closest_servers() + servers = self.closest + + if self._source_address: + source_address_tuple = (self._source_address, 0) + else: + source_address_tuple = None + + user_agent = build_user_agent() + + results = {} + for server in servers: + cum = [] + url = os.path.dirname(server['url']) + stamp = int(timeit.time.time() * 1000) + latency_url = '%s/latency.txt?x=%s' % (url, stamp) + for i in range(0, 3): + this_latency_url = '%s.%s' % (latency_url, i) + printer('%s %s' % ('GET', this_latency_url), + debug=True) + urlparts = urlparse(latency_url) + try: + if urlparts[0] == 'https': + h = SpeedtestHTTPSConnection( + urlparts[1], + source_address=source_address_tuple + ) + else: + h = SpeedtestHTTPConnection( + urlparts[1], + source_address=source_address_tuple + ) + headers = {'User-Agent': user_agent} + path = '%s?%s' % (urlparts[2], urlparts[4]) + start = timeit.default_timer() + h.request("GET", path, headers=headers) + r = h.getresponse() + total = (timeit.default_timer() - start) + except HTTP_ERRORS: + e = get_exception() + printer('ERROR: %r' % e, debug=True) + cum.append(3600) + continue + + text = r.read(9) + if int(r.status) == 200 and text == 'test=test'.encode(): + cum.append(total) + else: + cum.append(3600) + h.close() + + avg = round((sum(cum) / 6) * 1000.0, 3) + results[avg] = server + + try: + fastest = sorted(results.keys())[0] + except IndexError: + raise SpeedtestBestServerFailure('Unable to connect to servers to ' + 'test latency.') + best = results[fastest] + best['latency'] = fastest + + self.results.ping = fastest + self.results.server = best + + self._best.update(best) + printer('Best Server:\n%r' % best, debug=True) + return best + + def download(self, callback=do_nothing, threads=None): + """Test download speed against speedtest.net + + A ``threads`` value of ``None`` will fall back to those dictated + by the speedtest.net configuration + """ + + urls = 
[] + for size in self.config['sizes']['download']: + for _ in range(0, self.config['counts']['download']): + urls.append('%s/random%sx%s.jpg' % + (os.path.dirname(self.best['url']), size, size)) + + request_count = len(urls) + requests = [] + for i, url in enumerate(urls): + requests.append( + build_request(url, bump=i, secure=self._secure) + ) + + max_threads = threads or self.config['threads']['download'] + in_flight = {'threads': 0} + + def producer(q, requests, request_count): + for i, request in enumerate(requests): + thread = HTTPDownloader( + i, + request, + start, + self.config['length']['download'], + opener=self._opener, + shutdown_event=self._shutdown_event + ) + while in_flight['threads'] >= max_threads: + timeit.time.sleep(0.001) + thread.start() + q.put(thread, True) + in_flight['threads'] += 1 + callback(i, request_count, start=True) + + finished = [] + + def consumer(q, request_count): + _is_alive = thread_is_alive + while len(finished) < request_count: + thread = q.get(True) + while _is_alive(thread): + thread.join(timeout=0.001) + in_flight['threads'] -= 1 + finished.append(sum(thread.result)) + callback(thread.i, request_count, end=True) + + q = Queue(max_threads) + prod_thread = threading.Thread(target=producer, + args=(q, requests, request_count)) + cons_thread = threading.Thread(target=consumer, + args=(q, request_count)) + start = timeit.default_timer() + prod_thread.start() + cons_thread.start() + _is_alive = thread_is_alive + while _is_alive(prod_thread): + prod_thread.join(timeout=0.001) + while _is_alive(cons_thread): + cons_thread.join(timeout=0.001) + + stop = timeit.default_timer() + self.results.bytes_received = sum(finished) + self.results.download = ( + (self.results.bytes_received / (stop - start)) * 8.0 + ) + if self.results.download > 100000: + self.config['threads']['upload'] = 8 + return self.results.download + + def upload(self, callback=do_nothing, pre_allocate=True, threads=None): + """Test upload speed against speedtest.net + + A ``threads`` value of ``None`` will fall back to those dictated + by the speedtest.net configuration + """ + + sizes = [] + + for size in self.config['sizes']['upload']: + for _ in range(0, self.config['counts']['upload']): + sizes.append(size) + + # request_count = len(sizes) + request_count = self.config['upload_max'] + + requests = [] + for i, size in enumerate(sizes): + # We set ``0`` for ``start`` and handle setting the actual + # ``start`` in ``HTTPUploader`` to get better measurements + data = HTTPUploaderData( + size, + 0, + self.config['length']['upload'], + shutdown_event=self._shutdown_event + ) + if pre_allocate: + data.pre_allocate() + + headers = {'Content-length': size} + requests.append( + ( + build_request(self.best['url'], data, secure=self._secure, + headers=headers), + size + ) + ) + + max_threads = threads or self.config['threads']['upload'] + in_flight = {'threads': 0} + + def producer(q, requests, request_count): + for i, request in enumerate(requests[:request_count]): + thread = HTTPUploader( + i, + request[0], + start, + request[1], + self.config['length']['upload'], + opener=self._opener, + shutdown_event=self._shutdown_event + ) + while in_flight['threads'] >= max_threads: + timeit.time.sleep(0.001) + thread.start() + q.put(thread, True) + in_flight['threads'] += 1 + callback(i, request_count, start=True) + + finished = [] + + def consumer(q, request_count): + _is_alive = thread_is_alive + while len(finished) < request_count: + thread = q.get(True) + while _is_alive(thread): + 
thread.join(timeout=0.001) + in_flight['threads'] -= 1 + finished.append(thread.result) + callback(thread.i, request_count, end=True) + + q = Queue(threads or self.config['threads']['upload']) + prod_thread = threading.Thread(target=producer, + args=(q, requests, request_count)) + cons_thread = threading.Thread(target=consumer, + args=(q, request_count)) + start = timeit.default_timer() + prod_thread.start() + cons_thread.start() + _is_alive = thread_is_alive + while _is_alive(prod_thread): + prod_thread.join(timeout=0.1) + while _is_alive(cons_thread): + cons_thread.join(timeout=0.1) + + stop = timeit.default_timer() + self.results.bytes_sent = sum(finished) + self.results.upload = ( + (self.results.bytes_sent / (stop - start)) * 8.0 + ) + return self.results.upload + + +def ctrl_c(shutdown_event): + """Catch Ctrl-C key sequence and set a SHUTDOWN_EVENT for our threaded + operations + """ + def inner(signum, frame): + shutdown_event.set() + printer('\nCancelling...', error=True) + sys.exit(0) + return inner + + +def version(): + """Print the version""" + + printer('speedtest-cli %s' % __version__) + printer('Python %s' % sys.version.replace('\n', '')) + sys.exit(0) + + +def csv_header(delimiter=','): + """Print the CSV Headers""" + + printer(SpeedtestResults.csv_header(delimiter=delimiter)) + sys.exit(0) + + +def parse_args(): + """Function to handle building and parsing of command line arguments""" + description = ( + 'Command line interface for testing internet bandwidth using ' + 'speedtest.net.\n' + '------------------------------------------------------------' + '--------------\n' + 'https://github.com/sivel/speedtest-cli') + + parser = ArgParser(description=description) + # Give optparse.OptionParser an `add_argument` method for + # compatibility with argparse.ArgumentParser + try: + parser.add_argument = parser.add_option + except AttributeError: + pass + parser.add_argument('--no-download', dest='download', default=True, + action='store_const', const=False, + help='Do not perform download test') + parser.add_argument('--no-upload', dest='upload', default=True, + action='store_const', const=False, + help='Do not perform upload test') + parser.add_argument('--single', default=False, action='store_true', + help='Only use a single connection instead of ' + 'multiple. This simulates a typical file ' + 'transfer.') + parser.add_argument('--bytes', dest='units', action='store_const', + const=('byte', 8), default=('bit', 1), + help='Display values in bytes instead of bits. Does ' + 'not affect the image generated by --share, nor ' + 'output from --json or --csv') + parser.add_argument('--share', action='store_true', + help='Generate and provide a URL to the speedtest.net ' + 'share results image, not displayed with --csv') + parser.add_argument('--simple', action='store_true', default=False, + help='Suppress verbose output, only show basic ' + 'information') + parser.add_argument('--csv', action='store_true', default=False, + help='Suppress verbose output, only show basic ' + 'information in CSV format. Speeds listed in ' + 'bit/s and not affected by --bytes') + parser.add_argument('--csv-delimiter', default=',', type=PARSER_TYPE_STR, + help='Single character delimiter to use in CSV ' + 'output. Default ","') + parser.add_argument('--csv-header', action='store_true', default=False, + help='Print CSV headers') + parser.add_argument('--json', action='store_true', default=False, + help='Suppress verbose output, only show basic ' + 'information in JSON format. 
Speeds listed in ' + 'bit/s and not affected by --bytes') + parser.add_argument('--list', action='store_true', + help='Display a list of speedtest.net servers ' + 'sorted by distance') + parser.add_argument('--server', type=PARSER_TYPE_INT, action='append', + help='Specify a server ID to test against. Can be ' + 'supplied multiple times') + parser.add_argument('--exclude', type=PARSER_TYPE_INT, action='append', + help='Exclude a server from selection. Can be ' + 'supplied multiple times') + parser.add_argument('--mini', help='URL of the Speedtest Mini server') + parser.add_argument('--source', help='Source IP address to bind to') + parser.add_argument('--timeout', default=10, type=PARSER_TYPE_FLOAT, + help='HTTP timeout in seconds. Default 10') + parser.add_argument('--secure', action='store_true', + help='Use HTTPS instead of HTTP when communicating ' + 'with speedtest.net operated servers') + parser.add_argument('--no-pre-allocate', dest='pre_allocate', + action='store_const', default=True, const=False, + help='Do not pre allocate upload data. Pre allocation ' + 'is enabled by default to improve upload ' + 'performance. To support systems with ' + 'insufficient memory, use this option to avoid a ' + 'MemoryError') + parser.add_argument('--version', action='store_true', + help='Show the version number and exit') + parser.add_argument('--debug', action='store_true', + help=ARG_SUPPRESS, default=ARG_SUPPRESS) + + options = parser.parse_args() + if isinstance(options, tuple): + args = options[0] + else: + args = options + return args + + +def validate_optional_args(args): + """Check if an argument was provided that depends on a module that may + not be part of the Python standard library. + + If such an argument is supplied, and the module does not exist, exit + with an error stating which module is missing. + """ + optional_args = { + 'json': ('json/simplejson python module', json), + 'secure': ('SSL support', HTTPSConnection), + } + + for arg, info in optional_args.items(): + if getattr(args, arg, False) and info[1] is None: + raise SystemExit('%s is not installed. 
--%s is ' + 'unavailable' % (info[0], arg)) + + +def printer(string, quiet=False, debug=False, error=False, **kwargs): + """Helper function print a string with various features""" + + if debug and not DEBUG: + return + + if debug: + if sys.stdout.isatty(): + out = '\033[1;30mDEBUG: %s\033[0m' % string + else: + out = 'DEBUG: %s' % string + else: + out = string + + if error: + kwargs['file'] = sys.stderr + + if not quiet: + print_(out, **kwargs) + + +def shell(): + """Run the full speedtest.net test""" + + global DEBUG + shutdown_event = threading.Event() + + signal.signal(signal.SIGINT, ctrl_c(shutdown_event)) + + args = parse_args() + + # Print the version and exit + if args.version: + version() + + if not args.download and not args.upload: + raise SpeedtestCLIError('Cannot supply both --no-download and ' + '--no-upload') + + if len(args.csv_delimiter) != 1: + raise SpeedtestCLIError('--csv-delimiter must be a single character') + + if args.csv_header: + csv_header(args.csv_delimiter) + + validate_optional_args(args) + + debug = getattr(args, 'debug', False) + if debug == 'SUPPRESSHELP': + debug = False + if debug: + DEBUG = True + + if args.simple or args.csv or args.json: + quiet = True + else: + quiet = False + + if args.csv or args.json: + machine_format = True + else: + machine_format = False + + # Don't set a callback if we are running quietly + if quiet or debug: + callback = do_nothing + else: + callback = print_dots(shutdown_event) + + printer('Retrieving speedtest.net configuration...', quiet) + try: + speedtest = Speedtest( + source_address=args.source, + timeout=args.timeout, + secure=args.secure + ) + except (ConfigRetrievalError,) + HTTP_ERRORS: + printer('Cannot retrieve speedtest configuration', error=True) + raise SpeedtestCLIError(get_exception()) + + if args.list: + try: + speedtest.get_servers() + except (ServersRetrievalError,) + HTTP_ERRORS: + printer('Cannot retrieve speedtest server list', error=True) + raise SpeedtestCLIError(get_exception()) + + for _, servers in sorted(speedtest.servers.items()): + for server in servers: + line = ('%(id)5s) %(sponsor)s (%(name)s, %(country)s) ' + '[%(d)0.2f km]' % server) + try: + printer(line) + except IOError: + e = get_exception() + if e.errno != errno.EPIPE: + raise + sys.exit(0) + + printer('Testing from %(isp)s (%(ip)s)...' 
% speedtest.config['client'], + quiet) + + if not args.mini: + printer('Retrieving speedtest.net server list...', quiet) + try: + speedtest.get_servers(servers=args.server, exclude=args.exclude) + except NoMatchedServers: + raise SpeedtestCLIError( + 'No matched servers: %s' % + ', '.join('%s' % s for s in args.server) + ) + except (ServersRetrievalError,) + HTTP_ERRORS: + printer('Cannot retrieve speedtest server list', error=True) + raise SpeedtestCLIError(get_exception()) + except InvalidServerIDType: + raise SpeedtestCLIError( + '%s is an invalid server type, must ' + 'be an int' % ', '.join('%s' % s for s in args.server) + ) + + if args.server and len(args.server) == 1: + printer('Retrieving information for the selected server...', quiet) + else: + printer('Selecting best server based on ping...', quiet) + speedtest.get_best_server() + elif args.mini: + speedtest.get_best_server(speedtest.set_mini_server(args.mini)) + + results = speedtest.results + + printer('Hosted by %(sponsor)s (%(name)s) [%(d)0.2f km]: ' + '%(latency)s ms' % results.server, quiet) + + if args.download: + printer('Testing download speed', quiet, + end=('', '\n')[bool(debug)]) + speedtest.download( + callback=callback, + threads=(None, 1)[args.single] + ) + printer('Download: %0.2f M%s/s' % + ((results.download / 1000.0 / 1000.0) / args.units[1], + args.units[0]), + quiet) + else: + printer('Skipping download test', quiet) + + if args.upload: + printer('Testing upload speed', quiet, + end=('', '\n')[bool(debug)]) + speedtest.upload( + callback=callback, + pre_allocate=args.pre_allocate, + threads=(None, 1)[args.single] + ) + printer('Upload: %0.2f M%s/s' % + ((results.upload / 1000.0 / 1000.0) / args.units[1], + args.units[0]), + quiet) + else: + printer('Skipping upload test', quiet) + + printer('Results:\n%r' % results.dict(), debug=True) + + if not args.simple and args.share: + results.share() + + if args.simple: + printer('Ping: %s ms\nDownload: %0.2f M%s/s\nUpload: %0.2f M%s/s' % + (results.ping, + (results.download / 1000.0 / 1000.0) / args.units[1], + args.units[0], + (results.upload / 1000.0 / 1000.0) / args.units[1], + args.units[0])) + elif args.csv: + printer(results.csv(delimiter=args.csv_delimiter)) + elif args.json: + printer(results.json()) + + if args.share and not machine_format: + printer('Share results: %s' % results.share()) + + +def main(): + try: + shell() + except KeyboardInterrupt: + printer('\nCancelling...', error=True) + except (SpeedtestException, SystemExit): + e = get_exception() + # Ignore a successful exit, or argparse exit + if getattr(e, 'code', 1) not in (0, 2): + msg = '%s' % e + if not msg: + msg = '%r' % e + raise SystemExit('ERROR: %s' % msg) + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/5GSPEED_DETAILED_ASSET/start.sh b/5GSPEED_DETAILED_ASSET/start.sh new file mode 100755 index 00000000..18193aa2 --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/start.sh @@ -0,0 +1,2 @@ +#!/bin/bash +cppython 5GSPEED_DETAILED_ASSET.py \ No newline at end of file diff --git a/5GSPEED_DETAILED_ASSET/timeit.py b/5GSPEED_DETAILED_ASSET/timeit.py new file mode 100644 index 00000000..8eea766b --- /dev/null +++ b/5GSPEED_DETAILED_ASSET/timeit.py @@ -0,0 +1,362 @@ +#! /usr/bin/env python3 + +"""Tool for measuring execution time of small code snippets. + +This module avoids a number of common traps for measuring execution +times. See also Tim Peters' introduction to the Algorithms chapter in +the Python Cookbook, published by O'Reilly. 
+ +Library usage: see the Timer class. + +Command line usage: + python timeit.py [-n N] [-r N] [-s S] [-t] [-c] [-p] [-h] [--] [statement] + +Options: + -n/--number N: how many times to execute 'statement' (default: see below) + -r/--repeat N: how many times to repeat the timer (default 3) + -s/--setup S: statement to be executed once initially (default 'pass'). + Execution time of this setup statement is NOT timed. + -p/--process: use time.process_time() (default is time.perf_counter()) + -t/--time: use time.time() (deprecated) + -c/--clock: use time.clock() (deprecated) + -v/--verbose: print raw timing results; repeat for more digits precision + -u/--unit: set the output time unit (usec, msec, or sec) + -h/--help: print this usage message and exit + --: separate options from statement, use when statement starts with - + statement: statement to be timed (default 'pass') + +A multi-line statement may be given by specifying each line as a +separate argument; indented lines are possible by enclosing an +argument in quotes and using leading spaces. Multiple -s options are +treated similarly. + +If -n is not given, a suitable number of loops is calculated by trying +successive powers of 10 until the total time is at least 0.2 seconds. + +Note: there is a certain baseline overhead associated with executing a +pass statement. It differs between versions. The code here doesn't try +to hide it, but you should be aware of it. The baseline overhead can be +measured by invoking the program without arguments. + +Classes: + + Timer + +Functions: + + timeit(string, string) -> float + repeat(string, string) -> list + default_timer() -> float + +""" + +import gc +import sys +import time +import itertools + +__all__ = ["Timer", "timeit", "repeat", "default_timer"] + +dummy_src_name = "" +default_number = 1000000 +default_repeat = 3 +default_timer = time.perf_counter + +_globals = globals + +# Don't change the indentation of the template; the reindent() calls +# in Timer.__init__() depend on setup being indented 4 spaces and stmt +# being indented 8 spaces. +template = """ +def inner(_it, _timer{init}): + {setup} + _t0 = _timer() + for _i in _it: + {stmt} + _t1 = _timer() + return _t1 - _t0 +""" + +def reindent(src, indent): + """Helper to reindent a multi-line statement.""" + return src.replace("\n", "\n" + " "*indent) + +class Timer: + """Class for timing execution speed of small code snippets. + + The constructor takes a statement to be timed, an additional + statement used for setup, and a timer function. Both statements + default to 'pass'; the timer function is platform-dependent (see + module doc string). If 'globals' is specified, the code will be + executed within that namespace (as opposed to inside timeit's + namespace). + + To measure the execution time of the first statement, use the + timeit() method. The repeat() method is a convenience to call + timeit() multiple times and return a list of results. + + The statements may contain newlines, as long as they don't contain + multi-line string literals. + """ + + def __init__(self, stmt="pass", setup="pass", timer=default_timer, + globals=None): + """Constructor. 
See class doc string.""" + self.timer = timer + local_ns = {} + global_ns = _globals() if globals is None else globals + init = '' + if isinstance(setup, str): + # Check that the code can be compiled outside a function + compile(setup, dummy_src_name, "exec") + stmtprefix = setup + '\n' + setup = reindent(setup, 4) + elif callable(setup): + local_ns['_setup'] = setup + init += ', _setup=_setup' + stmtprefix = '' + setup = '_setup()' + else: + raise ValueError("setup is neither a string nor callable") + if isinstance(stmt, str): + # Check that the code can be compiled outside a function + compile(stmtprefix + stmt, dummy_src_name, "exec") + stmt = reindent(stmt, 8) + elif callable(stmt): + local_ns['_stmt'] = stmt + init += ', _stmt=_stmt' + stmt = '_stmt()' + else: + raise ValueError("stmt is neither a string nor callable") + src = template.format(stmt=stmt, setup=setup, init=init) + self.src = src # Save for traceback display + code = compile(src, dummy_src_name, "exec") + exec(code, global_ns, local_ns) + self.inner = local_ns["inner"] + + def print_exc(self, file=None): + """Helper to print a traceback from the timed code. + + Typical use: + + t = Timer(...) # outside the try/except + try: + t.timeit(...) # or t.repeat(...) + except: + t.print_exc() + + The advantage over the standard traceback is that source lines + in the compiled template will be displayed. + + The optional file argument directs where the traceback is + sent; it defaults to sys.stderr. + """ + import linecache, traceback + if self.src is not None: + linecache.cache[dummy_src_name] = (len(self.src), + None, + self.src.split("\n"), + dummy_src_name) + # else the source is already stored somewhere else + + traceback.print_exc(file=file) + + def timeit(self, number=default_number): + """Time 'number' executions of the main statement. + + To be precise, this executes the setup statement once, and + then returns the time it takes to execute the main statement + a number of times, as a float measured in seconds. The + argument is the number of times through the loop, defaulting + to one million. The main statement, the setup statement and + the timer function to be used are passed to the constructor. + """ + it = itertools.repeat(None, number) + gcold = gc.isenabled() + gc.disable() + try: + timing = self.inner(it, self.timer) + finally: + if gcold: + gc.enable() + return timing + + def repeat(self, repeat=default_repeat, number=default_number): + """Call timeit() a few times. + + This is a convenience function that calls the timeit() + repeatedly, returning a list of results. The first argument + specifies how many times to call timeit(), defaulting to 3; + the second argument specifies the timer argument, defaulting + to one million. + + Note: it's tempting to calculate mean and standard deviation + from the result vector and report these. However, this is not + very useful. In a typical case, the lowest value gives a + lower bound for how fast your machine can run the given code + snippet; higher values in the result vector are typically not + caused by variability in Python's speed, but by other + processes interfering with your timing accuracy. So the min() + of the result is probably the only number you should be + interested in. After that, you should look at the entire + vector and apply common sense rather than statistics. 
+ """ + r = [] + for i in range(repeat): + t = self.timeit(number) + r.append(t) + return r + + def autorange(self, callback=None): + """Return the number of loops and time taken so that total time >= 0.2. + + Calls the timeit method with *number* set to successive powers of + ten (10, 100, 1000, ...) up to a maximum of one billion, until + the time taken is at least 0.2 second, or the maximum is reached. + Returns ``(number, time_taken)``. + + If *callback* is given and is not None, it will be called after + each trial with two arguments: ``callback(number, time_taken)``. + """ + for i in range(1, 10): + number = 10**i + time_taken = self.timeit(number) + if callback: + callback(number, time_taken) + if time_taken >= 0.2: + break + return (number, time_taken) + +def timeit(stmt="pass", setup="pass", timer=default_timer, + number=default_number, globals=None): + """Convenience function to create Timer object and call timeit method.""" + return Timer(stmt, setup, timer, globals).timeit(number) + +def repeat(stmt="pass", setup="pass", timer=default_timer, + repeat=default_repeat, number=default_number, globals=None): + """Convenience function to create Timer object and call repeat method.""" + return Timer(stmt, setup, timer, globals).repeat(repeat, number) + +def main(args=None, *, _wrap_timer=None): + """Main program, used when run as a script. + + The optional 'args' argument specifies the command line to be parsed, + defaulting to sys.argv[1:]. + + The return value is an exit code to be passed to sys.exit(); it + may be None to indicate success. + + When an exception happens during timing, a traceback is printed to + stderr and the return value is 1. Exceptions at other times + (including the template compilation) are not caught. + + '_wrap_timer' is an internal interface used for unit testing. If it + is not None, it must be a callable that accepts a timer function + and returns another timer function (used for unit testing). + """ + if args is None: + args = sys.argv[1:] + import getopt + try: + opts, args = getopt.getopt(args, "n:u:s:r:tcpvh", + ["number=", "setup=", "repeat=", + "time", "clock", "process", + "verbose", "unit=", "help"]) + except getopt.error as err: + print(err) + print("use -h/--help for command line help") + return 2 + timer = default_timer + stmt = "\n".join(args) or "pass" + number = 0 # auto-determine + setup = [] + repeat = default_repeat + verbose = 0 + time_unit = None + units = {"usec": 1, "msec": 1e3, "sec": 1e6} + precision = 3 + for o, a in opts: + if o in ("-n", "--number"): + number = int(a) + if o in ("-s", "--setup"): + setup.append(a) + if o in ("-u", "--unit"): + if a in units: + time_unit = a + else: + print("Unrecognized unit. 
Please select usec, msec, or sec.", + file=sys.stderr) + return 2 + if o in ("-r", "--repeat"): + repeat = int(a) + if repeat <= 0: + repeat = 1 + if o in ("-t", "--time"): + timer = time.time + if o in ("-c", "--clock"): + timer = time.clock + if o in ("-p", "--process"): + timer = time.process_time + if o in ("-v", "--verbose"): + if verbose: + precision += 1 + verbose += 1 + if o in ("-h", "--help"): + print(__doc__, end=' ') + return 0 + setup = "\n".join(setup) or "pass" + # Include the current directory, so that local imports work (sys.path + # contains the directory of this script, rather than the current + # directory) + import os + sys.path.insert(0, os.curdir) + if _wrap_timer is not None: + timer = _wrap_timer(timer) + t = Timer(stmt, setup, timer) + if number == 0: + # determine number so that 0.2 <= total time < 2.0 + callback = None + if verbose: + def callback(number, time_taken): + msg = "{num} loops -> {secs:.{prec}g} secs" + print(msg.format(num=number, secs=time_taken, prec=precision)) + try: + number, _ = t.autorange(callback) + except: + t.print_exc() + return 1 + try: + r = t.repeat(repeat, number) + except: + t.print_exc() + return 1 + best = min(r) + if verbose: + print("raw times:", " ".join(["%.*g" % (precision, x) for x in r])) + print("%d loops," % number, end=' ') + usec = best * 1e6 / number + if time_unit is not None: + scale = units[time_unit] + else: + scales = [(scale, unit) for unit, scale in units.items()] + scales.sort(reverse=True) + for scale, time_unit in scales: + if usec >= scale: + break + print("best of %d: %.*g %s per loop" % (repeat, precision, + usec/scale, time_unit)) + best = min(r) + usec = best * 1e6 / number + worst = max(r) + if worst >= best * 4: + usec = worst * 1e6 / number + import warnings + warnings.warn_explicit( + "The test results are likely unreliable. The worst\n" + "time (%.*g %s) was more than four times slower than the best time." % + (precision, usec/scale, time_unit), + UserWarning, '', 0) + return None + +if __name__ == "__main__": + sys.exit(main())
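
Note on the bundled timeit.py: it keeps the stock library interface described in its module docstring ("Library usage: see the Timer class"), so it can also be driven programmatically when profiling snippets on the router. A minimal sketch, assuming the package directory is on the import path; the statement and setup strings below are illustrative only:

    from timeit import Timer  # resolves to the bundled timeit.py inside the package

    # autorange() scales the loop count by powers of ten until the total
    # runtime is at least 0.2 s; repeat() then re-runs that many loops.
    t = Timer(stmt="sorted(data)", setup="data = list(range(1000))[::-1]")
    number, _ = t.autorange()
    best = min(t.repeat(repeat=3, number=number))
    print("best of 3: %.3g usec per loop" % (best * 1e6 / number))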
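
Note on the bundled speedtest.py: it exposes the standard speedtest-cli 2.1.2 programmatic API shown above (Speedtest, get_best_server, download, upload, SpeedtestResults). A minimal standalone sketch of a typical call sequence against the bundled module; the settings shown (secure=True, pre_allocate=False) are illustrative choices, not requirements:

    from speedtest import Speedtest  # resolves to the bundled speedtest.py

    s = Speedtest(secure=True)        # secure=True uses HTTPS toward speedtest.net endpoints
    s.get_best_server()               # lowest-latency server among the closest candidates
    s.download()                      # rate in bits/s, also stored in s.results.download
    s.upload(pre_allocate=False)      # skip pre-building the payload; helps memory-constrained devices
    res = s.results.dict()
    print(res['ping'], res['download'], res['upload'], s.results.share())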