diff --git a/LocastService.py b/LocastService.py index 351631c..5dac200 100644 --- a/LocastService.py +++ b/LocastService.py @@ -1,9 +1,13 @@ -import json, urllib2, time, os, sys, string +# pylama:ignore=E722,E303 +import json +import urllib2 +import sys import m3u8 import re from functools import update_wrapper from datetime import datetime + def handle_url_except(f): def wrapper_func(self, *args, **kwargs): try: @@ -37,7 +41,7 @@ def __init__(self, base_folder, mock_location, zipcode): @handle_url_except - def login(self, username, password): + def login(self, username, password): # check environment vars if (username is None): @@ -55,10 +59,10 @@ def login(self, username, password): # POST # {"username":"thomas_vg1@hotmail.com","password":"xxxxxxxx"} - - loginReq = urllib2.Request('https://api.locastnet.org/api/user/login', - '{"username":"' + username + '","password":"' + password + '"}', - {'Content-Type': 'application/json'}) + + loginReq = urllib2.Request('https://api.locastnet.org/api/user/login', + '{"username":"' + username + '","password":"' + password + '"}', + {'Content-Type': 'application/json'}) loginOpn = urllib2.urlopen(loginReq) loginRes = json.load(loginOpn) @@ -73,8 +77,8 @@ def validate_user(self): print("Validating User Info...") # get user info and make sure we donated - userReq = urllib2.Request('https://api.locastnet.org/api/user/me', - headers={'Content-Type': 'application/json', 'authorization': 'Bearer ' + self.current_token}) + userReq = urllib2.Request('https://api.locastnet.org/api/user/me', + headers={'Content-Type': 'application/json', 'authorization': 'Bearer ' + self.current_token}) userOpn = urllib2.urlopen(userReq) userRes = json.load(userOpn) @@ -82,9 +86,9 @@ def validate_user(self): print("User Info obtained.") print("User didDonate: {}".format(userRes['didDonate'])) - # Check if the user has donated, and we got an actual expiration date. + # Check if the user has donated, and we got an actual expiration date. if userRes['didDonate'] and userRes['donationExpire']: - # Check if donation has expired. + # Check if donation has expired. donateExp = datetime.fromtimestamp(userRes['donationExpire'] / 1000) print("User donationExpire: {}".format(donateExp)) if datetime.now() > donateExp: @@ -99,18 +103,18 @@ def validate_user(self): print("Getting user location...") # Find the users location via lat\long or zipcode if specified,(lat\lon - # taking precedence if both are provided) otherwise use IP. Attempts to - # mirror the geolocation found at locast.org\dma. Also allows for a - # check that Locast reports the area as active. + # taking precedence if both are provided) otherwise use IP. Attempts to + # mirror the geolocation found at locast.org\dma. Also allows for a + # check that Locast reports the area as active. if self.find_location(): - print("Got location as {} - DMA {} - Lat\Lon {}\{}".format(self.current_city, - self.current_dma, - self.current_location['latitude'], - self.current_location['longitude']) - ) + print("Got location as {} - DMA {} - Lat\Lon {}\{}".format(self.current_city, + self.current_dma, + self.current_location['latitude'], + self.current_location['longitude']) + ) else: return False - # Check that Locast reports this market is currently active and available. + # Check that Locast reports this market is currently active and available. 
if not self.active_dma: print("Locast reports that this DMA\Market area is not currently active!") return False @@ -119,9 +123,9 @@ def validate_user(self): def find_location(self): ''' - Mirror the geolocation options found at locast.org/dma since we can't - rely on browser geolocation. If the user provides override coords, or - override_zipcode, resolve location based on that data. Otherwise check + Mirror the geolocation options found at locast.org/dma since we can't + rely on browser geolocation. If the user provides override coords, or + override_zipcode, resolve location based on that data. Otherwise check by external ip, (using ipinfo.io, as the site does). Calls to Locast return JSON in the following format: @@ -134,19 +138,19 @@ def find_location(self): u'active': bool, u'announcements': list, u'small_url': str - } + } ''' zip_format = re.compile(r'^[0-9]{5}$') - # Check if the user provided override coords. + # Check if the user provided override coords. if self.mock_location: return self.get_coord_location() # Check if the user provided an override zipcode, and that it's valid. elif self.zipcode and zip_format.match(self.zipcode): return self.get_zip_location() else: - # If no override zip, or not a valid ZIP, fallback to IP location. + # If no override zip, or not a valid ZIP, fallback to IP location. return self.get_ip_location() - + @handle_url_except def get_zip_location(self): print("Getting location via provided zipcode {}".format(self.zipcode)) @@ -182,7 +186,7 @@ def get_ip_location(self): self.active_dma = geoRes['active'] self.current_city = str(geoRes['name']) return True - + @handle_url_except def get_coord_location(self): print("Getting location via provided lat\lon coordinates.") @@ -210,9 +214,9 @@ def get_stations(self): try: # https://api.locastnet.org/api/watch/epg/504 # get stations - stationsReq = urllib2.Request('https://api.locastnet.org/api/watch/epg/' + str(self.current_dma), - headers={'Content-Type': 'application/json', - 'authorization': 'Bearer ' + self.current_token}) + stationsReq = urllib2.Request('https://api.locastnet.org/api/watch/epg/' + str(self.current_dma), + headers={'Content-Type': 'application/json', + 'authorization': 'Bearer ' + self.current_token}) stationsOpn = urllib2.urlopen(stationsReq) stationsRes = json.load(stationsOpn) @@ -259,13 +263,13 @@ def get_stations(self): # whether the first char is a number (checking for result like "2.1 CBS") try: # if number, get the channel and name -- we're done! - # Check if the the callsign has a float (x.x) value. Save as a + # Check if the the callsign has a float (x.x) value. 
Save as a # string though, to preserve any trailing 0s as on reported # on https://github.com/tgorgdotcom/locast2plex/issues/42 assert(float(locast_station['callSign'].split()[0])) stationsRes[index]['channel'] = locast_station['callSign'].split()[0] - + except ValueError: # result like "WDPN" or "CBS" in the callsign field, or the callsign in the name field # then we'll search the callsign in a few different lists to get the station channel @@ -279,48 +283,48 @@ def get_stations(self): # example: WABCDT2 alt_callsign_result = self.detect_callsign(locast_station['name']) - + # check the known station json that we maintain whenever locast's # reported station is iffy # first look via "callsign" value ks_result = self.find_known_station(locast_station, 'callSign', known_stations) - if ks_result != None: + if ks_result is not None: stationsRes[index]['channel'] = ks_result['channel'] skip_sub_id = ks_result['skip_sub'] # then check "name" - if (not 'channel' in stationsRes[index]): + if ('channel' not in stationsRes[index]): ks_result = self.find_known_station(locast_station, 'name', known_stations) - if ks_result != None: + if ks_result is not None: stationsRes[index]['channel'] = ks_result['channel'] skip_sub_id = ks_result['skip_sub'] # if we couldn't find anything look through fcc list for a match. # first by searching the callsign found in the "callsign" field - if (not 'channel' in stationsRes[index]) and callsign_result['verified']: + if ('channel' not in stationsRes[index]) and callsign_result['verified']: result = self.find_fcc_station(callsign_result['callsign'], fcc_market, fcc_stations) - if result != None: + if result is not None: stationsRes[index]['channel'] = result['channel'] skip_sub_id = result['analog'] - + # if we still couldn't find it, see if there's a match via the # "name" field - if (not 'channel' in stationsRes[index]) and alt_callsign_result['verified']: + if ('channel' not in stationsRes[index]) and alt_callsign_result['verified']: result = self.find_fcc_station(alt_callsign_result['callsign'], fcc_market, fcc_stations) - if result != None: + if result is not None: stationsRes[index]['channel'] = result['channel'] skip_sub_id = result['analog'] - + # locast usually adds a number in it's callsign (in either field). that # number is the subchannel if (not skip_sub_id) and ('channel' in stationsRes[index]): - if callsign_result['verified'] and (callsign_result['subchannel'] != None): + if callsign_result['verified'] and (callsign_result['subchannel'] is not None): stationsRes[index]['channel'] = stationsRes[index]['channel'] + '.' + callsign_result['subchannel'] - elif alt_callsign_result['verified'] and (alt_callsign_result['subchannel'] != None): + elif alt_callsign_result['verified'] and (alt_callsign_result['subchannel'] is not None): stationsRes[index]['channel'] = stationsRes[index]['channel'] + '.' + alt_callsign_result['subchannel'] else: stationsRes[index]['channel'] = stationsRes[index]['channel'] + '.1' @@ -328,7 +332,7 @@ def get_stations(self): # mark stations that did not get a channel, but outside of the normal range. # the user will have to weed these out in Plex... 
- if (not 'channel' in stationsRes[index]): + if ('channel' not in stationsRes[index]): stationsRes[index]['channel'] = str(noneChannel) noneChannel = noneChannel + 1 @@ -363,8 +367,9 @@ def detect_callsign(self, compare_string): compare_string = compare_string[:-2] # verify if text from "callsign" is an actual callsign - if ( ((compare_string[0] == 'K') or (compare_string[0] == 'W')) and - ((len(compare_string) == 3) or (len(compare_string) == 4)) ): + if (((compare_string[0] == 'K') + or (compare_string[0] == 'W')) and ((len(compare_string) == 3) + or (len(compare_string) == 4))): verified = True return { @@ -381,12 +386,12 @@ def detect_callsign(self, compare_string): def find_known_station(self, station, searchBy, known_stations): for known_station in known_stations: - if ( (known_station[searchBy] == station[searchBy]) and - (known_station['dma'] == station['dma']) ): + if ((known_station[searchBy] == station[searchBy]) + and (known_station['dma'] == station['dma'])): returnChannel = known_station['rootChannel'] - if known_station['subChannel'] != None: + if known_station['subChannel'] is not None: return { "channel": returnChannel + '.' + known_station['subChannel'], "skip_sub": True @@ -441,12 +446,12 @@ def get_station_stream_uri(self, station_id): print("Getting station info for " + station_id + "...") try: - videoUrlReq = urllib2.Request('https://api.locastnet.org/api/watch/station/' + - str(station_id) + '/' + - self.current_location['latitude'] + '/' + - self.current_location['longitude'], - headers={'Content-Type': 'application/json', - 'authorization': 'Bearer ' + self.current_token}) + videoUrlReq = urllib2.Request('https://api.locastnet.org/api/watch/station/' + + str(station_id) + '/' + + self.current_location['latitude'] + '/' + + self.current_location['longitude'], + headers={'Content-Type': 'application/json', + 'authorization': 'Bearer ' + self.current_token}) videoUrlOpn = urllib2.urlopen(videoUrlReq) videoUrlRes = json.load(videoUrlOpn) videoUrlOpn.close() @@ -464,37 +469,36 @@ def get_station_stream_uri(self, station_id): print("Determining best video stream for " + station_id + "...") bestStream = None - + # find the heighest stream url resolution and save it to the list videoUrlM3u = m3u8.load(videoUrlRes['streamUrl']) - + print("Found " + str(len(videoUrlM3u.playlists)) + " Playlists") - + if len(videoUrlM3u.playlists) > 0: for videoStream in videoUrlM3u.playlists: - if bestStream == None: + if bestStream is None: bestStream = videoStream - elif ((videoStream.stream_info.resolution[0] > bestStream.stream_info.resolution[0]) and - (videoStream.stream_info.resolution[1] > bestStream.stream_info.resolution[1])): + elif ((videoStream.stream_info.resolution[0] > bestStream.stream_info.resolution[0]) and + (videoStream.stream_info.resolution[1] > bestStream.stream_info.resolution[1])): bestStream = videoStream - elif ((videoStream.stream_info.resolution[0] == bestStream.stream_info.resolution[0]) and - (videoStream.stream_info.resolution[1] == bestStream.stream_info.resolution[1]) and - (videoStream.stream_info.bandwidth > bestStream.stream_info.bandwidth)): + elif ((videoStream.stream_info.resolution[0] == bestStream.stream_info.resolution[0]) and + (videoStream.stream_info.resolution[1] == bestStream.stream_info.resolution[1]) and + (videoStream.stream_info.bandwidth > bestStream.stream_info.bandwidth)): bestStream = videoStream - - if bestStream != None: - print(station_id + " will use " + - str(bestStream.stream_info.resolution[0]) + "x" +
str(bestStream.stream_info.resolution[1]) + - " resolution at " + str(bestStream.stream_info.bandwidth) + "bps") + + if bestStream is not None: + print(station_id + " will use " + + str(bestStream.stream_info.resolution[0]) + "x" + str(bestStream.stream_info.resolution[1]) + + " resolution at " + str(bestStream.stream_info.bandwidth) + "bps") return bestStream.absolute_uri else: print("No variant streams found for this station. Assuming single stream only.") return videoUrlRes['streamUrl'] - diff --git a/SSDPServer.py b/SSDPServer.py index 69824d2..5cc6bed 100644 --- a/SSDPServer.py +++ b/SSDPServer.py @@ -223,4 +223,4 @@ def do_byebye(self, usn): except (AttributeError, socket.error) as msg: logger.error("failure sending out byebye notification: %r" % msg) except KeyError as msg: - logger.error("error building byebye notification: %r" % msg) \ No newline at end of file + logger.error("error building byebye notification: %r" % msg) diff --git a/fcc_facility/get_facilities.py b/fcc_facility/get_facilities.py index 8b28613..e7e8bf1 100644 --- a/fcc_facility/get_facilities.py +++ b/fcc_facility/get_facilities.py @@ -1,4 +1,9 @@ -import urllib, zipfile, os, sys, datetime, json +import urllib +import zipfile +import os +import sys +import datetime +import json os.chdir(os.path.dirname(sys.argv[0])) @@ -32,17 +37,17 @@ if fac_line_split[12] != '': fac_status_date_split = fac_line_split[12].split('/') - + if fac_line_split[15] != '': fac_lic_expiration_date_split = fac_line_split[15].split('/') - fac_lic_expiration_date_datetime = datetime.datetime(int(fac_lic_expiration_date_split[2]), - int(fac_lic_expiration_date_split[0]), - int(fac_lic_expiration_date_split[1]), + fac_lic_expiration_date_datetime = datetime.datetime(int(fac_lic_expiration_date_split[2]), + int(fac_lic_expiration_date_split[0]), + int(fac_lic_expiration_date_split[1]), 23, 59, 59, 999999) - + if fac_line_split[21] != '': fac_callsign_eff_date_split = fac_line_split[21].split('/') - + if fac_line_split[29] != '': fac_last_change_date_split = fac_line_split[29].split('/') @@ -79,9 +84,9 @@ "last_change_date": fac_line_split[29] } - if ((fac_obj['fac_status'] == 'LICEN') - and (fac_lic_expiration_date_datetime != None) - and (fac_lic_expiration_date_datetime > current_date) + if ((fac_obj['fac_status'] == 'LICEN') + and (fac_lic_expiration_date_datetime is not None) + and (fac_lic_expiration_date_datetime > current_date) and (fac_obj['fac_service'] in ('DT', 'TX', 'TV', 'TB', 'LD', 'DC'))): sys.stdout.write(fac_obj['fac_callsign'] + '.') sys.stdout.flush() @@ -105,4 +110,4 @@ tv_dma_file.write("%s\n" % dma_list_item) print('Complete!') -print('Found ' + str(fac_found_count) + ' items.') \ No newline at end of file +print('Found ' + str(fac_found_count) + ' items.') diff --git a/m3u8/__init__.py b/m3u8/__init__.py index 40a002c..349b75b 100644 --- a/m3u8/__init__.py +++ b/m3u8/__init__.py @@ -10,10 +10,9 @@ try: from urllib.request import urlopen, Request - from urllib.error import HTTPError from urllib.parse import urlparse, urljoin except ImportError: # Python 2.x - from urllib2 import urlopen, Request, HTTPError + from urllib2 import urlopen, Request from urlparse import urlparse, urljoin from m3u8.model import (M3U8, Segment, SegmentList, PartialSegment, @@ -26,10 +25,11 @@ PYTHON_MAJOR_VERSION = sys.version_info __all__ = ('M3U8', 'Segment', 'SegmentList', 'PartialSegment', - 'PartialSegmentList', 'Key', 'Playlist', 'IFramePlaylist', - 'Media', 'MediaList', 'PlaylistList', 'Start', 'RenditionReport', - 'RenditionReportList', 
'ServerControl', 'Skip', 'PartInformation', - 'loads', 'load', 'parse', 'ParseError') + 'PartialSegmentList', 'Key', 'Playlist', 'IFramePlaylist', + 'Media', 'MediaList', 'PlaylistList', 'Start', 'RenditionReport', + 'RenditionReportList', 'ServerControl', 'Skip', 'PartInformation', + 'loads', 'load', 'parse', 'ParseError') + def loads(content, uri=None, custom_tags_parser=None): ''' diff --git a/m3u8/iso8601/__init__.py b/m3u8/iso8601/__init__.py index 11b1adc..4efd455 100644 --- a/m3u8/iso8601/__init__.py +++ b/m3u8/iso8601/__init__.py @@ -1 +1,2 @@ +# pylama:ignore=W0401,W0611 from .iso8601 import * diff --git a/m3u8/iso8601/iso8601.py b/m3u8/iso8601/iso8601.py index 0c149f6..f89a4c8 100644 --- a/m3u8/iso8601/iso8601.py +++ b/m3u8/iso8601/iso8601.py @@ -16,10 +16,8 @@ __all__ = ["parse_date", "ParseError", "UTC", "FixedOffset"] -if sys.version_info >= (3, 0, 0): - _basestring = str -else: - _basestring = basestring +if sys.version_info.major >= 3: + basestring = str # Adapted from http://delete.me.uk/2005/03/iso8601.html @@ -67,11 +65,14 @@ re.VERBOSE ) + class ParseError(Exception): """Raised when there is a problem parsing a date string""" + if sys.version_info >= (3, 2, 0): UTC = datetime.timezone.utc + def FixedOffset(offset_hours, offset_minutes, name): return datetime.timezone( datetime.timedelta( @@ -80,6 +81,7 @@ def FixedOffset(offset_hours, offset_minutes, name): else: # Yoinked from python docs ZERO = datetime.timedelta(0) + class Utc(datetime.tzinfo): """UTC Timezone @@ -150,6 +152,7 @@ def to_int(d, key, default_to_zero=False, default=None, required=True): else: return int(value) + def parse_timezone(matches, default_timezone=UTC): """Parses ISO 8601 time zone specs into tzinfo offsets @@ -171,6 +174,7 @@ def parse_timezone(matches, default_timezone=UTC): minutes = -minutes return FixedOffset(hours, minutes, description) + def parse_date(datestring, default_timezone=UTC): """Parses ISO 8601 dates into datetime objects @@ -188,7 +192,7 @@ def parse_date(datestring, default_timezone=UTC): constructing the datetime instance. """ - if not isinstance(datestring, _basestring): + if not isinstance(datestring, basestring): raise ParseError("Expecting a string %r" % datestring) m = ISO8601_REGEX.match(datestring) if not m: diff --git a/m3u8/iso8601/test_iso8601.py b/m3u8/iso8601/test_iso8601.py index 0d01ffb..82e9d69 100644 --- a/m3u8/iso8601/test_iso8601.py +++ b/m3u8/iso8601/test_iso8601.py @@ -9,19 +9,23 @@ from iso8601 import iso8601 + def test_iso8601_regex(): assert iso8601.ISO8601_REGEX.match("2006-10-11T00:14:33Z") + def test_fixedoffset_eq(): # See https://bitbucket.org/micktwomey/pyiso8601/issues/19 datetime.tzinfo() == iso8601.FixedOffset(2, 0, '+2:00') + def test_parse_no_timezone_different_default(): tz = iso8601.FixedOffset(2, 0, "test offset") d = iso8601.parse_date("2007-01-01T08:00:00", default_timezone=tz) assert d == datetime.datetime(2007, 1, 1, 8, 0, 0, 0, tz) assert d.tzinfo == tz + def test_parse_utc_different_default(): """Z should mean 'UTC', not 'default'. 
@@ -30,6 +34,7 @@ def test_parse_utc_different_default(): d = iso8601.parse_date("2007-01-01T08:00:00Z", default_timezone=tz) assert d == datetime.datetime(2007, 1, 1, 8, 0, 0, 0, iso8601.UTC) + @pytest.mark.parametrize("invalid_date, error_string", [ ("2013-10-", "Unable to parse date string"), ("2013-", "Unable to parse date string"), @@ -52,6 +57,7 @@ def test_parse_invalid_date(invalid_date, error_string): assert exc.errisinstance(iso8601.ParseError) assert str(exc.value).startswith(error_string) + @pytest.mark.parametrize("valid_date,expected_datetime,isoformat", [ ("2007-06-23 06:40:34.00Z", datetime.datetime(2007, 6, 23, 6, 40, 34, 0, iso8601.UTC), "2007-06-23T06:40:34+00:00"), # Handle a separator other than T ("1997-07-16T19:20+01:00", datetime.datetime(1997, 7, 16, 19, 20, 0, 0, iso8601.FixedOffset(1, 0, "+01:00")), "1997-07-16T19:20:00+01:00"), # Parse with no seconds diff --git a/m3u8/mixins.py b/m3u8/mixins.py index 33f228c..13ae781 100644 --- a/m3u8/mixins.py +++ b/m3u8/mixins.py @@ -1,4 +1,3 @@ - import os from m3u8.parser import is_url diff --git a/m3u8/model.py b/m3u8/model.py index 48bfd46..fcfa3e7 100644 --- a/m3u8/model.py +++ b/m3u8/model.py @@ -1,4 +1,5 @@ # coding: utf-8 +# pylama:ignore=E303 # Copyright 2014 Globo.com Player authors. All rights reserved. # Use of this source code is governed by a MIT License # license that can be found in the LICENSE file. @@ -157,11 +158,11 @@ def __init__(self, content=None, base_path=None, base_uri=None, strict=False, cu def _initialize_attributes(self): - self.keys = [ Key(base_uri=self.base_uri, **params) if params else None - for params in self.data.get('keys', []) ] - self.segments = SegmentList([ Segment(base_uri=self.base_uri, keyobject=find_key(segment.get('key', {}), self.keys), **segment) - for segment in self.data.get('segments', []) ]) - #self.keys = get_uniques([ segment.key for segment in self.segments ]) + self.keys = [Key(base_uri=self.base_uri, **params) if params else None + for params in self.data.get('keys', [])] + self.segments = SegmentList([Segment(base_uri=self.base_uri, keyobject=find_key(segment.get('key', {}), self.keys), **segment) + for segment in self.data.get('segments', [])]) + # self.keys = get_uniques([ segment.key for segment in self.segments ]) for attr, param in self.simple_attributes: setattr(self, attr, self.data.get(param)) @@ -172,18 +173,18 @@ def _initialize_attributes(self): self.files.append(key.uri) self.files.extend(self.segments.uri) - self.media = MediaList([ Media(base_uri=self.base_uri, **media) - for media in self.data.get('media', []) ]) + self.media = MediaList([Media(base_uri=self.base_uri, **media) + for media in self.data.get('media', [])]) - self.playlists = PlaylistList([ Playlist(base_uri=self.base_uri, media=self.media, **playlist) - for playlist in self.data.get('playlists', []) ]) + self.playlists = PlaylistList([Playlist(base_uri=self.base_uri, media=self.media, **playlist) + for playlist in self.data.get('playlists', [])]) self.iframe_playlists = PlaylistList() for ifr_pl in self.data.get('iframe_playlists', []): self.iframe_playlists.append(IFramePlaylist(base_uri=self.base_uri, uri=ifr_pl['uri'], iframe_stream_info=ifr_pl['iframe_stream_info']) - ) + ) self.segment_map = self.data.get('segment_map') start = self.data.get('start', None) @@ -198,15 +199,15 @@ def _initialize_attributes(self): skip = self.data.get('skip', None) self.skip = skip and Skip(**skip) - self.rendition_reports = RenditionReportList([ RenditionReport(base_uri=self.base_uri, **rendition_report) 
- for rendition_report in self.data.get('rendition_reports', []) ]) + self.rendition_reports = RenditionReportList([RenditionReport(base_uri=self.base_uri, **rendition_report) + for rendition_report in self.data.get('rendition_reports', [])]) - self.session_data = SessionDataList([ SessionData(**session_data) - for session_data in self.data.get('session_data', []) - if 'data_id' in session_data ]) + self.session_data = SessionDataList([SessionData(**session_data) + for session_data in self.data.get('session_data', []) + if 'data_id' in session_data]) - self.session_keys = [ SessionKey(base_uri=self.base_uri, **params) if params else None - for params in self.data.get('session_keys', []) ] + self.session_keys = [SessionKey(base_uri=self.base_uri, **params) if params else None + for params in self.data.get('session_keys', [])] def __unicode__(self): return self.dumps() @@ -422,7 +423,7 @@ def __init__(self, uri=None, base_uri=None, program_date_time=None, current_prog self.scte35 = scte35 self.scte35_duration = scte35_duration self.key = keyobject - self.parts = PartialSegmentList( [ PartialSegment(base_uri=self.base_uri, **partial) for partial in parts ] if parts else [] ) + self.parts = PartialSegmentList([PartialSegment(base_uri=self.base_uri, **partial) for partial in parts] if parts else []) if init_section is not None: self.init_section = InitializationSection(self.base_uri, **init_section) else: @@ -509,7 +510,7 @@ def uri(self): def by_key(self, key): - return [ segment for segment in self if segment.key == key ] + return [segment for segment in self if segment.key == key] @@ -574,12 +575,14 @@ def dumps(self, last_segment): def __str__(self): return self.dumps(None) + class PartialSegmentList(list, GroupedBasePathMixin): def __str__(self): output = [str(part) for part in self] return '\n'.join(output) + class Key(BasePathMixin): ''' Key used to encrypt the segments in a m3u8 playlist (EXT-X-KEY) @@ -636,6 +639,7 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + class InitializationSection(BasePathMixin): ''' Used to obtain Media Initialization Section required to @@ -676,9 +680,11 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + class SessionKey(Key): tag = ext_x_session_key + class Playlist(BasePathMixin): ''' Playlist object representing a link to a variant M3U8 with a specific bitrate. 
@@ -699,7 +705,7 @@ def __init__(self, uri, stream_info, media, base_uri): self.base_uri = base_uri resolution = stream_info.get('resolution') - if resolution != None: + if resolution is not None: resolution = resolution.strip('"') values = resolution.split('x') resolution_pair = (int(values[0]), int(values[1])) @@ -958,6 +964,7 @@ def __str__(self): return ext_x_start + ':' + ','.join(output) + class RenditionReport(BasePathMixin): def __init__(self, base_uri, uri, last_msn, last_part=None): self.base_uri = base_uri @@ -978,12 +985,14 @@ def dumps(self): def __str__(self): return self.dumps() + class RenditionReportList(list, GroupedBasePathMixin): def __str__(self): output = [str(report) for report in self] return '\n'.join(output) + class ServerControl(object): def __init__(self, can_skip_until=None, can_block_reload=None, hold_back=None, part_hold_back=None): @@ -1011,6 +1020,7 @@ def dumps(self): def __str__(self): return self.dumps() + class Skip(object): def __init__(self, skipped_segments=None): self.skipped_segments = skipped_segments @@ -1022,6 +1032,7 @@ def dumps(self): def __str__(self): return self.dumps() + class PartInformation(object): def __init__(self, part_target=None): self.part_target = part_target @@ -1033,6 +1044,7 @@ def dumps(self): def __str__(self): return self.dumps() + class SessionData(object): def __init__(self, data_id, value=None, uri=None, language=None): self.data_id = data_id @@ -1055,6 +1067,7 @@ def dumps(self): def __str__(self): return self.dumps() + def find_key(keydata, keylist): if not keydata: return None @@ -1071,6 +1084,7 @@ def find_key(keydata, keylist): def denormalize_attribute(attribute): return attribute.replace('_', '-').upper() + def quoted(string): return '"%s"' % string diff --git a/m3u8/parser.py b/m3u8/parser.py index d6f32b8..773fa89 100644 --- a/m3u8/parser.py +++ b/m3u8/parser.py @@ -1,4 +1,5 @@ # coding: utf-8 +# pylama:ignore=E303 # Copyright 2014 Globo.com Player authors. All rights reserved. # Use of this source code is governed by a MIT License # license that can be found in the LICENSE file. 
@@ -275,6 +276,7 @@ def _parse_attribute_list(prefix, line, atribute_parser): return attributes + def _parse_stream_inf(line, data, state): data['is_variant'] = True data['media_sequence'] = None @@ -341,12 +343,14 @@ def _parse_cueout_cont(line, state): state['current_cue_out_duration'] = res.group(1) state['current_cue_out_scte35'] = res.group(2) + def _cueout_no_duration(line): # this needs to be called first since line.split in all other # parsers will throw a ValueError if passed just this tag if line == protocol.ext_x_cue_out: return (None, None) + def _cueout_elemental(line, state, prevline): param, value = line.split(':', 1) res = re.match('.*EXT-OATCLS-SCTE35:(.*)$', prevline) @@ -355,6 +359,7 @@ def _cueout_elemental(line, state, prevline): else: return None + def _cueout_envivio(line, state, prevline): param, value = line.split(':', 1) res = re.match('.*DURATION=(.*),.*,CUE="(.*)"', value) @@ -363,6 +368,7 @@ def _cueout_envivio(line, state, prevline): else: return None + def _cueout_simple(line): # this needs to be called after _cueout_elemental # as it would capture those cues incompletely @@ -371,6 +377,7 @@ def _cueout_simple(line): if res: return (None, res.group(1)) + def _parse_cueout(line, state, prevline): _cueout_state = (_cueout_no_duration(line) or _cueout_elemental(line, state, prevline) @@ -380,18 +387,20 @@ def _parse_cueout(line, state, prevline): state['current_cue_out_scte35'] = _cueout_state[0] state['current_cue_out_duration'] = _cueout_state[1] + def _parse_server_control(line, data, state): attribute_parser = { "can_block_reload": str, - "hold_back": lambda x: float(x), - "part_hold_back": lambda x: float(x), - "can_skip_until": lambda x: float(x) + "hold_back": lambda x: float(x), + "part_hold_back": lambda x: float(x), + "can_skip_until": lambda x: float(x) } data['server_control'] = _parse_attribute_list( protocol.ext_x_server_control, line, attribute_parser ) + def _parse_part_inf(line, data, state): attribute_parser = { "part_target": lambda x: float(x) @@ -401,6 +410,7 @@ def _parse_part_inf(line, data, state): protocol.ext_x_part_inf, line, attribute_parser ) + def _parse_rendition_report(line, data, state): attribute_parser = remove_quotes_parser('uri') attribute_parser['last_msn'] = int @@ -412,6 +422,7 @@ def _parse_rendition_report(line, data, state): data['rendition_reports'].append(rendition_report) + def _parse_part(line, data, state): attribute_parser = remove_quotes_parser('uri') attribute_parser['duration'] = lambda x: float(x) @@ -434,6 +445,7 @@ def _parse_part(line, data, state): segment['parts'].append(part) + def _parse_skip(line, data, state): attribute_parser = { "skipped_segments": int @@ -441,11 +453,13 @@ def _parse_skip(line, data, state): data['skip'] = _parse_attribute_list(protocol.ext_x_skip, line, attribute_parser) + def _parse_session_data(line, data, state): quoted = remove_quotes_parser('data_id', 'value', 'uri', 'language') session_data = _parse_attribute_list(protocol.ext_x_session_data, line, quoted) data['session_data'].append(session_data) + def _parse_session_key(line, data, state): params = ATTRIBUTELISTPATTERN.split(line.replace(protocol.ext_x_session_key + ':', ''))[1::2] key = {} @@ -454,6 +468,7 @@ def _parse_session_key(line, data, state): key[normalize_attribute(name)] = remove_quotes(value) data['session_keys'].append(key) + def string_to_lines(string): return string.strip().splitlines() @@ -484,4 +499,3 @@ def normalize_attribute(attribute): def is_url(uri): return uri.startswith(('https://', 'http://')) - 
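For context before the main.py hunks below, the variant-selection rule that LocastService.get_station_stream_uri applies on top of the m3u8 library touched above is: seed the candidate with the first playlist (which is why the first check must be `bestStream is None`), then prefer a strictly higher resolution and, at equal resolution, a higher bandwidth. A minimal standalone sketch under the same assumption the patched code makes (every variant carries a RESOLUTION attribute); the helper name pick_best_variant and the master_url parameter are illustrative, not part of the patch:

import m3u8

def pick_best_variant(master_url):
    # Load the master playlist and walk its variant streams.
    master = m3u8.load(master_url)
    best = None
    for variant in master.playlists:
        if best is None:
            # The first variant seen becomes the initial candidate.
            best = variant
        elif (variant.stream_info.resolution[0] > best.stream_info.resolution[0] and
              variant.stream_info.resolution[1] > best.stream_info.resolution[1]):
            # A strictly larger width and height wins.
            best = variant
        elif (variant.stream_info.resolution == best.stream_info.resolution and
              variant.stream_info.bandwidth > best.stream_info.bandwidth):
            # Same resolution: break the tie on bandwidth.
            best = variant
    # No variants at all: the URL already points at a media playlist.
    return best.absolute_uri if best is not None else master_url
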
diff --git a/main.py b/main.py index ae60fc4..a23b2c7 100644 --- a/main.py +++ b/main.py @@ -1,8 +1,16 @@ -import subprocess, os, sys, random, threading, socket, time, errno, SocketServer, ConfigParser +# pylama:ignore=E722,E303,E302,E305 +import subprocess +import os +import sys +import random +import threading +import socket +import time +import errno +import ConfigParser import SSDPServer import LocastService from templates import templates -from functools import partial from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer from multiprocessing import Process @@ -16,7 +24,7 @@ def clean_exit(): os._exit(0) - + # with help from https://www.acmesystems.it/python_http # and https://stackoverflow.com/questions/21631799/how-can-i-pass-parameters-to-a-requesthandler @@ -36,39 +44,39 @@ class PlexHttpHandler(BaseHTTPRequestHandler): local_locast = None bytes_per_read = 1024000 - def do_GET(self): + def do_GET(self): base_url = self.address + ':' + self.port # paths and logic mostly pulled from telly:routes.go: https://github.com/tellytv/telly if (self.path == '/') or (self.path == '/device.xml'): self.send_response(200) - self.send_header('Content-type','application/xml') + self.send_header('Content-type', 'application/xml') self.end_headers() self.wfile.write(self.templates['xmlDiscover'].format(self.reporting_model, self.uuid, base_url)) elif self.path == '/discover.json': self.send_response(200) - self.send_header('Content-type','application/json') + self.send_header('Content-type', 'application/json') self.end_headers() - self.wfile.write(self.templates['jsonDiscover'].format(self.reporting_model, - self.reporting_firmware_name, + self.wfile.write(self.templates['jsonDiscover'].format(self.reporting_model, + self.reporting_firmware_name, self.tuner_count, - self.reporting_firmware_ver, - self.uuid, + self.reporting_firmware_ver, + self.uuid, base_url)) elif self.path == '/lineup_status.json': self.send_response(200) - self.send_header('Content-type','application/json') + self.send_header('Content-type', 'application/json') self.end_headers() if self.station_scan: self.wfile.write(self.templates['jsonLineupStatus']) else: self.wfile.write(self.templates['jsonLineupComplete']) - - elif self.path == '/lineup.json': # TODO + + elif self.path == '/lineup.json': # TODO self.send_response(200) - self.send_header('Content-type','application/json') + self.send_header('Content-type', 'application/json') self.end_headers() returnJSON = '' @@ -80,9 +88,9 @@ def do_GET(self): returnJSON = "[" + returnJSON + "]" self.wfile.write(returnJSON) - elif self.path == '/lineup.xml': # TODO + elif self.path == '/lineup.xml': # TODO self.send_response(200) - self.send_header('Content-type','application/xml') + self.send_header('Content-type', 'application/xml') self.end_headers() returnXML = '' for station_item in self.station_list: @@ -95,12 +103,12 @@ def do_GET(self): channelUri = self.local_locast.get_station_stream_uri(channelId) self.send_response(200) - self.send_header('Content-type','video/mpeg; codecs="avc1.4D401E') + self.send_header('Content-type', 'video/mpeg; codecs="avc1.4D401E') self.end_headers() ffmpeg_proc = subprocess.Popen(["ffmpeg", "-i", channelUri, "-codec", "copy", "-f", "mpegts", "pipe:1"], stdout=subprocess.PIPE) - + # get initial videodata. 
if that works, then keep grabbing it videoData = ffmpeg_proc.stdout.read(self.bytes_per_read) @@ -117,19 +125,19 @@ def do_GET(self): if e.errno == errno.EPIPE: # Send SIGTERM to shutdown ffmpeg ffmpeg_proc.terminate() - # ffmpeg writes a bit of data out to stderr after it terminates, - # need to read any hanging data to prevent a zombie process. + # ffmpeg writes a bit of data out to stderr after it terminates, + # need to read any hanging data to prevent a zombie process. ffmpeg_proc.communicate() break else: raise - + videoData = ffmpeg_proc.stdout.read(self.bytes_per_read) ffmpeg_proc.terminate() ffmpeg_proc.communicate() - + # elif self.path == '/epg.xml': @@ -139,16 +147,16 @@ def do_GET(self): elif self.path == '/debug.json': self.send_response(200) - self.send_header('Content-type','application/json') + self.send_header('Content-type', 'application/json') self.end_headers() else: print("Unknown request to " + self.path) self.send_response(501) - self.send_header('Content-type','text/html') + self.send_header('Content-type', 'text/html') self.end_headers() self.wfile.write(self.templates['htmlError'].format('501 - Not Implemented')) - + return @@ -183,20 +191,20 @@ def do_POST(self): self.station_scan = True self.send_response(200) - self.send_header('Content-type','text/html') + self.send_header('Content-type', 'text/html') self.end_headers() self.station_list = locast.get_stations() self.station_scan = False - + elif queryData['scan'] == 'abort': self.send_response(200) - self.send_header('Content-type','text/html') + self.send_header('Content-type', 'text/html') self.end_headers() else: print("Unknown scan command " + queryData['scan']) self.send_response(400) - self.send_header('Content-type','text/html') + self.send_header('Content-type', 'text/html') self.end_headers() self.wfile.write(self.templates['htmlError'].format(queryData['scan'] + ' is not a valid scan command')) @@ -263,9 +271,9 @@ def ssdpServerProcess(address, port, uuid): -################################### Startup Logic +# Startup Logic if __name__ == '__main__': - + # set to directory of script os.chdir(os.path.dirname(os.path.abspath(__file__))) @@ -284,7 +292,7 @@ def ssdpServerProcess(address, port, uuid): 'reporting_model': 'HDHR3-US', 'reporting_firmware_name': 'hdhomerun3_atsc', 'reporting_firmware_ver': '20150826', - 'concurrent_listeners': '10' #to convert + 'concurrent_listeners': '10' # to convert } config_handler = ConfigParser.RawConfigParser() @@ -331,7 +339,7 @@ def ssdpServerProcess(address, port, uuid): if DEVICE_UUID is None: print("No UUID found. 
Generating one now...") # from https://pynative.com/python-generate-random-string/ - # create a string that wouldn't be a real device uuid for + # create a string that wouldn't be a real device uuid for DEVICE_UUID = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8)) config_handler.set('main', 'uuid', DEVICE_UUID) @@ -346,7 +354,7 @@ def ssdpServerProcess(address, port, uuid): ffmpeg_proc = None - + if (OVERRIDE_LATITUDE is not None) and (OVERRIDE_LONGITUDE is not None): mock_location = { "latitude": OVERRIDE_LATITUDE, @@ -358,7 +366,7 @@ def ssdpServerProcess(address, port, uuid): locast = LocastService.LocastService("./", mock_location, OVERRIDE_ZIPCODE) station_list = None - + if (not locast.login(LOCAST_USERNAME, LOCAST_PASSWORD)) or (not locast.validate_user()): print("Exiting...") clean_exit() diff --git a/templates.py b/templates.py index 9c2c35b..c96e18e 100644 --- a/templates.py +++ b/templates.py @@ -86,4 +86,4 @@ "GuideName": "{}", "URL": "http://{}" }}""" -} \ No newline at end of file +}