From adb4ea2990cf41d54241dc46ca8b50db00b695a2 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Sun, 25 Nov 2012 21:01:07 -0600 Subject: [PATCH 01/67] Added the ability to override resizing the images served up from the cache if PIL is installed. Add `--noresize` to command line argument so that the user could disable the use of PIL on resizing the poster/banner cached images. (ex: they need PIL for another python app but don't want PIL to be used for sb..) Corrected the etree import so it mimics how tvdb_api does (will try cElementTree first then fall back to the slower non-C ElementTree) --- SickBeard.py | 8 ++++++-- sickbeard/__init__.py | 1 + sickbeard/webserve.py | 18 ++++++++++++------ 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/SickBeard.py b/SickBeard.py index c00f09851a..2a9c23a126 100755 --- a/SickBeard.py +++ b/SickBeard.py @@ -162,9 +162,9 @@ def main(): threading.currentThread().name = "MAIN" try: - opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'daemon', 'port=', 'pidfile=', 'nolaunch', 'config=', 'datadir=']) # @UnusedVariable + opts, args = getopt.getopt(sys.argv[1:], "qfdp::", ['quiet', 'forceupdate', 'port=', 'daemon', 'noresize', 'pidfile=', 'nolaunch', 'config=', 'datadir=']) # @UnusedVariable except getopt.GetoptError: - print "Available Options: --quiet, --forceupdate, --port, --daemon, --pidfile, --config, --datadir" + print "Available Options: --quiet, --forceupdate, --port, --daemon, --noresize, --pidfile, --nolaunch, --config, --datadir" sys.exit() forceUpdate = False @@ -198,6 +198,10 @@ def main(): consoleLogging = False sickbeard.DAEMON = True + # Prevent resizing of the banner/posters even if PIL is installed + if o in ('--noresize',): + sickbeard.NO_RESIZE = True + # Specify folder to load the config file from if o in ('--config',): sickbeard.CONFIG_FILE = os.path.abspath(a) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 460391559b..deadb509e6 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -66,6 +66,7 @@ PIDFILE = '' DAEMON = None +NO_RESIZE = False backlogSearchScheduler = None currentSearchScheduler = None diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 3a51aa749d..d69cf3093b 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -54,7 +54,10 @@ except ImportError: from lib import simplejson as json -import xml.etree.cElementTree as etree +try: + import xml.etree.cElementTree as etree +except ImportError: + import xml.etree.ElementTree as etree from sickbeard import browser @@ -2690,7 +2693,7 @@ def get_messages(self): return json.dumps(messages) - + class WebInterface: @cherrypy.expose @@ -2716,7 +2719,7 @@ def showPoster(self, show=None, which=None): return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png") cache_obj = image_cache.ImageCache() - + if which == 'poster': image_file_name = cache_obj.poster_path(showObj.tvdbid) # this is for 'banner' but also the default case @@ -2724,6 +2727,9 @@ def showPoster(self, show=None, which=None): image_file_name = cache_obj.banner_path(showObj.tvdbid) if ek.ek(os.path.isfile, image_file_name): + # use startup argument to prevent using PIL even if installed + if sickbeard.NO_RESIZE: + return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg") try: from PIL import Image from cStringIO import StringIO @@ -2740,10 +2746,10 @@ def showPoster(self, show=None, which=None): else: return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg") 
im = im.resize(size, Image.ANTIALIAS) - buffer = StringIO() - im.save(buffer, 'JPEG', quality=85) + imgbuffer = StringIO() + im.save(imgbuffer, 'JPEG', quality=85) cherrypy.response.headers['Content-Type'] = 'image/jpeg' - return buffer.getvalue() + return imgbuffer.getvalue() else: return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png") From e78a8326ab3c67e1879cbccdb375fba4ba46bd29 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Mon, 10 Dec 2012 00:56:10 -0600 Subject: [PATCH 02/67] Minor whitespace cleanup; replaced the console wrapper with one that is a little more robust and won't cause IE to freak out --- data/interfaces/default/inc_rootDirs.tmpl | 2 +- data/js/rootDirs.js | 57 +++++++++++++++-------- 2 files changed, 38 insertions(+), 21 deletions(-) diff --git a/data/interfaces/default/inc_rootDirs.tmpl b/data/interfaces/default/inc_rootDirs.tmpl index 27f995daab..b92639a9bd 100644 --- a/data/interfaces/default/inc_rootDirs.tmpl +++ b/data/interfaces/default/inc_rootDirs.tmpl @@ -12,7 +12,7 @@
- #for $cur_dir in $backend_dirs: #end for diff --git a/data/js/rootDirs.js b/data/js/rootDirs.js index a8593270a0..0e56ead8fd 100644 --- a/data/js/rootDirs.js +++ b/data/js/rootDirs.js @@ -1,27 +1,44 @@ -$(document).ready(function(){ - - function logMsg(msg) { - if (window.console && window.logMsg) - console.log(msg) +// Avoid `console` errors in browsers that lack a console. +(function() { + var method; + var noop = function noop() {}; + var methods = [ + 'assert', 'clear', 'count', 'debug', 'dir', 'dirxml', 'error', + 'exception', 'group', 'groupCollapsed', 'groupEnd', 'info', 'log', + 'markTimeline', 'profile', 'profileEnd', 'table', 'time', 'timeEnd', + 'timeStamp', 'trace', 'warn' + ]; + var length = methods.length; + var console = (window.console = window.console || {}); + + while (length--) { + method = methods[length]; + + // Only stub undefined methods. + if (!console[method]) { + console[method] = noop; + } } +}()); + +$(document).ready(function() { - function addRootDir(path){ + function addRootDir(path) { // check if it's the first one var is_default = false; if (!$('#whichDefaultRootDir').val().length) is_default = true; $('#rootDirs').append(''); - + syncOptionIDs(); - + if (is_default) setDefault($('#rootDirs option').attr('id')); refreshRootDirs(); - $.get(sbRoot+'/config/general/saveRootDirs', { rootDirString: $('#rootDirText').val() }); - + } function editRootDir(path) { @@ -43,7 +60,7 @@ $(document).ready(function(){ $('#addRootDir').click(function(){$(this).nFileBrowser(addRootDir)}); $('#editRootDir').click(function(){$(this).nFileBrowser(editRootDir, {initialDir: $("#rootDirs option:selected").val()})}); - $('#deleteRootDir').click(function(){ + $('#deleteRootDir').click(function() { if ($("#rootDirs option:selected").length) { var toDelete = $("#rootDirs option:selected"); @@ -56,15 +73,15 @@ $(document).ready(function(){ if (newDefault) { - logMsg('new default when deleting') - + console.log('new default when deleting'); + // we deleted the default so this isn't valid anymore $("#whichDefaultRootDir").val(''); // if we're deleting the default and there are options left then pick a new default if ($("#rootDirs option").length) setDefault($('#rootDirs option').attr('id')); - + } else if ($("#whichDefaultRootDir").val().length) { var old_default_num = $("#whichDefaultRootDir").val().substr(3); if (old_default_num > deleted_num) @@ -80,12 +97,12 @@ $(document).ready(function(){ if ($("#rootDirs option:selected").length) setDefault($("#rootDirs option:selected").attr('id')); refreshRootDirs(); - $.get(sbRoot+'/config/general/saveRootDirs', 'rootDirString='+$('#rootDirText').val()); + $.get(sbRoot+'/config/general/saveRootDirs', {rootDirString: $('#rootDirText').val()}); }); function setDefault(which, force){ - logMsg('setting default to '+which) + console.log('setting default to '+which); if (which != undefined && !which.length) return @@ -102,7 +119,7 @@ $(document).ready(function(){ var old_default = $('#'+$('#whichDefaultRootDir').val()); old_default.text(old_default.text().substring(1)); } - + $('#whichDefaultRootDir').val(which); } @@ -118,7 +135,7 @@ $(document).ready(function(){ if (!$("#rootDirs").length) return - + var do_disable = 'true'; // re-sync option ids @@ -148,11 +165,11 @@ $(document).ready(function(){ dir_text += '|' + $(this).val() }); log_str += 'def: '+ $('#whichDefaultRootDir').val(); - logMsg(log_str) + console.log(log_str); $('#rootDirText').val(dir_text); $('#rootDirText').change(); - logMsg('rootDirText: '+$('#rootDirText').val()) + 
console.log('rootDirText: '+$('#rootDirText').val()); } $('#rootDirs').click(refreshRootDirs); From 95c2662ef0545043c7ba6dd88abe7b6f755b428a Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Sat, 29 Dec 2012 04:32:07 -0600 Subject: [PATCH 03/67] * Removed 'unknown' as a possible archive quality from the qualityChooser/sb-api/apibuilder > As this creates a bad scenario where a 1080p bluray appears as 'low quality' or, even worse, could get replaced by some crap mobile-friendly release because 'unknown' has a higher internal value. * Added 1080p aired, 1080p web-dl, 1080i/720p mpeg2 (Raw) qualities * Adjusted CSS/sorting routines to reflect the changes. * Added related CSS for quality tags * Added '.ts' to be assumed RawHD quality --- data/css/style.css | 16 ++++- data/interfaces/default/apiBuilder.tmpl | 13 +++- data/interfaces/default/comingEpisodes.tmpl | 2 +- data/interfaces/default/history.tmpl | 2 +- data/interfaces/default/home.tmpl | 2 +- .../default/inc_qualityChooser.tmpl | 2 +- data/interfaces/default/manage.tmpl | 2 +- sickbeard/common.py | 67 ++++++++++++------- sickbeard/webapi.py | 61 ++++++++--------- 9 files changed, 99 insertions(+), 68 deletions(-) diff --git a/data/css/style.css b/data/css/style.css index 967959eade..0e564eeacc 100644 --- a/data/css/style.css +++ b/data/css/style.css @@ -1056,12 +1056,20 @@ span.quality { } span.Custom { background: none repeat scroll 0 0 #449; - /* blue */ + /* purplish blue */ +} +span.HD { + background: none repeat scroll 0 0 #008fbb; + /* greenish blue */ } -span.HD,span.WEB-DL,span.BluRay { +span.HD720p { background: none repeat scroll 0 0 #494; /* green */ } +span.HD1080p { + background: none repeat scroll 0 0 #499; + /* blue */ +} span.SD { background: none repeat scroll 0 0 #944; /* red */ @@ -1070,6 +1078,10 @@ span.Any { background: none repeat scroll 0 0 #444; /* black */ } +span.RawHD { + background: none repeat scroll 0 0 #999944; + /* dark orange */ +} /* unused boolean tags */ span.false { color: #933; diff --git a/data/interfaces/default/apiBuilder.tmpl b/data/interfaces/default/apiBuilder.tmpl index c454e7bccb..1234074cb6 100644 --- a/data/interfaces/default/apiBuilder.tmpl +++ b/data/interfaces/default/apiBuilder.tmpl @@ -190,14 +190,19 @@ addList("show.setquality", "$curShow.name", "&tvdbid=$curShow.tvdbid", "quality" //build out generic quality options addOptGroup("quality", "Quality Templates"); addOption("quality", "SD", "&initial=sdtv|sddvd"); -addOption("quality", "HD", "&initial=hdtv|hdwebdl|hdbluray"); -addOption("quality", "ANY", "&initial=sdtv|sddvd|hdtv|hdwebdl|hdbluray|unknown"); +addOption("quality", "HD", "&initial=hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray"); +addOption("quality", "HD720p", "&initial=hdtv|hdwebdl|hdbluray"); +addOption("quality", "HD1080p", "&initial=fullhdtv|fullhdwebdl|fullhdbluray"); +addOption("quality", "ANY", "&initial=sdtv|sddvd|hdtv|fullhdtv|hdwebdl|fullhdwebdl|hdbluray|fullhdbluray|unknown"); endOptGroup("quality"); addOptGroup("quality", "Inital (Custom)"); addList("quality", "SD TV", "&initial=sdtv", "quality-archive"); addList("quality", "SD DVD", "&initial=sddvd", "quality-archive"); addList("quality", "HD TV", "&initial=hdtv", "quality-archive"); +addList("quality", "RawHD TV", "&initial=rawhdtv", "quality-archive"); +addList("quality", "1080p HD TV", "&initial=fullhdtv", "quality-archive"); addList("quality", "720p Web-DL", "&initial=hdwebdl", "quality-archive"); +addList("quality", "1080p Web-DL", "&initial=fullhdwebdl", "quality-archive"); addList("quality",
"720p BluRay", "&initial=hdbluray", "quality-archive"); addList("quality", "1080p BluRay", "&initial=fullhdbluray", "quality-archive"); addList("quality", "Unknown", "&initial=unknown", "quality-archive"); @@ -211,10 +216,12 @@ addOption("quality-archive", "Optional Param", "", 1); addOptGroup("quality-archive", "Archive (Custom)"); addList("quality-archive", "SD DVD", "&archive=sddvd"); addList("quality-archive", "HD TV", "&archive=hdtv"); +addList("quality-archive", "RawHD TV", "&archive=rawhdtv"); +addList("quality-archive", "1080p HD TV", "&archive=fullhdtv"); addList("quality-archive", "720p Web-DL", "&archive=hdwebdl"); +addList("quality-archive", "1080p Web-DL", "&archive=fullhdwebdl"); addList("quality-archive", "720p BluRay", "&archive=hdbluray"); addList("quality-archive", "1080p BluRay", "&archive=fullhdbluray"); -addList("quality-archive", "Unknown", "&archive=unknown"); endOptGroup("quality-archive"); addOptGroup("quality-archive", "Random (Custom)"); addList("quality-archive", "HD TV/1080p BluRay", "&archive=hdtv|fullhdbluray"); diff --git a/data/interfaces/default/comingEpisodes.tmpl b/data/interfaces/default/comingEpisodes.tmpl index 0afd841232..40396d4b22 100644 --- a/data/interfaces/default/comingEpisodes.tmpl +++ b/data/interfaces/default/comingEpisodes.tmpl @@ -43,7 +43,7 @@ return false; }, format: function(s) { - return s.replace('hd',3).replace('sd',1).replace('any',0).replace('best',2).replace('custom',4); + return s.replace('hd1080p',5).replace('hd720p',4).replace('hd',3).replace('sd',2).replace('any',1).replace('best',0).replace('custom',7); }, type: 'numeric' }); diff --git a/data/interfaces/default/history.tmpl b/data/interfaces/default/history.tmpl index ded4e5a086..83baae470e 100644 --- a/data/interfaces/default/history.tmpl +++ b/data/interfaces/default/history.tmpl @@ -74,7 +74,7 @@ #end if #end if - $Quality.qualityStrings[$curQuality] + $Quality.qualityStrings[$curQuality] #end for diff --git a/data/interfaces/default/home.tmpl b/data/interfaces/default/home.tmpl index b6868d3696..610f32b7f2 100644 --- a/data/interfaces/default/home.tmpl +++ b/data/interfaces/default/home.tmpl @@ -39,7 +39,7 @@ return false; }, format: function(s) { - return s.replace('hd',3).replace('sd',1).replace('any',0).replace('best',2).replace('custom',4); + return s.replace('hd1080p',5).replace('hd720p',4).replace('hd',3).replace('sd',2).replace('any',1).replace('best',0).replace('custom',7); }, type: 'numeric' }); diff --git a/data/interfaces/default/inc_qualityChooser.tmpl b/data/interfaces/default/inc_qualityChooser.tmpl index 0f574fff3a..4a6d6bc2d8 100644 --- a/data/interfaces/default/inc_qualityChooser.tmpl +++ b/data/interfaces/default/inc_qualityChooser.tmpl @@ -36,7 +36,7 @@

Archive

- #set $bestQualityList = filter(lambda x: x > $Quality.SDTV, $Quality.qualityStrings) + #set $bestQualityList = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings) -#for $curPreset in sorted($qualityPresets): - +#for $curPreset in sorted($qualityPresets, reverse=True): + #end for From 5aa3b58fb74ff9e2cfedc3ef09397c53cc56dfa2 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 16 Jan 2013 14:41:53 -0600 Subject: [PATCH 08/67] Show `missing.png` on the history page if the provider that was previously used is no longer in SB/config. --- data/interfaces/default/history.tmpl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data/interfaces/default/history.tmpl b/data/interfaces/default/history.tmpl index 83baae470e..5da3b876c7 100644 --- a/data/interfaces/default/history.tmpl +++ b/data/interfaces/default/history.tmpl @@ -70,6 +70,8 @@ #set $provider = $providers.getProviderClass($generic.GenericProvider.makeID($hItem["provider"])) #if $provider != None: $provider.name + #else: + missing provider #end if #end if #end if From c0d376c223b3580b83ce9f5f7a481599300c1633 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 16 Jan 2013 19:34:31 -0600 Subject: [PATCH 09/67] Added Add1080pAndRawHDQualities migration code. This will update the user 'default' quality along with migrating the previous qualities to their new values. Also PEP8 whitespace fixes. --- sickbeard/databases/mainDB.py | 242 +++++++++++++++++++++++++--------- 1 file changed, 177 insertions(+), 65 deletions(-) diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py index 12d25ba337..a5352fdee2 100644 --- a/sickbeard/databases/mainDB.py +++ b/sickbeard/databases/mainDB.py @@ -23,17 +23,16 @@ from sickbeard.providers.generic import GenericProvider from sickbeard import encodingKludge as ek -from sickbeard.name_parser.parser import NameParser, InvalidNameException +from sickbeard.name_parser.parser import NameParser, InvalidNameException -class MainSanityCheck(db.DBSanityCheck): +class MainSanityCheck(db.DBSanityCheck): def check(self): self.fix_duplicate_shows() self.fix_duplicate_episodes() self.fix_orphan_episodes() def fix_duplicate_shows(self): - sqlResults = self.connection.select("SELECT show_id, tvdb_id, COUNT(tvdb_id) as count FROM tv_shows GROUP BY tvdb_id HAVING count > 1") for cur_duplicate in sqlResults: @@ -41,7 +40,7 @@ def fix_duplicate_shows(self): logger.log(u"Duplicate show detected! tvdb_id: " + str(cur_duplicate["tvdb_id"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG) cur_dupe_results = self.connection.select("SELECT show_id, tvdb_id FROM tv_shows WHERE tvdb_id = ? LIMIT ?", - [cur_duplicate["tvdb_id"], int(cur_duplicate["count"])-1] + [cur_duplicate["tvdb_id"], int(cur_duplicate["count"]) - 1] ) for cur_dupe_id in cur_dupe_results: @@ -52,15 +51,14 @@ def fix_duplicate_shows(self): logger.log(u"No duplicate show, check passed") def fix_duplicate_episodes(self): - sqlResults = self.connection.select("SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1") for cur_duplicate in sqlResults: - logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: "+str(cur_duplicate["season"]) + u" episode: "+str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG) + logger.log(u"Duplicate episode detected! 
showid: " + str(cur_duplicate["showid"]) + u" season: " + str(cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG) cur_dupe_results = self.connection.select("SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?", - [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"])-1] + [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"]) - 1] ) for cur_dupe_id in cur_dupe_results: @@ -71,17 +69,17 @@ def fix_duplicate_episodes(self): logger.log(u"No duplicate episode, check passed") def fix_orphan_episodes(self): - sqlResults = self.connection.select("SELECT episode_id, showid, tv_shows.tvdb_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.tvdb_id WHERE tv_shows.tvdb_id is NULL") for cur_orphan in sqlResults: logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(cur_orphan["showid"]), logger.DEBUG) - logger.log(u"Deleting orphan episode with episode_id: "+str(cur_orphan["episode_id"])) + logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"])) self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]]) else: logger.log(u"No orphan episode, check passed") + def backupDatabase(version): helpers.backupVersionedFile(db.dbFilename(), version) @@ -90,6 +88,7 @@ def backupDatabase(version): # ====================== # Add new migrations at the bottom of the list; subclass the previous migration. + class InitialSchema (db.SchemaUpgrade): def test(self): return self.hasTable("tv_shows") @@ -104,6 +103,7 @@ def execute(self): for query in queries: self.connection.action(query) + class AddTvrId (InitialSchema): def test(self): return self.hasColumn("tv_shows", "tvr_id") @@ -111,6 +111,7 @@ def test(self): def execute(self): self.addColumn("tv_shows", "tvr_id") + class AddTvrName (AddTvrId): def test(self): return self.hasColumn("tv_shows", "tvr_name") @@ -118,6 +119,7 @@ def test(self): def execute(self): self.addColumn("tv_shows", "tvr_name", "TEXT", "") + class AddAirdateIndex (AddTvrName): def test(self): return self.hasTable("idx_tv_episodes_showid_airdate") @@ -125,6 +127,7 @@ def test(self): def execute(self): self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);") + class NumericProviders (AddAirdateIndex): def test(self): return self.connection.tableInfo("history")['provider']['type'] == 'TEXT' @@ -157,12 +160,12 @@ def upgradeHistory(self, number, name): args = [curResult["action"], curResult["date"], curResult["showid"], curResult["season"], curResult["episode"], curResult["quality"], curResult["resource"], provider] self.connection.action(sql, args) + class NewQualitySettings (NumericProviders): def test(self): return self.hasTable("db_version") def execute(self): - backupDatabase(0) # old stuff that's been removed from common but we need it to upgrade @@ -229,7 +232,6 @@ def execute(self): if didUpdate: os.remove(db.dbFilename(suffix='v0')) - ### Update show qualities toUpdate = self.connection.select("SELECT * FROM tv_shows") for curUpdate in toUpdate: @@ -246,13 +248,12 @@ def execute(self): elif int(curUpdate["quality"]) == BEST: newQuality = common.BEST else: - logger.log(u"Unknown show quality: "+str(curUpdate["quality"]), logger.WARNING) + logger.log(u"Unknown show 
quality: " + str(curUpdate["quality"]), logger.WARNING) newQuality = None if newQuality: self.connection.action("UPDATE tv_shows SET quality = ? WHERE show_id = ?", [newQuality, curUpdate["show_id"]]) - ### Update history toUpdate = self.connection.select("SELECT * FROM history") for curUpdate in toUpdate: @@ -282,6 +283,7 @@ def execute(self): self.connection.action("CREATE TABLE db_version (db_version INTEGER);") self.connection.action("INSERT INTO db_version (db_version) VALUES (?)", [1]) + class DropOldHistoryTable(NewQualitySettings): def test(self): return self.checkDBVersion() >= 2 @@ -290,12 +292,12 @@ def execute(self): self.connection.action("DROP TABLE history_old") self.incDBVersion() + class UpgradeHistoryForGenericProviders(DropOldHistoryTable): def test(self): return self.checkDBVersion() >= 3 def execute(self): - providerMap = {'NZBs': 'NZBs.org', 'BinReq': 'Bin-Req', 'NZBsRUS': '''NZBs'R'US''', @@ -306,6 +308,7 @@ def execute(self): self.incDBVersion() + class AddAirByDateOption(UpgradeHistoryForGenericProviders): def test(self): return self.checkDBVersion() >= 4 @@ -314,24 +317,27 @@ def execute(self): self.connection.action("ALTER TABLE tv_shows ADD air_by_date NUMERIC") self.incDBVersion() + class ChangeSabConfigFromIpToHost(AddAirByDateOption): def test(self): return self.checkDBVersion() >= 5 - + def execute(self): sickbeard.SAB_HOST = 'http://' + sickbeard.SAB_HOST + '/sabnzbd/' self.incDBVersion() + class FixSabHostURL(ChangeSabConfigFromIpToHost): def test(self): return self.checkDBVersion() >= 6 - + def execute(self): if sickbeard.SAB_HOST.endswith('/sabnzbd/'): - sickbeard.SAB_HOST = sickbeard.SAB_HOST.replace('/sabnzbd/','/') + sickbeard.SAB_HOST = sickbeard.SAB_HOST.replace('/sabnzbd/', '/') sickbeard.save_config() self.incDBVersion() + class AddLang (FixSabHostURL): def test(self): return self.hasColumn("tv_shows", "lang") @@ -339,13 +345,14 @@ def test(self): def execute(self): self.addColumn("tv_shows", "lang", "TEXT", "en") + class PopulateRootDirs (AddLang): def test(self): return self.checkDBVersion() >= 7 - + def execute(self): dir_results = self.connection.select("SELECT location FROM tv_shows") - + dir_counts = {} for cur_dir in dir_results: cur_root_dir = ek.ek(os.path.dirname, ek.ek(os.path.normpath, cur_dir["location"])) @@ -353,31 +360,30 @@ def execute(self): dir_counts[cur_root_dir] = 1 else: dir_counts[cur_root_dir] += 1 - - logger.log(u"Dir counts: "+str(dir_counts), logger.DEBUG) - + + logger.log(u"Dir counts: " + str(dir_counts), logger.DEBUG) + if not dir_counts: self.incDBVersion() return - + default_root_dir = dir_counts.values().index(max(dir_counts.values())) - - new_root_dirs = str(default_root_dir)+'|'+'|'.join(dir_counts.keys()) - logger.log(u"Setting ROOT_DIRS to: "+new_root_dirs, logger.DEBUG) - + + new_root_dirs = str(default_root_dir) + '|' + '|'.join(dir_counts.keys()) + logger.log(u"Setting ROOT_DIRS to: " + new_root_dirs, logger.DEBUG) + sickbeard.ROOT_DIRS = new_root_dirs - + sickbeard.save_config() - + self.incDBVersion() - -class SetNzbTorrentSettings(PopulateRootDirs): + +class SetNzbTorrentSettings(PopulateRootDirs): def test(self): return self.checkDBVersion() >= 8 - - def execute(self): + def execute(self): use_torrents = False use_nzbs = False @@ -385,42 +391,40 @@ def execute(self): if cur_provider.isEnabled(): if cur_provider.providerType == GenericProvider.NZB: use_nzbs = True - logger.log(u"Provider "+cur_provider.name+" is enabled, enabling NZBs in the upgrade") + logger.log(u"Provider " + cur_provider.name + " is 
enabled, enabling NZBs in the upgrade") break elif cur_provider.providerType == GenericProvider.TORRENT: use_torrents = True - logger.log(u"Provider "+cur_provider.name+" is enabled, enabling Torrents in the upgrade") + logger.log(u"Provider " + cur_provider.name + " is enabled, enabling Torrents in the upgrade") break sickbeard.USE_TORRENTS = use_torrents sickbeard.USE_NZBS = use_nzbs - + sickbeard.save_config() - + self.incDBVersion() + class FixAirByDateSetting(SetNzbTorrentSettings): - def test(self): return self.checkDBVersion() >= 9 def execute(self): - shows = self.connection.select("SELECT * FROM tv_shows") - + for cur_show in shows: if cur_show["genre"] and "talk show" in cur_show["genre"].lower(): self.connection.action("UPDATE tv_shows SET air_by_date = ? WHERE tvdb_id = ?", [1, cur_show["tvdb_id"]]) - + self.incDBVersion() -class AddSizeAndSceneNameFields(FixAirByDateSetting): +class AddSizeAndSceneNameFields(FixAirByDateSetting): def test(self): return self.checkDBVersion() >= 10 - - def execute(self): + def execute(self): backupDatabase(10) if not self.hasColumn("tv_episodes", "file_size"): @@ -430,12 +434,12 @@ def execute(self): self.addColumn("tv_episodes", "release_name", "TEXT", "") ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes") - + logger.log(u"Adding file size to all episodes in DB, please be patient") for cur_ep in ep_results: if not cur_ep["location"]: continue - + # if there is no size yet then populate it for us if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]): cur_size = ek.ek(os.path.getsize, cur_ep["location"]) @@ -443,19 +447,19 @@ def execute(self): # check each snatch to see if we can use it to get a release name from history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC") - + logger.log(u"Adding release name to all episodes still in history") for cur_result in history_results: # find the associated download, if there isn't one then ignore it download_results = self.connection.select("SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?", [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]]) if not download_results: - logger.log(u"Found a snatch in the history for "+cur_result["resource"]+" but couldn't find the associated download, skipping it", logger.DEBUG) + logger.log(u"Found a snatch in the history for " + cur_result["resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG) continue nzb_name = cur_result["resource"] file_name = ek.ek(os.path.basename, download_results[0]["resource"]) - + # take the extension off the filename, it's not needed if '.' in file_name: file_name = file_name.rpartition('.')[0] @@ -464,20 +468,20 @@ def execute(self): ep_results = self.connection.select("SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? 
AND location != ''", [cur_result["showid"], cur_result["season"], cur_result["episode"]]) if not ep_results: - logger.log(u"The episode "+nzb_name+" was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG) + logger.log(u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG) continue - # get the status/quality of the existing ep and make sure it's what we expect + # get the status/quality of the existing ep and make sure it's what we expect ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"])) if ep_status != common.DOWNLOADED: continue - + if ep_quality != int(cur_result["quality"]): - continue + continue # make sure this is actually a real release name and not a season pack or something for cur_name in (nzb_name, file_name): - logger.log(u"Checking if "+cur_name+" is actually a good release name", logger.DEBUG) + logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG) try: np = NameParser(False) parse_result = np.parse(cur_name) @@ -491,44 +495,43 @@ def execute(self): # check each snatch to see if we can use it to get a release name from empty_results = self.connection.select("SELECT episode_id, location FROM tv_episodes WHERE release_name = ''") - + logger.log(u"Adding release name to all episodes with obvious scene filenames") for cur_result in empty_results: - + ep_file_name = ek.ek(os.path.basename, cur_result["location"]) ep_file_name = os.path.splitext(ep_file_name)[0] - + # I only want to find real scene names here so anything with a space in it is out if ' ' in ep_file_name: continue - + try: np = NameParser(False) parse_result = np.parse(ep_file_name) except InvalidNameException: continue - + if not parse_result.release_group: continue - - logger.log(u"Name "+ep_file_name+" gave release group of "+parse_result.release_group+", seems valid", logger.DEBUG) + + logger.log(u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid", logger.DEBUG) self.connection.action("UPDATE tv_episodes SET release_name = ? 
WHERE episode_id = ?", [ep_file_name, cur_result["episode_id"]]) self.incDBVersion() -class RenameSeasonFolders(AddSizeAndSceneNameFields): +class RenameSeasonFolders(AddSizeAndSceneNameFields): def test(self): return self.checkDBVersion() >= 11 - + def execute(self): - # rename the column self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows") self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)") sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows" self.connection.action(sql) - + # flip the values to be opposite of what they were before self.connection.action("UPDATE tv_shows SET flatten_folders = 2 WHERE flatten_folders = 1") self.connection.action("UPDATE tv_shows SET flatten_folders = 1 WHERE flatten_folders = 0") @@ -536,3 +539,112 @@ def execute(self): self.connection.action("DROP TABLE tmp_tv_shows") self.incDBVersion() + + +class Add1080pAndRawHDQualities(RenameSeasonFolders): + """Add support for 1080p related qualities along with RawHD + + Quick overview of what the upgrade needs to do: + + quality | old | new + -------------------------- + hdwebdl | 1<<3 | 1<<5 + hdbluray | 1<<4 | 1<<7 + fullhdbluray | 1<<5 | 1<<8 + -------------------------- + rawhdtv | | 1<<3 + fullhdtv | | 1<<4 + fullhdwebdl | | 1<<6 + """ + + def test(self): + return self.checkDBVersion() >= 12 + + def _update_status(self, old_status): + (status, quality) = common.Quality.splitCompositeStatus(old_status) + return common.Quality.compositeStatus(status, self._update_quality(quality)) + + def _update_quality(self, old_quality): + """Update bitwise flags to reflect new quality values + + Check flag bits (clear old then set their new locations) starting + with the highest bits so we dont overwrite data we need later on + """ + + result = old_quality + # move fullhdbluray from 1<<5 to 1<<8 if set + if(result & (1<<5)): + result = result & ~(1<<5) + result = result | (1<<8) + # move hdbluray from 1<<4 to 1<<7 if set + if(result & (1<<4)): + result = result & ~(1<<4) + result = result | (1<<7) + # move hdwebdl from 1<<3 to 1<<5 if set + if(result & (1<<3)): + result = result & ~(1<<3) + result = result | (1<<5) + + return result + + def _update_composite_qualities(self, status): + """Unpack, Update, Return new quality values + + Unpack the composite archive/initial values. + Update either qualities if needed. + Then return the new compsite quality value. 
+ """ + + best = (status & (0xffff << 16)) >> 16 + initial = status & (0xffff) + + best = self._update_quality(best) + initial = self._update_quality(initial) + + result = ((best << 16) | initial) + return result + + def execute(self): + backupDatabase(self.checkDBVersion()) + + # update the default quality so we dont grab the wrong qualities after migration + sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT) + sickbeard.save_config() + + # upgrade previous HD to HD720p -- shift previous qualities to new placevalues + old_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], []) + new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], []) + + # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template + old_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], []) + new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], []) + + # update qualities (including templates) + shows = self.connection.select("SELECT * FROM tv_shows") + for cur_show in shows: + if cur_show["quality"] == old_hd: + new_quality = new_hd + elif cur_show["quality"] == old_any: + new_quality = new_any + else: + new_quality = self._update_composite_qualities(cur_show["quality"]) + self.connection.action("UPDATE tv_shows SET quality = ? WHERE tvdb_id = ?", [new_quality, cur_show["tvdb_id"]]) + + # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768) + episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status/100 < 32768 AND status/100 >= 8") + for cur_episode in episodes: + self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]) + + # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together + + # update previous history so it shows the correct action + historyAction = self.connection.select("SELECT * FROM history WHERE action/100 < 32768 AND action/100 >= 8") + for cur_entry in historyAction: + self.connection.action("UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]) + + # update previous history so it shows the correct quality + historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8") + for cur_entry in historyQuality: + self.connection.action("UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]) + + self.incDBVersion() From a2d4ee99abeec0dca216b2e846e1c6d4c56596a8 Mon Sep 17 00:00:00 2001 From: Robert Massa Date: Fri, 18 Jan 2013 16:12:36 +0100 Subject: [PATCH 10/67] Add TorrentLeech provider. 
--- data/images/providers/torrentleech.png | Bin 0 -> 725 bytes data/interfaces/default/config_providers.tmpl | 23 +- sickbeard/__init__.py | 5 +- sickbeard/providers/__init__.py | 3 +- sickbeard/providers/torrentleech.py | 86 +++++++ sickbeard/webserve.py | 235 +++++++++--------- 6 files changed, 228 insertions(+), 124 deletions(-) create mode 100644 data/images/providers/torrentleech.png create mode 100644 sickbeard/providers/torrentleech.py diff --git a/data/images/providers/torrentleech.png b/data/images/providers/torrentleech.png new file mode 100644 index 0000000000000000000000000000000000000000..9d8fa87be7a428cf76084e77a89c781da1dcd9ac GIT binary patch literal 725 [base85-encoded binary image data not shown]
- +
@@ -78,14 +78,14 @@

Configure Built-In Providers

Check with provider's website on how to obtain an API key if needed.

- +
-
+
+ +
+
+ +
+
@@ -169,7 +178,7 @@
- +
@@ -222,7 +231,7 @@
+
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index e9b4eb4245..8994ce92dc 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -30,7 +30,7 @@ # apparently py2exe won't build these unless they're imported somewhere from sickbeard import providers, metadata -from providers import ezrss, tvtorrents, btn, nzbsrus, newznab, womble +from providers import ezrss, tvtorrents, torrentleech, btn, nzbsrus, newznab, womble from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser @@ -157,6 +157,9 @@ TVTORRENTS_DIGEST = None TVTORRENTS_HASH = None +TORRENTLEECH = False +TORRENTLEECH_KEY = None + BTN = False BTN_API_KEY = None diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 56fac153bf..28830668f5 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -18,6 +18,7 @@ __all__ = ['ezrss', 'tvtorrents', + 'torrentleech', 'nzbsrus', 'womble', 'btn', @@ -74,7 +75,7 @@ def getNewznabProviderList(data): providerDict[curDefault.name].name = curDefault.name providerDict[curDefault.name].url = curDefault.url providerDict[curDefault.name].needs_auth = curDefault.needs_auth - + return filter(lambda x: x, providerList) diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py new file mode 100644 index 0000000000..62fa873ea0 --- /dev/null +++ b/sickbeard/providers/torrentleech.py @@ -0,0 +1,86 @@ +# Author: Robert Massa +# URL: http://code.google.com/p/sickbeard/ +# +# This file is based upon tvtorrents.py. +# +# This file is part of Sick Beard. +# +# Sick Beard is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Sick Beard is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Sick Beard. If not, see . 
+ +from xml.dom.minidom import parseString + +import sickbeard +import generic + +from sickbeard import helpers +from sickbeard import logger +from sickbeard import tvcache + + +class TorrentLeechProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, "TorrentLeech") + + self.supportsBacklog = False + self.cache = TorrentLeechCache(self) + self.url = 'http://www.torrentleech.org/' + + def isEnabled(self): + return sickbeard.TORRENTLEECH + + def imageName(self): + return 'torrentleech.png' + + +class TorrentLeechCache(tvcache.TVCache): + + def __init__(self, provider): + tvcache.TVCache.__init__(self, provider) + + # only poll every 15 minutes + self.minTime = 15 + + def _getRSSData(self): + url = 'http://rss.torrentleech.org/' + sickbeard.TORRENTLEECH_KEY + logger.log(u"TorrentLeech cache update URL: " + url, logger.DEBUG) + + data = self.provider.getURL(url) + + parsedXML = parseString(data) + channel = parsedXML.getElementsByTagName('channel')[0] + description = channel.getElementsByTagName('description')[0] + + description_text = helpers.get_xml_text(description) + + if "Your RSS key is invalid" in description_text: + logger.log(u"TorrentLeech key invalid, check your config", logger.ERROR) + + return data + + def _parseItem(self, item): + (title, url) = self.provider._get_title_and_url(item) + + # torrentleech converts dots to spaces, undo this + title = title.replace(' ', '.') + + if not title or not url: + logger.log(u"The XML returned from the TorrentLeech RSS feed is incomplete, this result is unusable", logger.ERROR) + return + + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + + self._addCacheEntry(title, url) + +provider = TorrentLeechProvider() diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 20f0dfce8b..ac36b99ca4 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -212,19 +212,19 @@ def showEpisodeStatuses(self, tvdb_id, whichStatus): status_list = [int(whichStatus)] if status_list[0] == SNATCHED: status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER - + cur_show_results = myDB.select("SELECT season, episode, name FROM tv_episodes WHERE showid = ? 
AND season != 0 AND status IN ("+','.join(['?']*len(status_list))+")", [int(tvdb_id)] + status_list) - + result = {} for cur_result in cur_show_results: cur_season = int(cur_result["season"]) cur_episode = int(cur_result["episode"]) - + if cur_season not in result: result[cur_season] = {} - + result[cur_season][cur_episode] = cur_result["name"] - + return json.dumps(result) @cherrypy.expose @@ -237,7 +237,7 @@ def episodeStatuses(self, whichStatus=None): status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER else: status_list = [] - + t = PageTemplate(file="manage_episodeStatuses.tmpl") t.submenu = ManageMenu t.whichStatus = whichStatus @@ -245,7 +245,7 @@ def episodeStatuses(self, whichStatus=None): # if we have no status then this is as far as we need to go if not status_list: return _munge(t) - + myDB = db.DBConnection() status_results = myDB.select("SELECT show_name, tv_shows.tvdb_id as tvdb_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN ("+','.join(['?']*len(status_list))+") AND season != 0 AND tv_episodes.showid = tv_shows.tvdb_id ORDER BY show_name", status_list) @@ -258,11 +258,11 @@ def episodeStatuses(self, whichStatus=None): ep_counts[cur_tvdb_id] = 1 else: ep_counts[cur_tvdb_id] += 1 - + show_names[cur_tvdb_id] = cur_status_result["show_name"] if cur_tvdb_id not in sorted_show_ids: sorted_show_ids.append(cur_tvdb_id) - + t.show_names = show_names t.ep_counts = ep_counts t.sorted_show_ids = sorted_show_ids @@ -270,26 +270,26 @@ def episodeStatuses(self, whichStatus=None): @cherrypy.expose def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs): - + status_list = [int(oldStatus)] if status_list[0] == SNATCHED: status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER to_change = {} - + # make a list of all shows and their associated args for arg in kwargs: tvdb_id, what = arg.split('-') - + # we don't care about unchecked checkboxes if kwargs[arg] != 'on': continue - + if tvdb_id not in to_change: to_change[tvdb_id] = [] - + to_change[tvdb_id].append(what) - + myDB = db.DBConnection() for cur_tvdb_id in to_change: @@ -301,19 +301,19 @@ def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs): to_change[cur_tvdb_id] = all_eps Home().setStatus(cur_tvdb_id, '|'.join(to_change[cur_tvdb_id]), newStatus, direct=True) - + redirect('/manage/episodeStatuses') @cherrypy.expose def backlogShow(self, tvdb_id): - + show_obj = helpers.findCertainShow(sickbeard.showList, int(tvdb_id)) - + if show_obj: sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj]) #@UndefinedVariable - + redirect("/manage/backlogOverview") - + @cherrypy.expose def backlogOverview(self): @@ -383,11 +383,11 @@ def massEdit(self, toEdit=None): root_dir_list = [] for curShow in showList: - + cur_root_dir = ek.ek(os.path.dirname, curShow._location) if cur_root_dir not in root_dir_list: - root_dir_list.append(cur_root_dir) - + root_dir_list.append(cur_root_dir) + # if we know they're not all the same then no point even bothering if paused_all_same: # if we had a value already and this value is different then they're not all the same @@ -443,7 +443,7 @@ def massEditSubmit(self, paused=None, flatten_folders=None, quality_preset=False logger.log(u"For show "+showObj.name+" changing dir from "+showObj._location+" to "+new_show_dir) else: new_show_dir = showObj._location - + if paused == 'keep': new_paused = showObj.paused else: @@ -458,7 +458,7 @@ def massEditSubmit(self, paused=None, flatten_folders=None, quality_preset=False if quality_preset == 'keep': anyQualities, 
bestQualities = Quality.splitQuality(showObj.quality) - + curErrors += Home().editShow(curShow, new_show_dir, anyQualities, bestQualities, new_flatten_folders, new_paused, directCall=True) if curErrors: @@ -628,7 +628,7 @@ def index(self): @cherrypy.expose def saveRootDirs(self, rootDirString=None): sickbeard.ROOT_DIRS = rootDirString - + @cherrypy.expose def saveAddShowDefaults(self, defaultFlattenFolders, defaultStatus, anyQualities, bestQualities): @@ -643,7 +643,7 @@ def saveAddShowDefaults(self, defaultFlattenFolders, defaultStatus, anyQualities bestQualities = [] newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities)) - + sickbeard.STATUS_DEFAULT = int(defaultStatus) sickbeard.QUALITY_DEFAULT = int(newQuality) @@ -663,14 +663,14 @@ def generateKey(self): from hashlib import md5 except ImportError: from md5 import md5 - + # Create some values to seed md5 t = str(time.time()) r = str(random.random()) - + # Create the md5 instance and give it the current time m = md5(t) - + # Update the md5 instance with the random variable m.update(r) @@ -723,14 +723,14 @@ def saveGeneral(self, log_dir=None, web_port=None, web_log=None, web_ipv6=None, sickbeard.USE_API = use_api sickbeard.API_KEY = api_key - + if enable_https == "on": enable_https = 1 else: enable_https = 0 - + sickbeard.ENABLE_HTTPS = enable_https - + if not config.change_HTTPS_CERT(https_cert): results += ["Unable to create directory " + os.path.normpath(https_cert) + ", https cert dir not changed."] @@ -894,7 +894,7 @@ def savePostProcessing(self, naming_pattern=None, naming_multi_ep=None, sickbeard.metadata_provider_dict['Sony PS3'].set_config(sony_ps3_data) sickbeard.metadata_provider_dict['WDTV'].set_config(wdtv_data) sickbeard.metadata_provider_dict['TIVO'].set_config(tivo_data) - + if self.isNamingValid(naming_pattern, naming_multi_ep) != "invalid": sickbeard.NAMING_PATTERN = naming_pattern sickbeard.NAMING_MULTI_EP = int(naming_multi_ep) @@ -929,16 +929,16 @@ def testNaming(self, pattern=None, multi=None, abd=False): result = naming.test_name(pattern, multi, abd) - result = ek.ek(os.path.join, result['dir'], result['name']) + result = ek.ek(os.path.join, result['dir'], result['name']) return result - + @cherrypy.expose def isNamingValid(self, pattern=None, multi=None, abd=False): if pattern == None: return "invalid" - - # air by date shows just need one check, we don't need to worry about season folders + + # air by date shows just need one check, we don't need to worry about season folders if abd: is_valid = naming.check_valid_abd_naming(pattern) require_season_folders = False @@ -946,7 +946,7 @@ def isNamingValid(self, pattern=None, multi=None, abd=False): else: # check validity of single and multi ep cases for the whole path is_valid = naming.check_valid_naming(pattern, multi) - + # check validity of single and multi ep cases for only the file name require_season_folders = naming.check_force_season_folders(pattern, multi) @@ -957,7 +957,7 @@ def isNamingValid(self, pattern=None, multi=None, abd=False): else: return "invalid" - + class ConfigProviders: @cherrypy.expose @@ -1029,6 +1029,7 @@ def deleteNewznabProvider(self, id): def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string=None, tvtorrents_digest=None, tvtorrents_hash=None, + torrentleech_key=None, btn_api_key=None, newzbin_username=None, newzbin_password=None, provider_order=None): @@ -1092,6 +1093,8 @@ def saveProviders(self, nzbmatrix_username=None, 
nzbmatrix_apikey=None, sickbeard.EZRSS = curEnabled elif curProvider == 'tvtorrents': sickbeard.TVTORRENTS = curEnabled + elif curProvider == 'torrentleech': + sickbeard.TORRENTLEECH = curEnabled elif curProvider == 'btn': sickbeard.BTN = curEnabled elif curProvider in newznabProviderDict: @@ -1102,6 +1105,8 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.TVTORRENTS_DIGEST = tvtorrents_digest.strip() sickbeard.TVTORRENTS_HASH = tvtorrents_hash.strip() + sickbeard.TORRENTLEECH_KEY = torrentleech_key.strip() + sickbeard.BTN_API_KEY = btn_api_key.strip() sickbeard.NZBSRUS_UID = nzbs_r_us_uid.strip() @@ -1134,16 +1139,16 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif xbmc_update_library=None, xbmc_update_full=None, xbmc_host=None, xbmc_username=None, xbmc_password=None, use_plex=None, plex_notify_onsnatch=None, plex_notify_ondownload=None, plex_update_library=None, plex_server_host=None, plex_host=None, plex_username=None, plex_password=None, - use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, growl_host=None, growl_password=None, - use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, prowl_api=None, prowl_priority=0, - use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, + use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, growl_host=None, growl_password=None, + use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, prowl_api=None, prowl_priority=0, + use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, use_notifo=None, notifo_notify_onsnatch=None, notifo_notify_ondownload=None, notifo_username=None, notifo_apisecret=None, use_boxcar=None, boxcar_notify_onsnatch=None, boxcar_notify_ondownload=None, boxcar_username=None, use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, pushover_userkey=None, use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None, use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, - use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, pytivo_update_library=None, + use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, pytivo_update_library=None, pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None, use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None, nma_api=None, nma_priority=0 ): @@ -1208,7 +1213,7 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif use_growl = 1 else: use_growl = 0 - + if prowl_notify_onsnatch == "on": prowl_notify_onsnatch = 1 else: @@ -1298,7 +1303,7 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif use_pytivo = 1 else: use_pytivo = 0 - + if pytivo_notify_onsnatch == "on": pytivo_notify_onsnatch = 1 else: @@ -1408,7 +1413,7 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif sickbeard.NMA_NOTIFY_ONDOWNLOAD = nma_notify_ondownload sickbeard.NMA_API = nma_api sickbeard.NMA_PRIORITY = nma_priority - + sickbeard.save_config() if len(results) > 0: @@ -1434,7 +1439,7 @@ def index(self): general = ConfigGeneral() search = ConfigSearch() - + postProcessing = ConfigPostProcessing() providers = ConfigProviders() @@ -1567,16 +1572,16 @@ def searchTVDBForShowName(self, name, lang="en"): def massAddTable(self, 
rootDir=None): t = PageTemplate(file="home_massAddTable.tmpl") t.submenu = HomeMenu() - + myDB = db.DBConnection() if not rootDir: - return "No folders selected." + return "No folders selected." elif type(rootDir) != list: root_dirs = [rootDir] else: root_dirs = rootDir - + root_dirs = [urllib.unquote_plus(x) for x in root_dirs] default_index = int(sickbeard.ROOT_DIRS.split('|')[0]) @@ -1585,9 +1590,9 @@ def massAddTable(self, rootDir=None): if tmp in root_dirs: root_dirs.remove(tmp) root_dirs = [tmp]+root_dirs - + dir_list = [] - + for root_dir in root_dirs: try: file_list = ek.ek(os.listdir, root_dir) @@ -1599,36 +1604,36 @@ def massAddTable(self, rootDir=None): cur_path = ek.ek(os.path.normpath, ek.ek(os.path.join, root_dir, cur_file)) if not ek.ek(os.path.isdir, cur_path): continue - + cur_dir = { 'dir': cur_path, 'display_dir': ''+ek.ek(os.path.dirname, cur_path)+os.sep+''+ek.ek(os.path.basename, cur_path), } - + # see if the folder is in XBMC already dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path]) - + if dirResults: cur_dir['added_already'] = True else: cur_dir['added_already'] = False - + dir_list.append(cur_dir) - + tvdb_id = '' show_name = '' for cur_provider in sickbeard.metadata_provider_dict.values(): (tvdb_id, show_name) = cur_provider.retrieveShowMetadata(cur_path) if tvdb_id and show_name: break - + cur_dir['existing_info'] = (tvdb_id, show_name) - + if tvdb_id and helpers.findCertainShow(sickbeard.showList, tvdb_id): - cur_dir['added_already'] = True + cur_dir['added_already'] = True t.dirList = dir_list - + return _munge(t) @cherrypy.expose @@ -1639,38 +1644,38 @@ def newShow(self, show_to_add=None, other_shows=None): """ t = PageTemplate(file="home_newShow.tmpl") t.submenu = HomeMenu() - + show_dir, tvdb_id, show_name = self.split_extra_show(show_to_add) - + if tvdb_id and show_name: use_provided_info = True else: use_provided_info = False - + # tell the template whether we're giving it show name & TVDB ID t.use_provided_info = use_provided_info - - # use the given show_dir for the tvdb search if available + + # use the given show_dir for the tvdb search if available if not show_dir: t.default_show_name = '' elif not show_name: t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.',' ') else: t.default_show_name = show_name - + # carry a list of other dirs if given if not other_shows: other_shows = [] elif type(other_shows) != list: other_shows = [other_shows] - + if use_provided_info: t.provided_tvdb_id = tvdb_id t.provided_tvdb_name = show_name - + t.provided_show_dir = show_dir t.other_shows = other_shows - + return _munge(t) @cherrypy.expose @@ -1681,52 +1686,52 @@ def addNewShow(self, whichSeries=None, tvdbLang="en", rootDir=None, defaultStatu Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are provided then it forwards back to newShow, if not it goes to /home. 
""" - + # grab our list of other dirs if given if not other_shows: other_shows = [] elif type(other_shows) != list: other_shows = [other_shows] - - def finishAddShow(): + + def finishAddShow(): # if there are no extra shows then go home if not other_shows: redirect('/home') - + # peel off the next one next_show_dir = other_shows[0] rest_of_show_dirs = other_shows[1:] - + # go to add the next show return self.newShow(next_show_dir, rest_of_show_dirs) - + # if we're skipping then behave accordingly if skipShow: return finishAddShow() - + # sanity check on our inputs if (not rootDir and not fullShowPath) or not whichSeries: return "Missing params, no tvdb id or folder:"+repr(whichSeries)+" and "+repr(rootDir)+"/"+repr(fullShowPath) - + # figure out what show we're adding and where series_pieces = whichSeries.partition('|') if len(series_pieces) < 3: return "Error with show selection." - + tvdb_id = int(series_pieces[0]) show_name = series_pieces[2] - + # use the whole path if it's given, or else append the show name to the root dir to get the full show path if fullShowPath: show_dir = ek.ek(os.path.normpath, fullShowPath) else: show_dir = ek.ek(os.path.join, rootDir, helpers.sanitizeFileName(show_name)) - + # blanket policy - if the dir exists you should have used "add existing show" numbnuts if ek.ek(os.path.isdir, show_dir) and not fullShowPath: ui.notifications.error("Unable to add show", "Folder "+show_dir+" exists already") redirect('/home/addShows/existingShows') - + # don't create show dir if config says not to if sickbeard.ADD_SHOWS_WO_DIR: logger.log(u"Skipping initial creation of "+show_dir+" due to config.ini setting") @@ -1744,7 +1749,7 @@ def finishAddShow(): flatten_folders = 1 else: flatten_folders = 0 - + if not anyQualities: anyQualities = [] if not bestQualities: @@ -1754,22 +1759,22 @@ def finishAddShow(): if type(bestQualities) != list: bestQualities = [bestQualities] newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities)) - + # add the show sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(defaultStatus), newQuality, flatten_folders, tvdbLang) #@UndefinedVariable ui.notifications.message('Show added', 'Adding the specified show into '+show_dir) return finishAddShow() - + @cherrypy.expose def existingShows(self): """ - Prints out the page to add existing shows from a root dir + Prints out the page to add existing shows from a root dir """ t = PageTemplate(file="home_addExistingShow.tmpl") t.submenu = HomeMenu() - + return _munge(t) def split_extra_show(self, extra_show): @@ -1781,7 +1786,7 @@ def split_extra_show(self, extra_show): show_dir = split_vals[0] tvdb_id = split_vals[1] show_name = '|'.join(split_vals[2:]) - + return (show_dir, tvdb_id, show_name) @cherrypy.expose @@ -1796,14 +1801,14 @@ def addExistingShows(self, shows_to_add=None, promptForSettings=None): shows_to_add = [] elif type(shows_to_add) != list: shows_to_add = [shows_to_add] - + shows_to_add = [urllib.unquote_plus(x) for x in shows_to_add] - + if promptForSettings == "on": promptForSettings = 1 else: promptForSettings = 0 - + tvdb_id_given = [] dirs_only = [] # separate all the ones with TVDB IDs @@ -1820,7 +1825,7 @@ def addExistingShows(self, shows_to_add=None, promptForSettings=None): # if they want me to prompt for settings then I will just carry on to the newShow page if promptForSettings and shows_to_add: return self.newShow(shows_to_add[0], shows_to_add[1:]) - + # if they don't want me to prompt for settings then I can just add all the nfo shows 
now num_added = 0 for cur_show in tvdb_id_given: @@ -1829,7 +1834,7 @@ def addExistingShows(self, shows_to_add=None, promptForSettings=None): # add the show sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, SKIPPED, sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT) #@UndefinedVariable num_added += 1 - + if num_added: ui.notifications.message("Shows Added", "Automatically added "+str(num_added)+" from their existing metadata files") @@ -2118,7 +2123,7 @@ def testTrakt(self, api=None, username=None, password=None): @cherrypy.expose def testNMA(self, nma_api=None, nma_priority=0): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - + result = notifiers.nma_notifier.test_notify(nma_api, nma_priority) if result: return "Test NMA notice sent successfully" @@ -2513,7 +2518,7 @@ def setStatus(self, show=None, eps=None, status=None, direct=False): ep_segment = str(epObj.airdate)[:7] else: ep_segment = epObj.season - + if ep_segment not in segment_list: segment_list.append(ep_segment) @@ -2618,18 +2623,18 @@ def doRename(self, show=None, eps=None): if eps == None: redirect("/home/displayShow?show=" + show) - + for curEp in eps.split('|'): epInfo = curEp.split('x') - + # this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5", [show, epInfo[0], epInfo[1]]) if not ep_result: logger.log(u"Unable to find an episode for "+curEp+", skipping", logger.WARNING) continue related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?", [ep_result[0]["location"], epInfo[1]]) - + root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1])) for cur_related_ep in related_eps_result: related_ep_obj = show_obj.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"])) @@ -2643,7 +2648,7 @@ def doRename(self, show=None, eps=None): @cherrypy.expose def searchEpisode(self, show=None, season=None, episode=None): - # retrieve the episode object and fail if we can't get one + # retrieve the episode object and fail if we can't get one ep_obj = _getEpisode(show, season, episode) if isinstance(ep_obj, str): return json.dumps({'result': 'failure'}) @@ -2663,13 +2668,13 @@ def searchEpisode(self, show=None, season=None, episode=None): return json.dumps({'result': 'failure'}) class UI: - + @cherrypy.expose def add_message(self): - + ui.notifications.message('Test 1', 'This is test number 1') ui.notifications.error('Test 2', 'This is test number 2') - + return "ok" @cherrypy.expose @@ -2684,7 +2689,7 @@ def get_messages(self): return json.dumps(messages) - + class WebInterface: @cherrypy.expose @@ -2710,7 +2715,7 @@ def showPoster(self, show=None, which=None): return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png") cache_obj = image_cache.ImageCache() - + if which == 'poster': image_file_name = cache_obj.poster_path(showObj.tvdbid) # this is for 'banner' but also the default case @@ -2745,32 +2750,32 @@ def showPoster(self, show=None, which=None): def setComingEpsLayout(self, layout): if layout not in ('poster', 'banner', 'list'): layout = 'banner' - + sickbeard.COMING_EPS_LAYOUT = layout - + redirect("/comingEpisodes") @cherrypy.expose def toggleComingEpsDisplayPaused(self): - + sickbeard.COMING_EPS_DISPLAY_PAUSED = not sickbeard.COMING_EPS_DISPLAY_PAUSED - + redirect("/comingEpisodes") @cherrypy.expose def 
setComingEpsSort(self, sort): if sort not in ('date', 'network', 'show'): sort = 'date' - + sickbeard.COMING_EPS_SORT = sort - + redirect("/comingEpisodes") @cherrypy.expose def comingEpisodes(self, layout="None"): myDB = db.DBConnection() - + today = datetime.date.today().toordinal() next_week = (datetime.date.today() + datetime.timedelta(days=7)).toordinal() recently = (datetime.date.today() - datetime.timedelta(days=3)).toordinal() @@ -2824,7 +2829,7 @@ def comingEpisodes(self, layout="None"): t.layout = layout else: t.layout = sickbeard.COMING_EPS_LAYOUT - + return _munge(t) @@ -2835,11 +2840,11 @@ def comingEpisodes(self, layout="None"): config = Config() home = Home() - + api = Api() browser = browser.WebFileBrowser() errorlogs = ErrorLogs() - + ui = UI() From ea03dfa2510d00e0a2cc4105e0ccd2596907e75a Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Sat, 19 Jan 2013 10:53:13 -0600 Subject: [PATCH 11/67] Sort qualities on displayShow for the quality output and dropdown for force Download quality. Tighten up check for HD TV and fix 1080p HD TV check @midgetspy - found bug in common.py **nameQuality** regex check does not work if one quality ('HD TV') is part of another quality ('1080p HD TV') thus it gets wrongly matched to (HD TV). For now I changed fullhdtv string to '1080p HDTV' to bypass this for now. Possible fix is to change 'HD TV' to '720p HD TV' so it follows similar naming of web-dl/bluray.. but the negative is that everyones previous names wouldnt be right/match/need to be updated... or we change how the regex matches... --- data/interfaces/default/displayShow.tmpl | 6 +++--- sickbeard/common.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/data/interfaces/default/displayShow.tmpl b/data/interfaces/default/displayShow.tmpl index fd47c0ab50..6b00c085b4 100644 --- a/data/interfaces/default/displayShow.tmpl +++ b/data/interfaces/default/displayShow.tmpl @@ -78,10 +78,10 @@ $qualityPresetStrings[$show.quality] #else: #if $anyQualities: -initially download: <%=", ".join([Quality.qualityStrings[x] for x in anyQualities])%> #if $bestQualities then " + " else ""# +initially download: <%=", ".join([Quality.qualityStrings[x] for x in sorted(anyQualities)])%> #if $bestQualities then " + " else ""# #end if #if $bestQualities: -replace with: <%=", ".join([Quality.qualityStrings[x] for x in bestQualities])%> +replace with: <%=", ".join([Quality.qualityStrings[x] for x in sorted(bestQualities)])%> #end if #end if @@ -95,7 +95,7 @@ replace with: <%=", ".join([Quality.qualityStrings[x] for x in bestQualities]
Change selected episodes to
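Editorial aside (not part of the patch): the quality-name collision described in the commit message above comes from the exact-text scan in common.py, where 'HD TV' is a substring of '1080p HD TV' and so can be matched first. The sketch below is illustrative only; the quality_strings mapping is a small stand-in for Quality.qualityStrings, and a later patch in this series addresses the same problem differently, by iterating the real mapping in reverse key order. It simply shows that trying the longer labels first removes the ambiguity.

import re

# Stand-in for sickbeard.common.Quality.qualityStrings; the keys and labels
# here are illustrative, not the project's real constants.
quality_strings = {
    3: "HD TV",
    5: "1080p HD TV",
    8: "720p WEB-DL",
}

def name_quality(name):
    # try the longest label first so "HD TV" cannot shadow "1080p HD TV"
    for key, label in sorted(quality_strings.items(),
                             key=lambda kv: len(kv[1]), reverse=True):
        if re.search(re.escape(label), name, re.I):
            return key
    return None

print name_quality("Test Show - S01E02 - 1080p HD TV - GROUP")  # 5
print name_quality("Test Show - S01E02 - HD TV - GROUP")        # 3
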
diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py index c90365f307..0d7551139d 100644 --- a/sickbeard/providers/nzbsrus.py +++ b/sickbeard/providers/nzbsrus.py @@ -16,62 +16,108 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see . - - import urllib - +import generic import sickbeard -from sickbeard import exceptions, logger +try: + import xml.etree.cElementTree as etree +except ImportError: + import xml.etree.ElementTree as etree -from sickbeard import tvcache +from sickbeard import exceptions, logger +from sickbeard import tvcache, show_name_helpers -import generic class NZBsRUSProvider(generic.NZBProvider): - def __init__(self): - - generic.NZBProvider.__init__(self, "NZBs'R'US") - - self.cache = NZBsRUSCache(self) - - self.url = 'https://www.nzbsrus.com/' - - def isEnabled(self): - return sickbeard.NZBSRUS - - def _checkAuth(self): - if sickbeard.NZBSRUS_UID in (None, "") or sickbeard.NZBSRUS_HASH in (None, ""): - raise exceptions.AuthException("NZBs'R'US authentication details are empty, check your config") + def __init__(self): + generic.NZBProvider.__init__(self, "NZBs'R'US") + self.cache = NZBsRUSCache(self) + self.url = 'https://www.nzbsrus.com/' + self.supportsBacklog = True + + def isEnabled(self): + return sickbeard.NZBSRUS + + def _checkAuth(self): + if sickbeard.NZBSRUS_UID in (None, "") or sickbeard.NZBSRUS_HASH in (None, ""): + raise exceptions.AuthException("NZBs'R'US authentication details are empty, check your config") + + def _get_season_search_strings(self, show, season): + return [x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] + + def _get_episode_search_strings(self, ep_obj): + return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)] + + def _doSearch(self, search, show=None): + params = {'uid': sickbeard.NZBSRUS_UID, + 'key': sickbeard.NZBSRUS_HASH, + 'xml': 1, + 'age': sickbeard.USENET_RETENTION, + 'lang0': 1, # English only from CouchPotato + 'lang1': 1, + 'lang3': 1, + 'c91': 1, # TV:HD + 'c104': 1, # TV:SD-x264 + 'c75': 1, # TV:XviD + 'searchtext': search} + + if not params['age']: + params['age'] = 500 + + searchURL = self.url + 'api.php?' + urllib.urlencode(params) + logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG) + + data = self.getURL(searchURL) + if not data: + return [] + + if not data.startswith(' Date: Thu, 24 Jan 2013 16:18:24 +0000 Subject: [PATCH 13/67] Added provider for nzbX. 
--- data/images/providers/nzbx.png | Bin 0 -> 665 bytes sickbeard/__init__.py | 15 ++- sickbeard/providers/__init__.py | 1 + sickbeard/providers/nzbx.py | 166 ++++++++++++++++++++++++++++++++ sickbeard/webserve.py | 2 + 5 files changed, 182 insertions(+), 2 deletions(-) create mode 100644 data/images/providers/nzbx.png create mode 100644 sickbeard/providers/nzbx.py diff --git a/data/images/providers/nzbx.png b/data/images/providers/nzbx.png new file mode 100644 index 0000000000000000000000000000000000000000..ada2b0fcb973e05258d9363e02d89c650f2485d8 GIT binary patch literal 665 zcmV;K0%rY*P)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D02*{fSaefwW^{L9 za%BKeVQFr3E>1;MAa*k@H7+qQF!XYv0006ANkl2MR=qPQQT zJdn6(;->L;;)BT^@X`3<`_?i#uv8e47>yG*HJQ=P0 zZ@RqZ<1gXdSlrt}UBKC499gz<3x`LA z{d#ge^Q_~r!vwN#q&n?;K)I1wNzEAPK+?$;!tuX^m$N~LF(YAgSFP7h0T3_+iW08j ztOexfCqTxU;$2+!vr*mK0andcJu1(4yPh-U-L`)cU+B6%003*ZMn`_%F{>9DOCp7b79@I*vw7 zy%Fc+@{hqIRU5gt>H9dc;^@374;ovQ<{O$i;LsbpFMZr3lw#YXAv70aZyOlNI}-}o z4`i!bl;G;Nw+4RQ67pYC2hpG~10lv(gUK5u^-u3Vz-@zJme4q)7ZYPYWm5fDRhvax zS;CPGOfS0lQF0rn?_9oEo>=B4x;fiB&b@cQ8_kqM=EKfY!(V2n#6t8K2rxH2^c9?* z3dSGq?8h{%X#iU5C{x)CbN3+Sp&h~aBQXC1^X6qr-6JKI00000NkvXXu0mjf*Ci-e literal 0 HcmV?d00001 diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 0338c50567..423ecd5098 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -30,7 +30,7 @@ # apparently py2exe won't build these unless they're imported somewhere from sickbeard import providers, metadata -from providers import ezrss, tvtorrents, btn, nzbsrus, newznab, womble +from providers import ezrss, tvtorrents, btn, nzbsrus, newznab, womble, nzbx from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser @@ -177,6 +177,9 @@ WOMBLE = False +NZBX = False +NZBX_COMPLETION = 100 + NZBSRUS = False NZBSRUS_UID = None NZBSRUS_HASH = None @@ -329,7 +332,7 @@ def initialize(consoleLogging=True): showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TVDB_API_PARMS, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, \ RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ - NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, providerList, newznabProviderList, \ + NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, NZBX, NZBX_COMPLETION, providerList, newznabProviderList, \ EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \ USE_NOTIFO, NOTIFO_USERNAME, NOTIFO_APISECRET, NOTIFO_NOTIFY_ONDOWNLOAD, NOTIFO_NOTIFY_ONSNATCH, \ USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ @@ -557,6 +560,10 @@ def initialize(consoleLogging=True): CheckSection(CFG, 'Womble') WOMBLE = bool(check_setting_int(CFG, 'Womble', 'womble', 1)) + CheckSection(CFG, 'nzbX') + NZBX = bool(check_setting_int(CFG, 'nzbX', 'nzbx', 0)) + NZBX_COMPLETION = check_setting_int(CFG, 'nzbX', 'nzbx_completion', 100) + CheckSection(CFG, 'SABnzbd') SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '') SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '') @@ -1030,6 +1037,10 @@ def save_config(): new_config['Womble'] = {} new_config['Womble']['womble'] = int(WOMBLE) + new_config['nzbX'] = {} + new_config['nzbX']['nzbx'] = int(NZBX) + new_config['nzbX']['nzbx_completion'] = int(NZBX_COMPLETION) + new_config['SABnzbd'] = {} 
new_config['SABnzbd']['sab_username'] = SAB_USERNAME new_config['SABnzbd']['sab_password'] = SAB_PASSWORD diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 56fac153bf..8d90695780 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -21,6 +21,7 @@ 'nzbsrus', 'womble', 'btn', + 'nzbx' ] import sickbeard diff --git a/sickbeard/providers/nzbx.py b/sickbeard/providers/nzbx.py new file mode 100644 index 0000000000..e0ce363c98 --- /dev/null +++ b/sickbeard/providers/nzbx.py @@ -0,0 +1,166 @@ +# Author: Jordon Smith +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of Sick Beard. +# +# Sick Beard is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Sick Beard is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Sick Beard. If not, see . + +import urllib +import generic +import sickbeard + +from sickbeard import tvcache +from sickbeard import logger +from sickbeard import classes +from sickbeard import show_name_helpers +from datetime import datetime + +try: + import json +except ImportError: + from lib import simplejson as json + + +class NzbXProvider(generic.NZBProvider): + + def __init__(self): + generic.NZBProvider.__init__(self, "nzbX") + self.cache = NzbXCache(self) + self.url = 'https://nzbx.co/' + self.supportsBacklog = True + + def isEnabled(self): + return sickbeard.NZBX + + def _get_season_search_strings(self, show, season): + return [x + '*' for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] + + def _get_episode_search_strings(self, ep_obj): + return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)] + + def _get_title_and_url(self, item): + title = item['name'] + url = self.url + 'nzb?' + str(item['guid']) + '*|*' + urllib.quote_plus(title) + return (title, url) + + def _doSearch(self, search, show=None): + params = {'age': sickbeard.USENET_RETENTION, + 'completion': sickbeard.NZBX_COMPLETION, + 'cat': 'tv-hd|tv-sd', + 'limit': 250, + 'q': search} + + if not params['age']: + params['age'] = 500 + + if not params['completion']: + params['completion'] = 100 + + url = self.url + 'api/sickbeard?' + urllib.urlencode(params) + logger.log(u"nzbX search url: " + url, logger.DEBUG) + + data = self.getURL(url) + try: + items = json.loads(data) + except ValueError: + logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) + return[] + + results = [] + for item in items: + if item['name'] and item['guid']: + results.append(item) + else: + logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) + return results + + def findPropers(self, date=None): + params = {'completion': 100, + 'cat': 'tv-hd|tv-sd', + 'age': 4, + 'q': '.proper.|.repack.'} + + url = self.url + 'api/sickbeard?' 
+ urllib.urlencode(params) + logger.log(u"nzbX proper search url: " + url, logger.DEBUG) + + data = self.getURL(url) + try: + items = json.loads(data) + except ValueError: + logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) + return[] + + results = [] + for item in items: + if item['name'] and item['guid'] and item['postdate']: + name, url = self._get_title_and_url(item) + results.append(classes.Proper(name, url, datetime.fromtimestamp(item['postdate']))) + else: + logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) + return results + + +class NzbXCache(tvcache.TVCache): + + def __init__(self, provider): + tvcache.TVCache.__init__(self, provider) + self.minTime = 20 + + def _getRSSData(self): + params = {'q': '', + 'completion': sickbeard.NZBX_COMPLETION, + 'cat': 'tv-hd|tv-sd', + 'limit': 250} + + if not params['completion']: + params['completion'] = 100 + + url = self.provider.url + 'api/sickbeard?' + urllib.urlencode(params) + logger.log(u"nzbX cache update URL: " + url, logger.DEBUG) + return self.provider.getURL(url) + + def _parseItem(self, item): + title, url = self.provider._get_title_and_url(item) + logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + self._addCacheEntry(title, url) + + def updateCache(self): + if not self.shouldUpdate(): + return + + data = self._getRSSData() + # as long as the http request worked we count this as an update + if data: + self.setLastUpdate() + else: + return + + # now that we've loaded the current RSS feed lets delete the old cache + logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") + self._clearCache() + + try: + items = json.loads(data) + except ValueError: + logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) + return + + for item in items: + if item['name'] and item['guid']: + self._parseItem(item) + else: + logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) + +provider = NzbXProvider() + diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 4f0e18afd4..129cae4803 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1091,6 +1091,8 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.BINREQ = curEnabled elif curProvider == 'womble_s_index': sickbeard.WOMBLE = curEnabled + elif curProvider == 'nzbx': + sickbeard.NZBX = curEnabled elif curProvider == 'ezrss': sickbeard.EZRSS = curEnabled elif curProvider == 'tvtorrents': From 262752d6092d45c1af4b4a8ecf024eb7b85d5744 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Tue, 29 Jan 2013 15:57:25 -0600 Subject: [PATCH 14/67] Removed undocumented quality `any` from the sb-api. Unable to find this actually used on SBConnect or any app for that matter.. so shouldn't break anything. > The SB quality templates can change and we do not want people submitting a template name but rather use the quality makeup. (ex, sdtv/sddvd for SD). This way it protects people from us changing things with the templates as the app coders can still just submit what they want SD to be / handle legacy qualities easier. (if sb.api = 3 then HD is this... if its 4 then HD is now this...) 
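As a concrete illustration of the point above, an app would keep its own SD/HD presets and submit the individual quality strings instead of a Sick Beard template name. The sketch below is a hypothetical client, not part of this patch: the "initial"/"archive" parameter names and the quality strings come from the check_params calls in the diff, but the cmd=sb.setdefaults endpoint name and the pipe-separated list encoding are assumptions made for illustration.

import urllib

# Hypothetical client-side presets; only the quality strings and the
# "initial"/"archive" parameter names are taken from the patch below.
CLIENT_PRESETS = {
    "SD": ["sdtv", "sddvd"],
    "HD": ["hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl",
           "hdbluray", "fullhdbluray"],
}

def build_setdefaults_query(initial_preset, archive_preset=None):
    # the cmd name and the "|" separator are assumptions for illustration
    params = {"cmd": "sb.setdefaults",
              "initial": "|".join(CLIENT_PRESETS[initial_preset])}
    if archive_preset is not None:
        params["archive"] = "|".join(CLIENT_PRESETS[archive_preset])
    return urllib.urlencode(params)

print build_setdefaults_query("SD", "HD")

If the preset templates ever change upstream, a client built this way keeps working because it never referenced a template name in the first place.
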
--- sickbeard/webapi.py | 33 ++++++++++++++------------------- 1 file changed, 14 insertions(+), 19 deletions(-) diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index fbeb94e3c3..fb0d22af80 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -35,7 +35,7 @@ from sickbeard import encodingKludge as ek from sickbeard import search_queue from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN -from common import ANY, Quality, qualityPresetStrings, statusStrings +from common import Quality, qualityPresetStrings, statusStrings from sickbeard import image_cache from lib.tvdb_api import tvdb_api, tvdb_exceptions try: @@ -611,8 +611,7 @@ def _getQualityMap(): Quality.FULLHDWEBDL: 'fullhdwebdl', Quality.HDBLURAY: 'hdbluray', Quality.FULLHDBLURAY: 'fullhdbluray', - Quality.UNKNOWN: 'unknown', - ANY: 'any'} + Quality.UNKNOWN: 'unknown'} def _getRootDirs(): @@ -1536,8 +1535,8 @@ class CMD_SickBeardSetDefaults(ApiCall): def __init__(self, args, kwargs): # required # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown", "any"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "any"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) self.future_show_paused, args = self.check_params(args, kwargs, "future_show_paused", None, False, "bool", []) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", None, False, "bool", []) self.status, args = self.check_params(args, kwargs, "status", None, False, "string", ["wanted", "skipped", "archived", "ignored"]) @@ -1556,8 +1555,7 @@ def run(self): 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN, - 'any': ANY } + 'unknown': Quality.UNKNOWN} iqualityID = [] aqualityID = [] @@ -1689,8 +1687,8 @@ def __init__(self, args, kwargs): self.location, args = self.check_params(args, kwargs, "location", None, True, "string", []) self.tvdbid, args = self.check_params(args, kwargs, "tvdbid", None, True, "int", []) # optional - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown", "any"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "any"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", 
str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -1725,8 +1723,7 @@ def run(self): 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN, - 'any': ANY } + 'unknown': Quality.UNKNOWN} #use default quality as a failsafe newQuality = int(sickbeard.QUALITY_DEFAULT) @@ -1771,8 +1768,8 @@ def __init__(self, args, kwargs): self.tvdbid, args = self.check_params(args, kwargs, "tvdbid", None, True, "int", []) # optional self.location, args = self.check_params(args, kwargs, "location", None, False, "string", []) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown", "any"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "any"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", str(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", []) self.status, args = self.check_params(args, kwargs, "status", None, False, "string", ["wanted", "skipped", "archived", "ignored"]) self.lang, args = self.check_params(args, kwargs, "lang", "en", False, "string", self.valid_languages.keys()) @@ -1806,8 +1803,7 @@ def run(self): 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN, - 'any': ANY } + 'unknown': Quality.UNKNOWN} # use default quality as a failsafe newQuality = int(sickbeard.QUALITY_DEFAULT) @@ -2160,8 +2156,8 @@ def __init__(self, args, kwargs): # optional # this for whatever reason removes hdbluray not sdtv... which is just wrong. reverting to previous code.. plus we didnt use the new code everywhere. 
# self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", _getQualityMap().values()[1:]) - self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown", "any"]) - self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "any"]) + self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list", ["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"]) + self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", ["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl", "fullhdwebdl", "hdbluray", "fullhdbluray"]) # super, missing, help ApiCall.__init__(self, args, kwargs) @@ -2182,8 +2178,7 @@ def run(self): 'fullhdwebdl': Quality.FULLHDWEBDL, 'hdbluray': Quality.HDBLURAY, 'fullhdbluray': Quality.FULLHDBLURAY, - 'unknown': Quality.UNKNOWN, - 'any': ANY } + 'unknown': Quality.UNKNOWN} #use default quality as a failsafe newQuality = int(sickbeard.QUALITY_DEFAULT) From 370d9b518bd04560d518d9faf66008e55c0dae52 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Tue, 29 Jan 2013 22:38:44 -0600 Subject: [PATCH 15/67] PEP8 to cleanup whitespace/cosmetic concerns. --- sickbeard/show_queue.py | 67 +++++++++++++++++++++-------------------- 1 file changed, 35 insertions(+), 32 deletions(-) diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py index 4d7a28dbed..ca58fa5198 100644 --- a/sickbeard/show_queue.py +++ b/sickbeard/show_queue.py @@ -32,13 +32,13 @@ from sickbeard import name_cache from sickbeard.exceptions import ex + class ShowQueue(generic_queue.GenericQueue): def __init__(self): generic_queue.GenericQueue.__init__(self) self.queue_name = "SHOWQUEUE" - def _isInQueue(self, show, actions): return show in [x.show for x in self.queue if x.action_id in actions] @@ -68,7 +68,7 @@ def isBeingRenamed(self, show): return self._isBeingSomethinged(show, (ShowQueueActions.RENAME,)) def _getLoadingShowList(self): - return [x for x in self.queue+[self.currentItem] if x != None and x.isLoading] + return [x for x in self.queue + [self.currentItem] if x != None and x.isLoading] loadingShowList = property(_getLoadingShowList) @@ -102,7 +102,7 @@ def refreshShow(self, show, force=False): return queueItemObj = QueueItemRefresh(show) - + self.add_item(queueItemObj) return queueItemObj @@ -117,18 +117,19 @@ def renameShowEpisodes(self, show, force=False): def addShow(self, tvdb_id, showDir, default_status=None, quality=None, flatten_folders=None, lang="en"): queueItemObj = QueueItemAdd(tvdb_id, showDir, default_status, quality, flatten_folders, lang) - + self.add_item(queueItemObj) return queueItemObj + class ShowQueueActions: - REFRESH=1 - ADD=2 - UPDATE=3 - FORCEUPDATE=4 - RENAME=5 - + REFRESH = 1 + ADD = 2 + UPDATE = 3 + FORCEUPDATE = 4 + RENAME = 5 + names = {REFRESH: 'Refresh', ADD: 'Add', UPDATE: 'Update', @@ -136,6 +137,7 @@ class ShowQueueActions: RENAME: 'Rename', } + class ShowQueueItem(generic_queue.QueueItem): """ Represents an item in the queue waiting to be executed @@ -149,9 +151,9 @@ class ShowQueueItem(generic_queue.QueueItem): def __init__(self, action_id, show): generic_queue.QueueItem.__init__(self, ShowQueueActions.names[action_id], action_id) self.show = show - + def isInQueue(self): - return self in 
sickbeard.showQueueScheduler.action.queue+[sickbeard.showQueueScheduler.action.currentItem] #@UndefinedVariable + return self in sickbeard.showQueueScheduler.action.queue + [sickbeard.showQueueScheduler.action.currentItem] #@UndefinedVariable def _getName(self): return str(self.show.tvdbid) @@ -178,7 +180,7 @@ def __init__(self, tvdb_id, showDir, default_status, quality, flatten_folders, l # this will initialize self.show to None ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show) - + def _getName(self): """ Returns the show name if there is a show object created, if not returns @@ -205,7 +207,7 @@ def execute(self): ShowQueueItem.execute(self) - logger.log(u"Starting to add show "+self.showDir) + logger.log(u"Starting to add show " + self.showDir) try: # make sure the tvdb ids are valid @@ -213,9 +215,9 @@ def execute(self): ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy() if self.lang: ltvdb_api_parms['language'] = self.lang - - logger.log(u"TVDB: "+repr(ltvdb_api_parms)) - + + logger.log(u"TVDB: " + repr(ltvdb_api_parms)) + t = tvdb_api.Tvdb(**ltvdb_api_parms) s = t[self.tvdb_id] @@ -232,8 +234,8 @@ def execute(self): self._finishEarly() return except tvdb_exceptions.tvdb_exception, e: - logger.log(u"Error contacting TVDB: "+ex(e), logger.ERROR) - ui.notifications.error("Unable to add show", "Unable to look up the show in "+self.showDir+" on TVDB, not using the NFO. Delete .nfo and add manually in the correct language.") + logger.log(u"Error contacting TVDB: " + ex(e), logger.ERROR) + ui.notifications.error("Unable to add show", "Unable to look up the show in " + self.showDir + " on TVDB, not using the NFO. Delete .nfo and add manually in the correct language.") self._finishEarly() return @@ -250,15 +252,15 @@ def execute(self): self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT self.show.paused = False - + # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower(): self.show.air_by_date = 1 except tvdb_exceptions.tvdb_exception, e: - logger.log(u"Unable to add show due to an error with TVDB: "+ex(e), logger.ERROR) + logger.log(u"Unable to add show due to an error with TVDB: " + ex(e), logger.ERROR) if self.show: - ui.notifications.error("Unable to add "+str(self.show.name)+" due to an error with TVDB") + ui.notifications.error("Unable to add " + str(self.show.name) + " due to an error with TVDB") else: ui.notifications.error("Unable to add show due to an error with TVDB") self._finishEarly() @@ -271,7 +273,7 @@ def execute(self): return except Exception, e: - logger.log(u"Error trying to add show: "+ex(e), logger.ERROR) + logger.log(u"Error trying to add show: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) self._finishEarly() raise @@ -291,7 +293,7 @@ def execute(self): self.show.writeMetadata() self.show.populateCache() - + except Exception, e: logger.log(u"Error with TVDB, not creating episode list: " + ex(e), logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) @@ -304,8 +306,8 @@ def execute(self): # if they gave a custom status then change all the eps to it if self.default_status != SKIPPED: - logger.log(u"Setting all episodes to the specified default status: "+str(self.default_status)) - myDB = db.DBConnection(); + logger.log(u"Setting all episodes to the specified default status: " + str(self.default_status)) + myDB = db.DBConnection() myDB.action("UPDATE 
tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0", [self.default_status, SKIPPED, self.show.tvdbid]) # if they started with WANTED eps then run the backlog @@ -335,7 +337,7 @@ def execute(self): ShowQueueItem.execute(self) - logger.log(u"Performing refresh on "+self.show.name) + logger.log(u"Performing refresh on " + self.show.name) self.show.refreshDir() self.show.writeMetadata() @@ -394,13 +396,13 @@ def execute(self): ShowQueueItem.execute(self) - logger.log(u"Beginning update of "+self.show.name) + logger.log(u"Beginning update of " + self.show.name) logger.log(u"Retrieving show info from TVDB", logger.DEBUG) try: self.show.loadFromTVDB(cache=not self.force) except tvdb_exceptions.tvdb_error, e: - logger.log(u"Unable to contact TVDB, aborting: "+ex(e), logger.WARNING) + logger.log(u"Unable to contact TVDB, aborting: " + ex(e), logger.WARNING) return # get episode list from DB @@ -412,7 +414,7 @@ def execute(self): try: TVDBEpList = self.show.loadEpisodesFromTVDB(cache=not self.force) except tvdb_exceptions.tvdb_exception, e: - logger.log(u"Unable to get info from TVDB, the show info will not be refreshed: "+ex(e), logger.ERROR) + logger.log(u"Unable to get info from TVDB, the show info will not be refreshed: " + ex(e), logger.ERROR) TVDBEpList = None if TVDBEpList == None: @@ -423,14 +425,14 @@ def execute(self): # for each ep we found on TVDB delete it from the DB list for curSeason in TVDBEpList: for curEpisode in TVDBEpList[curSeason]: - logger.log(u"Removing "+str(curSeason)+"x"+str(curEpisode)+" from the DB list", logger.DEBUG) + logger.log(u"Removing " + str(curSeason) + "x" + str(curEpisode) + " from the DB list", logger.DEBUG) if curSeason in DBEpList and curEpisode in DBEpList[curSeason]: del DBEpList[curSeason][curEpisode] # for the remaining episodes in the DB list just delete them from the DB for curSeason in DBEpList: for curEpisode in DBEpList[curSeason]: - logger.log(u"Permanently deleting episode "+str(curSeason)+"x"+str(curEpisode)+" from the database", logger.MESSAGE) + logger.log(u"Permanently deleting episode " + str(curSeason) + "x" + str(curEpisode) + " from the database", logger.MESSAGE) curEp = self.show.getEpisode(curSeason, curEpisode) try: curEp.deleteEpisode() @@ -446,6 +448,7 @@ def execute(self): sickbeard.showQueueScheduler.action.refreshShow(self.show, True) #@UndefinedVariable + class QueueItemForceUpdate(QueueItemUpdate): def __init__(self, show=None): ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show) From 4e2e68531569796af812ae8fc906f9928e8acbe5 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Tue, 29 Jan 2013 22:40:29 -0600 Subject: [PATCH 16/67] Set default value of paused to 0 not False, seems this was the odd ball usage. As we check for 0/1 throughout the sb cheetah templates and also pass 0/1 when editing the show. This also keeps the returned data for SB-API consistent. 
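A small illustration (not part of the patch) of the consistency point: the rest of the code stores paused as 0/1, so a lone False default would serialize differently in the data the API hands back.

import json

# Sketch only: shows the serialization difference the commit message refers to.
print json.dumps({"paused": False})  # {"paused": false}
print json.dumps({"paused": 0})      # {"paused": 0}
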
--- sickbeard/show_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py index ca58fa5198..f538bbf866 100644 --- a/sickbeard/show_queue.py +++ b/sickbeard/show_queue.py @@ -251,7 +251,7 @@ def execute(self): self.show.location = self.showDir self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT - self.show.paused = False + self.show.paused = 0 # be smartish about this if self.show.genre and "talk show" in self.show.genre.lower(): From b2fb1a43ff7fca629f43e7d09074d43ba9f12a60 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 2 Feb 2013 22:00:22 -0700 Subject: [PATCH 17/67] Added three new default NZB providers --- data/images/providers/nzbfinder.png | Bin 0 -> 877 bytes data/images/providers/nzbgeek.png | Bin 0 -> 747 bytes sickbeard/providers/__init__.py | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 data/images/providers/nzbfinder.png create mode 100644 data/images/providers/nzbgeek.png diff --git a/data/images/providers/nzbfinder.png b/data/images/providers/nzbfinder.png new file mode 100644 index 0000000000000000000000000000000000000000..ac45d833b21f9eab7a136f513698d2d1aa9fff12 GIT binary patch literal 877 zcmV-z1CsoSP)b{3qw;Jm);m z^E=P~D1eD49Vn)xG?aZlC(a#|-aC&t@@=ac3Vt>flXJu2Jt88qx@x(6TusVi|xU8z&5s{`Vj>yyo8E~%+`Q?f;7N&`@N7BE@Z_vz<* z=5Bf$Q5nt!ecjAS#TlPB2cI5>=&vskx%pxTGZ9*vTlTLnwn?qeDQ2rCMMVWN(EW>i zvT0+kgG7J(^gCn|0PNYdLwYZq$62+E#@(B7`^pDLp=fZSe*G zHhfUqZteVOP4_}St4j-USi*R0QASfMdHAn1Eh_gd`TF?jfC3o#<9qV$USxcbOI=Ao zp_%gwn)lX6E4FUo`j5YH<9MwyE#BosQn65Lh&NWu!{S|xLD%)|exE~D zxvjFZ{v)Y;*)K9Nf~4ZK1%o6bzfb$%|7yE>_2gUSMJE~#eM!n!#_+TF#Q8I4m54^# zFmSsI$+}pZG6F_?5MyirV|0Koc5K@gcNFqrz^;;d60^-74Fp!AO!;Ug3$Ir#Cu_*O zY}GsG-eE=kZB*VYYb-H>t|^`+%GMXyOoT#LJgZ)-q^{ZpBIL?vr_kBxr}du3v6e7t z5h^_%nPoF$G8^iz!5FWU+cD>vx&QYlGk-#F|3CZ(v_WeIV|ttW00000NkvXXu0mjf D++mQ^ literal 0 HcmV?d00001 diff --git a/data/images/providers/nzbgeek.png b/data/images/providers/nzbgeek.png new file mode 100644 index 0000000000000000000000000000000000000000..0a2ddb86ae3071e9442968947c4b9e42fd4221a1 GIT binary patch literal 747 zcmVkHg{lTaRG2GAYPHhfh*@i}G;rmc?>e2HxOa=JCE4~r ze;yp#S9KE)E^&OWUa5Q1uHI;W6^g^Bhdw?zpX@v+C!szf2-`;!@pYC-?nECqeTOZBOha{ez9eekU zbD3K?Ni54kjG=h&??G4H1EUZmDCFYxUw-=GN2r&}l^`)syT2_iR>~DJmygfA`F02)&9Yogf`E(yI$VI-P;GYR_2%yG!{)97 zuTGyl|H*}z^ozvhn=3Jva@8ObLC|4ll}g|HAAFc+z5W*tYf%6N07t&DzWVpQdkznT zn?`ex1m#)USzTM7m^kifmm-n8wQ}?J+S;AFo8}Gxjv#^W%)aIh5&%>!1i}9%0C#u% dH*^qiJO?z)Sv~)ejGq7i002ovPDHLkV1f#9St Date: Sat, 2 Feb 2013 22:00:48 -0700 Subject: [PATCH 18/67] Fixed some bugs with episodes ending up in an unknown state and causing errors --- sickbeard/classes.py | 6 +++--- sickbeard/common.py | 3 +++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/sickbeard/classes.py b/sickbeard/classes.py index fdf7f34716..878f33e29f 100644 --- a/sickbeard/classes.py +++ b/sickbeard/classes.py @@ -23,7 +23,7 @@ import urllib import datetime -from common import USER_AGENT +from common import USER_AGENT, Quality class SickBeardURLopener(urllib.FancyURLopener): version = USER_AGENT @@ -84,7 +84,7 @@ def __init__(self, episodes): self.episodes = episodes # quality of the release - self.quality = -1 + self.quality = Quality.UNKNOWN # release name self.name = "" @@ -149,7 +149,7 @@ def __init__(self, name, url, date): self.url = url self.date = date self.provider = None - self.quality = -1 
+ self.quality = Quality.UNKNOWN self.tvdbid = -1 self.season = -1 diff --git a/sickbeard/common.py b/sickbeard/common.py index 400d20aff1..94819b477d 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -174,6 +174,9 @@ def qualityDownloaded(status): @staticmethod def splitCompositeStatus(status): + if status == UNKNOWN: + return (UNKNOWN, Quality.UNKNOWN) + """Returns a tuple containing (status, quality)""" for x in sorted(Quality.qualityStrings.keys(), reverse=True): if status > x*100: From 6cb5e760e5257d9f0b8fee1a0c3b77b1acfdfe5d Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Fri, 8 Feb 2013 00:42:55 -0700 Subject: [PATCH 19/67] Fixed the roman numeral bug in the name parser --- sickbeard/name_parser/parser.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index 6d467c76b0..5ba84b20ea 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -162,7 +162,7 @@ def _convert_number(self, number): if type(number) == int: return number - # the lazy way + # good lord I'm lazy if number.lower() == 'i': return 1 if number.lower() == 'ii': return 2 if number.lower() == 'iii': return 3 @@ -178,6 +178,20 @@ def _convert_number(self, number): if number.lower() == 'xiii': return 13 if number.lower() == 'xiv': return 14 if number.lower() == 'xv': return 15 + if number.lower() == 'xvi': return 16 + if number.lower() == 'xvii': return 17 + if number.lower() == 'xviii': return 18 + if number.lower() == 'ixx': return 19 + if number.lower() == 'xx': return 20 + if number.lower() == 'xxi': return 21 + if number.lower() == 'xxii': return 22 + if number.lower() == 'xxiii': return 23 + if number.lower() == 'xiv': return 24 + if number.lower() == 'xv': return 25 + if number.lower() == 'xvi': return 26 + if number.lower() == 'xvii': return 27 + if number.lower() == 'xviii': return 28 + if number.lower() == 'xxix': return 29 return int(number) From da31c4ac90d846cc1940e6a9eac826f4c55b58c3 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 10 Feb 2013 20:46:26 -0700 Subject: [PATCH 20/67] Added tests for the parsing bug and fixed it. 
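(Editorial aside, not part of this patch's commit message: the roman-numeral handling touched in the previous patch, and corrected again in a follow-up patch further below, enumerates every literal from i upwards. A general converter along the lines of the sketch below would cover the same range without the long if-chain; it is illustrative only and not what the parser actually does.)

# Illustrative sketch only; the shipped parser uses explicit literals.
ROMAN_TOKENS = (('x', 10), ('ix', 9), ('v', 5), ('iv', 4), ('i', 1))

def roman_to_int(numeral):
    numeral = numeral.lower()
    total, pos = 0, 0
    while pos < len(numeral):
        for token, value in ROMAN_TOKENS:
            if numeral.startswith(token, pos):
                total += value
                pos += len(token)
                break
        else:
            raise ValueError("not a roman numeral: " + numeral)
    return total

assert roman_to_int('xix') == 19
assert roman_to_int('xxviii') == 28
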
--- sickbeard/common.py | 4 ++-- tests/common_tests.py | 45 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 tests/common_tests.py diff --git a/sickbeard/common.py b/sickbeard/common.py index e61d879a45..d89e7ad419 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -90,7 +90,7 @@ class Quality: SDDVD: "SD DVD", HDTV: "HD TV", RAWHDTV: "RawHD TV", - FULLHDTV: "1080p HDTV", + FULLHDTV: "1080p HD TV", HDWEBDL: "720p WEB-DL", FULLHDWEBDL: "1080p WEB-DL", HDBLURAY: "720p BluRay", @@ -133,7 +133,7 @@ def nameQuality(name): name = os.path.basename(name) # if we have our exact text then assume we put it there - for x in Quality.qualityStrings: + for x in sorted(Quality.qualityStrings, reverse=True): if x == Quality.UNKNOWN: continue diff --git a/tests/common_tests.py b/tests/common_tests.py new file mode 100644 index 0000000000..fe1407ceb2 --- /dev/null +++ b/tests/common_tests.py @@ -0,0 +1,45 @@ +import unittest + +import sys, os.path +sys.path.append(os.path.abspath('..')) + +from sickbeard import common + +class QualityTests(unittest.TestCase): + + def test_SDTV(self): + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.PDTV.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.PDTV.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.DSR.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.DSR.XViD-GROUP")) + + def test_SDDVD(self): + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.XViD-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.x264-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.XViD-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.x264-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.XViD-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.x264-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.DiVX-GROUP")) + + def test_HDTV(self): + self.assertEqual(common.Quality.HDTV, common.Quality.nameQuality("Test.Show.S01E02.720p.HDTV.x264-GROUP")) + + def test_reverse_parsing(self): + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test Show - S01E02 - SD TV - GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test Show - S01E02 - SD DVD - GROUP")) + self.assertEqual(common.Quality.HDTV, common.Quality.nameQuality("Test Show - S01E02 - HD TV - GROUP")) + self.assertEqual(common.Quality.RAWHDTV, common.Quality.nameQuality("Test Show - S01E02 - RawHD TV - GROUP")) + self.assertEqual(common.Quality.FULLHDTV, common.Quality.nameQuality("Test Show - S01E02 - 1080p HD TV - GROUP")) + self.assertEqual(common.Quality.HDWEBDL, 
common.Quality.nameQuality("Test Show - S01E02 - 720p WEB-DL - GROUP")) + self.assertEqual(common.Quality.FULLHDWEBDL, common.Quality.nameQuality("Test Show - S01E02 - 1080p WEB-DL - GROUP")) + self.assertEqual(common.Quality.HDBLURAY, common.Quality.nameQuality("Test Show - S01E02 - 720p BluRay - GROUP")) + self.assertEqual(common.Quality.FULLHDBLURAY, common.Quality.nameQuality("Test Show - S01E02 - 1080p BluRay - GROUP")) + +if __name__ == '__main__': + suite = unittest.TestLoader().loadTestsFromTestCase(QualityTests) + unittest.TextTestRunner(verbosity=2).run(suite) From 0a0868967e6923c60d98f26301fb5396a330dad5 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 10 Feb 2013 21:23:31 -0700 Subject: [PATCH 21/67] Fixed the roman numeral stuff --- sickbeard/name_parser/parser.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py index 5ba84b20ea..d48746be61 100644 --- a/sickbeard/name_parser/parser.py +++ b/sickbeard/name_parser/parser.py @@ -181,16 +181,16 @@ def _convert_number(self, number): if number.lower() == 'xvi': return 16 if number.lower() == 'xvii': return 17 if number.lower() == 'xviii': return 18 - if number.lower() == 'ixx': return 19 + if number.lower() == 'xix': return 19 if number.lower() == 'xx': return 20 if number.lower() == 'xxi': return 21 if number.lower() == 'xxii': return 22 if number.lower() == 'xxiii': return 23 - if number.lower() == 'xiv': return 24 - if number.lower() == 'xv': return 25 - if number.lower() == 'xvi': return 26 - if number.lower() == 'xvii': return 27 - if number.lower() == 'xviii': return 28 + if number.lower() == 'xxiv': return 24 + if number.lower() == 'xxv': return 25 + if number.lower() == 'xxvi': return 26 + if number.lower() == 'xxvii': return 27 + if number.lower() == 'xxviii': return 28 if number.lower() == 'xxix': return 29 return int(number) From 4e5762579c1efd6d810321364bfe4d21c4b0dc9a Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 10 Feb 2013 21:24:48 -0700 Subject: [PATCH 22/67] Fixed a crash in the notifo provider error logging (but not the underlying issue) --- sickbeard/notifiers/notifo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/notifiers/notifo.py b/sickbeard/notifiers/notifo.py index 11337e0701..0fba8e67f0 100644 --- a/sickbeard/notifiers/notifo.py +++ b/sickbeard/notifiers/notifo.py @@ -66,7 +66,7 @@ def _sendNotifo(self, msg, title, username, apisecret, label="SickBeard"): result = json.load(data) except ValueError, e: - logger.log(u"Unable to decode JSON: "+data, logger.ERROR) + logger.log(u"Unable to decode JSON: "+repr(data), logger.ERROR) return False except IOError, e: From d500e98220b0a6f0b30c8be0ca6c03e24b4d6356 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Sun, 10 Feb 2013 22:48:41 -0600 Subject: [PATCH 23/67] Adding provider image for usenet-crawler. 
--- data/images/providers/usenet_crawler.png | Bin 0 -> 818 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 data/images/providers/usenet_crawler.png diff --git a/data/images/providers/usenet_crawler.png b/data/images/providers/usenet_crawler.png new file mode 100644 index 0000000000000000000000000000000000000000..5c48557d044c1aeef61c47d19261d7274008610b GIT binary patch literal 818 zcmV-21I_%2P)jp|a^VC`AKB0}+&EP#FdVQa#vPAVC8aL=QnDMh^{?FF{aAM$jbEM5RT8X=T}M zQgOKEd{6)B+&CLHo3-}(|Gn0~);3Jj1kOUJUuPt^M#AfpoB2|`k>m+^^mT!04)E>6 ze&&~4C4G4_1QZ*gz0E>cDT{eMUe^!1R zPn@6G2W3ZVH>&Iy2KDFkK;{1?W?qnJeY+ z*xFvQ6Rv&O|WbQY~2HCv%-@pVH;m*9{krV zs9}GmOVHI8!L5^xWKJD}e}nx$hzft2pw%i{vg-#_a?%YJZdcIit}wwFyLJ5?OSpZm z9K)SK&K!v<#oZEnP-%Z^fZ8f$=eon1ovTDHXVL!*%EK=vX%+%rYO6~Kb}pu~E1kh! wx3cKP9TgxeyONw4r!kaCWaPi^{8xYh0H*xtHVHtB6#xJL07*qoM6N<$f*vMz!vFvP literal 0 HcmV?d00001 From 620f0a83945f46f557eef614a95897dd0fbf9ad2 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 10 Feb 2013 23:11:19 -0700 Subject: [PATCH 24/67] Don't show nzb-specific provider config when nzbs are disabled --- data/interfaces/default/config_providers.tmpl | 13 +++++++------ sickbeard/webserve.py | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/data/interfaces/default/config_providers.tmpl b/data/interfaces/default/config_providers.tmpl index f1e1d873cc..d691fe0f35 100755 --- a/data/interfaces/default/config_providers.tmpl +++ b/data/interfaces/default/config_providers.tmpl @@ -87,17 +87,17 @@ #set $provider_config_list = [] #for $cur_provider in ("nzbs_r_us", "tvtorrents", "torrentleech", "btn"): #set $cur_provider_obj = $sickbeard.providers.getProviderClass($cur_provider) - #if $cur_provider_obj.providerType == $GenericProvider.NZB and not $sickbeard.USE_NZBS: - #continue - #elif $cur_provider_obj.providerType == $GenericProvider.TORRENT and not $sickbeard.USE_TORRENTS: - #continue - #end if $provider_config_list.append($cur_provider_obj) #end for #if $provider_config_list: @@ -181,6 +181,7 @@ +#if $sickbeard.USE_NZBS:
@@ -236,7 +237,7 @@
-
+#end if

diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index c6ec8d92d0..6f7fc0514e 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1030,7 +1030,7 @@ def deleteNewznabProvider(self, id): @cherrypy.expose def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, - nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string=None, + nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string='', tvtorrents_digest=None, tvtorrents_hash=None, torrentleech_key=None, btn_api_key=None, From ad57acd80b769956769e273318b24fb04c031744 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 10 Feb 2013 23:12:55 -0700 Subject: [PATCH 25/67] Fixed torrentleech provider pull to save config and detect bad auth correctly. --- sickbeard/__init__.py | 11 ++++++++++- sickbeard/providers/torrentleech.py | 22 ++++++++++------------ 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index f2de6cd26c..be349cadac 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -320,7 +320,8 @@ def initialize(consoleLogging=True): USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_UPDATE_LIBRARY, \ PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, \ showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, showList, loadingShowList, \ - NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, BTN, BTN_API_KEY, TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, \ + NZBS, NZBS_UID, NZBS_HASH, EZRSS, TVTORRENTS, TVTORRENTS_DIGEST, TVTORRENTS_HASH, BTN, BTN_API_KEY, TORRENTLEECH, TORRENTLEECH_KEY, \ + TORRENT_DIR, USENET_RETENTION, SOCKET_TIMEOUT, \ SEARCH_FREQUENCY, DEFAULT_SEARCH_FREQUENCY, BACKLOG_SEARCH_FREQUENCY, \ QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, STATUS_DEFAULT, \ GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, \ @@ -537,6 +538,10 @@ def initialize(consoleLogging=True): BTN = bool(check_setting_int(CFG, 'BTN', 'btn', 0)) BTN_API_KEY = check_setting_str(CFG, 'BTN', 'btn_api_key', '') + CheckSection(CFG, 'TorrentLeech') + TORRENTLEECH = bool(check_setting_int(CFG, 'TorrentLeech', 'torrentleech', 0)) + TORRENTLEECH_KEY = check_setting_str(CFG, 'TorrentLeech', 'torrentleech_key', '') + CheckSection(CFG, 'NZBs') NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0)) NZBS_UID = check_setting_str(CFG, 'NZBs', 'nzbs_uid', '') @@ -1010,6 +1015,10 @@ def save_config(): new_config['BTN']['btn'] = int(BTN) new_config['BTN']['btn_api_key'] = BTN_API_KEY + new_config['TorrentLeech'] = {} + new_config['TorrentLeech']['torrentleech'] = int(TORRENTLEECH) + new_config['TorrentLeech']['torrentleech_key'] = TORRENTLEECH_KEY + new_config['NZBs'] = {} new_config['NZBs']['nzbs'] = int(NZBS) new_config['NZBs']['nzbs_uid'] = NZBS_UID diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index 62fa873ea0..d79db76d12 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -23,9 +23,7 @@ import sickbeard import generic -from sickbeard import helpers -from sickbeard import logger -from sickbeard import tvcache +from sickbeard import helpers, logger, exceptions, tvcache class TorrentLeechProvider(generic.TorrentProvider): @@ -53,23 +51,23 @@ def __init__(self, provider): self.minTime = 15 def _getRSSData(self): + + if not sickbeard.TORRENTLEECH_KEY: + raise exceptions.AuthException("TorrentLeech requires an API key to work correctly") + url = 'http://rss.torrentleech.org/' + 
sickbeard.TORRENTLEECH_KEY logger.log(u"TorrentLeech cache update URL: " + url, logger.DEBUG) data = self.provider.getURL(url) - parsedXML = parseString(data) - channel = parsedXML.getElementsByTagName('channel')[0] - description = channel.getElementsByTagName('description')[0] - - description_text = helpers.get_xml_text(description) - - if "Your RSS key is invalid" in description_text: - logger.log(u"TorrentLeech key invalid, check your config", logger.ERROR) - return data def _parseItem(self, item): + description = helpers.get_xml_text(item.getElementsByTagName('description')[0]) + + if "Your RSS key is invalid" in description: + raise exceptions.AuthException("TorrentLeech key invalid") + (title, url) = self.provider._get_title_and_url(item) # torrentleech converts dots to spaces, undo this From 9a118d865174ebdd2f60c8dfba8c22e23f7d4f5f Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 13 Feb 2013 00:45:44 -0600 Subject: [PATCH 26/67] Added `webrip` as a valid string to the associated qualities and did some cleanup. Added 1080p related qualities to the common_tests as well as update their order to follow our regex matching. --- sickbeard/common.py | 17 ++++++------- tests/common_tests.py | 56 +++++++++++++++++++++++++++++++++++++------ 2 files changed, 58 insertions(+), 15 deletions(-) diff --git a/sickbeard/common.py b/sickbeard/common.py index d89e7ad419..22173305d6 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -17,7 +17,8 @@ # along with Sick Beard. If not, see . import os.path -import operator, platform +import operator +import platform import re from sickbeard import version @@ -144,23 +145,23 @@ def nameQuality(name): checkName = lambda list, func: func([re.search(x, name, re.I) for x in list]) - if checkName(["(pdtv|hdtv|dsr|tvrip).(xvid|x264)"], all) and not checkName(["(720|1080)[pi]"], all): + if checkName(["(pdtv|hdtv|dsr|tvrip|webrip).(xvid|x264)"], all) and not checkName(["(720|1080)[pi]"], all): return Quality.SDTV elif checkName(["(dvdrip|bdrip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all): return Quality.SDDVD elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(["(1080)[pi]"], all): return Quality.HDTV - elif checkName(["720p", "hdtv", "mpeg2"], all) or checkName(["1080i", "hdtv", "mpeg2"], all): + elif checkName(["720p|1080i", "hdtv", "mpeg2"], all): return Quality.RAWHDTV elif checkName(["1080p", "hdtv", "x264"], all): return Quality.FULLHDTV - elif checkName(["720p", "web.dl"], all) or checkName(["720p", "itunes", "h.?264"], all): + elif checkName(["720p", "web.dl|webrip"], all) or checkName(["720p", "itunes", "h.?264"], all): return Quality.HDWEBDL - elif checkName(["1080p", "web.dl"], all) or checkName(["1080p", "itunes", "h.?264"], all): + elif checkName(["1080p", "web.dl|webrip"], all) or checkName(["1080p", "itunes", "h.?264"], all): return Quality.FULLHDWEBDL - elif checkName(["720p", "bluray", "x264"], all) or checkName(["720p", "hddvd", "x264"], all): + elif checkName(["720p", "bluray|hddvd", "x264"], all): return Quality.HDBLURAY - elif checkName(["1080p", "bluray", "x264"], all) or checkName(["1080p", "hddvd", "x264"], all): + elif checkName(["1080p", "bluray|hddvd", "x264"], all): return Quality.FULLHDBLURAY else: return Quality.UNKNOWN @@ -188,7 +189,7 @@ def qualityDownloaded(status): def splitCompositeStatus(status): if status == UNKNOWN: return (UNKNOWN, Quality.UNKNOWN) - + """Returns a tuple containing (status, quality)""" for x in 
sorted(Quality.qualityStrings.keys(), reverse=True): if status > x * 100: diff --git a/tests/common_tests.py b/tests/common_tests.py index fe1407ceb2..ef10ebd7e0 100644 --- a/tests/common_tests.py +++ b/tests/common_tests.py @@ -1,33 +1,74 @@ import unittest -import sys, os.path +import sys +import os.path sys.path.append(os.path.abspath('..')) from sickbeard import common + class QualityTests(unittest.TestCase): + # TODO: repack / proper ? air-by-date ? season rip? multi-ep? + def test_SDTV(self): - self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.x264-GROUP")) - self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.XViD-GROUP")) self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.PDTV.XViD-GROUP")) self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.PDTV.x264-GROUP")) - self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.DSR.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.HDTV.x264-GROUP")) self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.DSR.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.DSR.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.TVRip.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.TVRip.x264-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.WEBRip.XViD-GROUP")) + self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test.Show.S01E02.WEBRip.x264-GROUP")) def test_SDDVD(self): self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.XViD-GROUP")) - self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.x264-GROUP")) self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRiP.x264-GROUP")) self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.XViD-GROUP")) - self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.x264-GROUP")) self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.DVDRip.WS.x264-GROUP")) self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.XViD-GROUP")) - self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.x264-GROUP")) self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.x264-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.WS.XViD-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.WS.DiVX-GROUP")) + self.assertEqual(common.Quality.SDDVD, common.Quality.nameQuality("Test.Show.S01E02.BDRIP.WS.x264-GROUP")) def test_HDTV(self): self.assertEqual(common.Quality.HDTV, 
common.Quality.nameQuality("Test.Show.S01E02.720p.HDTV.x264-GROUP")) + self.assertEqual(common.Quality.HDTV, common.Quality.nameQuality("Test.Show.S01E02.HR.WS.PDTV.x264-GROUP")) # why does this not pass? + + def test_RAWHDTV(self): + self.assertEqual(common.Quality.RAWHDTV, common.Quality.nameQuality("Test.Show.S01E02.720p.HDTV.DD5.1.MPEG2-GROUP")) + self.assertEqual(common.Quality.RAWHDTV, common.Quality.nameQuality("Test.Show.S01E02.1080i.HDTV.DD2.0.MPEG2-GROUP")) + #self.assertEqual(common.Quality.RAWHDTV, common.Quality.nameQuality("Test Show - S01E02 - 1080i HDTV MPA1.0 H.264 - GROUP")) # TODO: add support or leave mpeg only? + + def test_FULLHDTV(self): + self.assertEqual(common.Quality.FULLHDTV, common.Quality.nameQuality("Test.Show.S01E02.1080p.HDTV.x264-GROUP")) + + def test_HDWEBDL(self): + self.assertEqual(common.Quality.HDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.720p.WEB-DL-GROUP")) + self.assertEqual(common.Quality.HDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.720p.WEBRip-GROUP")) + self.assertEqual(common.Quality.HDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.WEBRip.720p.H.264.AAC.2.0-GROUP")) + + def test_FULLHDWEBDL(self): + self.assertEqual(common.Quality.FULLHDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.1080p.WEB-DL-GROUP")) + self.assertEqual(common.Quality.FULLHDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.1080p.WEBRip-GROUP")) + self.assertEqual(common.Quality.FULLHDWEBDL, common.Quality.nameQuality("Test.Show.S01E02.WEBRip.1080p.H.264.AAC.2.0-GROUP")) + + def test_HDBLURAY(self): + self.assertEqual(common.Quality.HDBLURAY, common.Quality.nameQuality("Test.Show.S01E02.720p.BluRay.x264-GROUP")) + self.assertEqual(common.Quality.HDBLURAY, common.Quality.nameQuality("Test.Show.S01E02.720p.HDDVD.x264-GROUP")) + + def test_FULLHDBLURAY(self): + self.assertEqual(common.Quality.FULLHDBLURAY, common.Quality.nameQuality("Test.Show.S01E02.1080p.BluRay.x264-GROUP")) + self.assertEqual(common.Quality.FULLHDBLURAY, common.Quality.nameQuality("Test.Show.S01E02.1080p.HDDVD.x264-GROUP")) + + def test_UNKNOWN(self): + self.assertEqual(common.Quality.UNKNOWN, common.Quality.nameQuality("Test.Show.S01E02-SiCKBEARD")) def test_reverse_parsing(self): self.assertEqual(common.Quality.SDTV, common.Quality.nameQuality("Test Show - S01E02 - SD TV - GROUP")) @@ -39,6 +80,7 @@ def test_reverse_parsing(self): self.assertEqual(common.Quality.FULLHDWEBDL, common.Quality.nameQuality("Test Show - S01E02 - 1080p WEB-DL - GROUP")) self.assertEqual(common.Quality.HDBLURAY, common.Quality.nameQuality("Test Show - S01E02 - 720p BluRay - GROUP")) self.assertEqual(common.Quality.FULLHDBLURAY, common.Quality.nameQuality("Test Show - S01E02 - 1080p BluRay - GROUP")) + self.assertEqual(common.Quality.UNKNOWN, common.Quality.nameQuality("Test Show - S01E02 - Unknown - SiCKBEARD")) if __name__ == '__main__': suite = unittest.TestLoader().loadTestsFromTestCase(QualityTests) From 4b27da92dc12857b33612198f59b2894f363d71b Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Wed, 13 Feb 2013 09:09:46 -0600 Subject: [PATCH 27/67] Looks like we forgot to update our tests code for the custom_naming related changes... cleaned up some minor things (still needs work!) 
--- tests/test_lib.py | 31 +++++++++++++------------------ tests/tv_tests.py | 4 ---- 2 files changed, 13 insertions(+), 22 deletions(-) diff --git a/tests/test_lib.py b/tests/test_lib.py index b004146ead..d68feb57bd 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -47,7 +47,7 @@ FILEDIR = os.path.join(TESTDIR, SHOWNAME) FILEPATH = os.path.join(FILEDIR, FILENAME) -SHOWDIR = os.path.join(TESTDIR, SHOWNAME+" final") +SHOWDIR = os.path.join(TESTDIR, SHOWNAME +" final") #sickbeard.logger.sb_log_instance = sickbeard.logger.SBRotatingLogHandler(os.path.join(TESTDIR, 'sickbeard.log'), sickbeard.logger.NUM_LOGS, sickbeard.logger.LOG_SIZE) sickbeard.logger.SBRotatingLogHandler.log_file = os.path.join(os.path.join(TESTDIR, 'Logs'), 'test_sickbeard.log') @@ -66,21 +66,16 @@ def createTestLogFolder(): #================= sickbeard.SYS_ENCODING = 'UTF-8' sickbeard.showList = [] -sickbeard.QUALITY_DEFAULT = 4 -sickbeard.SEASON_FOLDERS_DEFAULT = 1 -sickbeard.SEASON_FOLDERS_FORMAT = 'Season %02d' - -sickbeard.NAMING_SHOW_NAME = 1 -sickbeard.NAMING_EP_NAME = 1 -sickbeard.NAMING_EP_TYPE = 0 -sickbeard.NAMING_MULTI_EP_TYPE = 1 -sickbeard.NAMING_SEP_TYPE = 0 -sickbeard.NAMING_USE_PERIODS = 0 -sickbeard.NAMING_QUALITY = 0 -sickbeard.NAMING_DATES = 1 +sickbeard.QUALITY_DEFAULT = 4 #hdtv +sickbeard.FLATTEN_FOLDERS_DEFAULT = 0 + +sickbeard.NAMING_PATTERN = '' +sickbeard.NAMING_ABD_PATTERN = '' +sickbeard.NAMING_MULTI_EP = 1 + sickbeard.PROVIDER_ORDER = ["sick_beard_index"] -sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://momo.sickbeard.com/||1!!!NZBs.org|http://beta.nzbs.org/||0") +sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://lolo.sickbeard.com/|0|0") sickbeard.providerList = providers.makeProviderList() sickbeard.PROG_DIR = os.path.abspath('..') @@ -136,11 +131,11 @@ def __init__(self, providerName): # Create the table if it's not already there try: - sql = "CREATE TABLE "+providerName+" (name TEXT, season NUMERIC, episodes TEXT, tvrid NUMERIC, tvdbid NUMERIC, url TEXT, time NUMERIC, quality TEXT);" + sql = "CREATE TABLE " + providerName + " (name TEXT, season NUMERIC, episodes TEXT, tvrid NUMERIC, tvdbid NUMERIC, url TEXT, time NUMERIC, quality TEXT);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: - if str(e) != "table "+providerName+" already exists": + if str(e) != "table " + providerName + " already exists": raise # Create the table if it's not already there @@ -167,7 +162,7 @@ def setUp_test_db(): db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema) # fix up any db problems db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck) - + #and for cache.b too db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema) @@ -183,6 +178,7 @@ def tearDown_test_db(): if os.path.exists(os.path.join(TESTDIR, TESTCACHEDBNAME)): os.remove(os.path.join(TESTDIR, TESTCACHEDBNAME)) + def setUp_test_episode_file(): if not os.path.exists(FILEDIR): os.makedirs(FILEDIR) @@ -219,4 +215,3 @@ def tearDown_test_show_dir(): print "==================" print "or just call all_tests.py" - diff --git a/tests/tv_tests.py b/tests/tv_tests.py index 40734dfc0c..ce9573375d 100644 --- a/tests/tv_tests.py +++ b/tests/tv_tests.py @@ -17,13 +17,11 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see . 
-import random import unittest import test_lib as test import sickbeard from sickbeard.tv import TVEpisode, TVShow -from sickbeard import exceptions class TVShowTests(test.SickbeardTestDBCase): @@ -113,5 +111,3 @@ def test_getEpisode(self): print "######################################################################" suite = unittest.TestLoader().loadTestsFromTestCase(TVTests) unittest.TextTestRunner(verbosity=2).run(suite) - - From 922effb2b2a65aadb71d8202d27fe17bb9a3a05f Mon Sep 17 00:00:00 2001 From: Bo Osterud Date: Sun, 17 Feb 2013 11:25:30 +0100 Subject: [PATCH 28/67] Bugfix :: Postprocessing: Multiple videofiles in same folder and nzbName variable set would lead to loss of files (All videofiles would be processed using the same parsing from the nzbName, leading to each videofile overwriting the previous one). --- sickbeard/processTV.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py index 57c41569d9..267f8ed576 100644 --- a/sickbeard/processTV.py +++ b/sickbeard/processTV.py @@ -94,6 +94,10 @@ def processDir (dirName, nzbName=None, recurse=False): remainingFolders = filter(lambda x: ek.ek(os.path.isdir, ek.ek(os.path.join, dirName, x)), fileList) + # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten). + if nzbName != None and len(videoFiles) >= 2: + nzbName = None + # process any files in the dir for cur_video_file_path in videoFiles: From 7989ab55404f62bd5cb42b876e4a8e918f04e977 Mon Sep 17 00:00:00 2001 From: Michael Reid Date: Sun, 17 Feb 2013 14:02:43 -0400 Subject: [PATCH 29/67] Fix broken synoindex for new folder creation --- sickbeard/postProcessor.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 90bad08ff8..23cd8226c1 100755 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -700,13 +700,13 @@ def process(self): self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")") - if os.path.isdir(self.file_path): - self._log(u"File " + self.file_path + " seems to be a directory") - return False - for ignore_file in self.IGNORED_FILESTRINGS: - if ignore_file in self.file_path: - self._log(u"File " + self.file_path + " is ignored type, skipping") - return False + if os.path.isdir(self.file_path): + self._log(u"File " + self.file_path + " seems to be a directory") + return False + for ignore_file in self.IGNORED_FILESTRINGS: + if ignore_file in self.file_path: + self._log(u"File " + self.file_path + " is ignored type, skipping") + return False # reset per-file stuff self.in_history = False @@ -767,6 +767,8 @@ def process(self): self._log(u"Show directory doesn't exist, creating it", logger.DEBUG) try: ek.ek(os.mkdir, ep_obj.show._location) + # do the library update for synoindex + notifiers.synoindex_notifier.addFolder(ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location) @@ -852,16 +854,16 @@ def process(self): ep_obj.createMetaFiles() ep_obj.saveToDB() - # do the library update - notifiers.xbmc_notifier.update_library(ep_obj.show.name) - - # do the library update for Plex Media Server - notifiers.plex_notifier.update_library() + # do the library update + notifiers.xbmc_notifier.update_library(ep_obj.show.name) + + # do the library update for Plex Media Server + notifiers.plex_notifier.update_library() # do the library update for 
synoindex notifiers.synoindex_notifier.addFile(ep_obj.location) - - # do the library update for trakt + + # do the library update for trakt notifiers.trakt_notifier.update_library(ep_obj) # do the library update for pyTivo From 7360cd546d0db1e5eeda4c4f564e2bf727abc562 Mon Sep 17 00:00:00 2001 From: Michael Reid Date: Sun, 17 Feb 2013 14:29:57 -0400 Subject: [PATCH 30/67] Add synoindex update for folder operations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Inserted synoindex calls for the following functions: makeDir() - Called from webserve.py make_dirs() - Called from postProcessor.py delete_empty_folders() - Called from postProcessor.py --- sickbeard/helpers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index ce18c1b11d..64b8f1a3bd 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -198,6 +198,8 @@ def makeDir (dir): if not ek.ek(os.path.isdir, dir): try: ek.ek(os.makedirs, dir) + # do the library update for synoindex + notifiers.synoindex_notifier.addFolder(dir) except OSError: return False return True @@ -463,6 +465,8 @@ def make_dirs(path): ek.ek(os.mkdir, sofar) # use normpath to remove end separator, otherwise checks permissions against itself chmodAsParent(ek.ek(os.path.normpath, sofar)) + # do the library update for synoindex + notifiers.synoindex_notifier.addFolder(sofar) except (OSError, IOError), e: logger.log(u"Failed creating " + sofar + " : " + ex(e), logger.ERROR) return False @@ -525,6 +529,8 @@ def delete_empty_folders(check_empty_dir, keep_dir=None): logger.log(u"Deleting empty folder: " + check_empty_dir) # need shutil.rmtree when ignore_items is really implemented ek.ek(os.rmdir, check_empty_dir) + # do the library update for synoindex + notifiers.synoindex_notifier.deleteFolder(check_empty_dir) except (WindowsError, OSError), e: logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING) break From bd5bdc0992e86445e1d37a48ba9d482d15e5bc93 Mon Sep 17 00:00:00 2001 From: Michael Reid Date: Sun, 17 Feb 2013 14:41:07 -0400 Subject: [PATCH 31/67] Revert "Fix broken synoindex for new folder creation" This reverts commit 7989ab55404f62bd5cb42b876e4a8e918f04e977. 
--- sickbeard/postProcessor.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 23cd8226c1..90bad08ff8 100755 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -700,13 +700,13 @@ def process(self): self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")") - if os.path.isdir(self.file_path): - self._log(u"File " + self.file_path + " seems to be a directory") - return False - for ignore_file in self.IGNORED_FILESTRINGS: - if ignore_file in self.file_path: - self._log(u"File " + self.file_path + " is ignored type, skipping") - return False + if os.path.isdir(self.file_path): + self._log(u"File " + self.file_path + " seems to be a directory") + return False + for ignore_file in self.IGNORED_FILESTRINGS: + if ignore_file in self.file_path: + self._log(u"File " + self.file_path + " is ignored type, skipping") + return False # reset per-file stuff self.in_history = False @@ -767,8 +767,6 @@ def process(self): self._log(u"Show directory doesn't exist, creating it", logger.DEBUG) try: ek.ek(os.mkdir, ep_obj.show._location) - # do the library update for synoindex - notifiers.synoindex_notifier.addFolder(ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location) @@ -854,16 +852,16 @@ def process(self): ep_obj.createMetaFiles() ep_obj.saveToDB() - # do the library update - notifiers.xbmc_notifier.update_library(ep_obj.show.name) - - # do the library update for Plex Media Server - notifiers.plex_notifier.update_library() + # do the library update + notifiers.xbmc_notifier.update_library(ep_obj.show.name) + + # do the library update for Plex Media Server + notifiers.plex_notifier.update_library() # do the library update for synoindex notifiers.synoindex_notifier.addFile(ep_obj.location) - - # do the library update for trakt + + # do the library update for trakt notifiers.trakt_notifier.update_library(ep_obj) # do the library update for pyTivo From 6b6d07bb7401e1e63e50a1bdfebaee6bb38da284 Mon Sep 17 00:00:00 2001 From: Michael Reid Date: Sun, 17 Feb 2013 14:44:56 -0400 Subject: [PATCH 32/67] Fix broken synoindex for new folder creation --- sickbeard/postProcessor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 90bad08ff8..037e14ed91 100755 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -767,6 +767,8 @@ def process(self): self._log(u"Show directory doesn't exist, creating it", logger.DEBUG) try: ek.ek(os.mkdir, ep_obj.show._location) + # do the library update for synoindex + notifiers.synoindex_notifier.addFolder(ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location) From 5dd33643a22d3e9437ba88625ec49f9135cdbd9d Mon Sep 17 00:00:00 2001 From: racquemis Date: Wed, 20 Feb 2013 10:56:39 +0100 Subject: [PATCH 33/67] Added NMJv2 Notifier --- .../default/config_notifications.tmpl | 86 +++++++++ data/js/configNotifications.js | 42 +++++ sickbeard/__init__.py | 19 +- sickbeard/notifiers/__init__.py | 3 + sickbeard/notifiers/nmjv2.py | 172 ++++++++++++++++++ sickbeard/webserve.py | 31 ++++ 6 files changed, 352 insertions(+), 1 deletion(-) create mode 100644 sickbeard/notifiers/nmjv2.py diff --git a/data/interfaces/default/config_notifications.tmpl 
b/data/interfaces/default/config_notifications.tmpl index 826cf0ad58..c5a336cf63 100755 --- a/data/interfaces/default/config_notifications.tmpl +++ b/data/interfaces/default/config_notifications.tmpl @@ -262,6 +262,92 @@ + +
+ NMJv2
+ The Networked Media Jukebox, or NMJv2, is the official media jukebox interface made available for the Popcorn Hour 300 & 400-series.
+ Click below to test.
diff --git a/data/js/configNotifications.js b/data/js/configNotifications.js index a9ed18fe89..f1aeb4563d 100644 --- a/data/js/configNotifications.js +++ b/data/js/configNotifications.js @@ -125,6 +125,48 @@ $(document).ready(function(){ function (data){ $('#testNMJ-result').html(data); }); }); + $('#settingsNMJv2').click(function(){ + if (!$('#nmjv2_host').val()) { + alert('Please fill in the Popcorn IP address'); + $('#nmjv2_host').focus(); + return; + } + $('#testNMJv2-result').html(loading); + var nmjv2_host = $('#nmjv2_host').val(); + var nmjv2_dbloc; + var radios = document.getElementsByName("nmjv2_dbloc"); + for (var i = 0; i < radios.length; i++){ + if (radios[i].checked) { + nmjv2_dbloc=radios[i].value; + break; + } + } + + var nmjv2_dbinstance=$('#NMJv2db_instance').val(); + $.get(sbRoot+"/home/settingsNMJv2", {'host': nmjv2_host,'dbloc': nmjv2_dbloc,'instance': nmjv2_dbinstance}, + function (data){ + if (data == null) { + $('#nmjv2_database').removeAttr('readonly'); + } + var JSONData = $.parseJSON(data); + $('#testNMJv2-result').html(JSONData.message); + $('#nmjv2_database').val(JSONData.database); + + if (JSONData.database) + $('#nmjv2_database').attr('readonly', true); + else + $('#nmjv2_database').removeAttr('readonly'); + }); + }); + + $('#testNMJv2').click(function(){ + $('#testNMJv2-result').html(loading); + var nmjv2_host = $("#nmjv2_host").val(); + + $.get(sbRoot+"/home/testNMJv2", {'host': nmjv2_host}, + function (data){ $('#testNMJv2-result').html(data); }); + }); + $('#testTrakt').click(function(){ $('#testTrakt-result').html(loading); var trakt_api = $("#trakt_api").val(); diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index be349cadac..ec55ce5919 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -269,6 +269,11 @@ USE_SYNOINDEX = False +USE_NMJv2 = False +NMJv2_HOST = None +NMJv2_DATABASE = None +NMJv2_DBLOC = None + USE_TRAKT = False TRAKT_USERNAME = None TRAKT_PASSWORD = None @@ -338,7 +343,7 @@ def initialize(consoleLogging=True): USE_NOTIFO, NOTIFO_USERNAME, NOTIFO_APISECRET, NOTIFO_NOTIFY_ONDOWNLOAD, NOTIFO_NOTIFY_ONSNATCH, \ USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \ - USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_SYNOINDEX, \ + USE_LIBNOTIFY, LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, USE_NMJ, NMJ_HOST, NMJ_DATABASE, NMJ_MOUNT, USE_NMJv2, NMJv2_HOST, NMJv2_DATABASE, NMJv2_DBLOC, USE_SYNOINDEX, \ USE_BANNER, USE_LISTVIEW, METADATA_XBMC, METADATA_MEDIABROWSER, METADATA_PS3, METADATA_SYNOLOGY, metadata_provider_dict, \ NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \ COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CREATE_MISSING_SHOW_DIRS, \ @@ -649,6 +654,12 @@ def initialize(consoleLogging=True): NMJ_DATABASE = check_setting_str(CFG, 'NMJ', 'nmj_database', '') NMJ_MOUNT = check_setting_str(CFG, 'NMJ', 'nmj_mount', '') + CheckSection(CFG, 'NMJv2') + USE_NMJv2 = bool(check_setting_int(CFG, 'NMJv2', 'use_nmjv2', 0)) + NMJv2_HOST = check_setting_str(CFG, 'NMJv2', 'nmjv2_host', '') + NMJv2_DATABASE = check_setting_str(CFG, 'NMJv2', 'nmjv2_database', '') + NMJ_DBLOC = check_setting_str(CFG, 'NMJv2', 'nmjv2_dbloc', '') + CheckSection(CFG, 'Synology') USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0)) 
@@ -1129,6 +1140,12 @@ def save_config(): new_config['Synology'] = {} new_config['Synology']['use_synoindex'] = int(USE_SYNOINDEX) + new_config['NMJv2'] = {} + new_config['NMJv2']['use_nmjv2'] = int(USE_NMJv2) + new_config['NMJv2']['nmjv2_host'] = NMJv2_HOST + new_config['NMJv2']['nmjv2_database'] = NMJv2_DATABASE + new_config['NMJv2']['nmjv2_dbloc'] = NMJv2_DBLOC + new_config['Trakt'] = {} new_config['Trakt']['use_trakt'] = int(USE_TRAKT) new_config['Trakt']['trakt_username'] = TRAKT_USERNAME diff --git a/sickbeard/notifiers/__init__.py b/sickbeard/notifiers/__init__.py index c78aaabfc6..fb28b7ddcd 100755 --- a/sickbeard/notifiers/__init__.py +++ b/sickbeard/notifiers/__init__.py @@ -21,6 +21,7 @@ import xbmc import plex import nmj +import nmjv2 import synoindex import pytivo @@ -42,6 +43,7 @@ plex_notifier = plex.PLEXNotifier() nmj_notifier = nmj.NMJNotifier() synoindex_notifier = synoindex.synoIndexNotifier() +nmjv2_notifier = nmjv2.NMJv2Notifier() pytivo_notifier = pytivo.pyTivoNotifier() # devices growl_notifier = growl.GrowlNotifier() @@ -60,6 +62,7 @@ xbmc_notifier, plex_notifier, nmj_notifier, + nmjv2_notifier, synoindex_notifier, pytivo_notifier, growl_notifier, diff --git a/sickbeard/notifiers/nmjv2.py b/sickbeard/notifiers/nmjv2.py new file mode 100644 index 0000000000..c5f096f54a --- /dev/null +++ b/sickbeard/notifiers/nmjv2.py @@ -0,0 +1,172 @@ +# Author: Jasper Lanting +# Based on nmj.py by Nico Berlee: http://nico.berlee.nl/ +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of Sick Beard. +# +# Sick Beard is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Sick Beard is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Sick Beard. If not, see . + +import urllib, urllib2,xml.dom.minidom +from xml.dom.minidom import parseString +import sickbeard +import telnetlib +import re +import time + +from sickbeard import logger + +try: + import xml.etree.cElementTree as etree +except ImportError: + import xml.etree.ElementTree as etree + + +class NMJv2Notifier: + + def notify_snatch(self, ep_name): + return False + #Not implemented: Start the scanner when snatched does not make any sense + + def notify_download(self, ep_name): + self._notifyNMJ() + + def test_notify(self, host): + return self._sendNMJ(host) + + def notify_settings(self, host, dbloc, instance): + """ + Retrieves the NMJv2 database location from Popcorn hour + + host: The hostname/IP of the Popcorn Hour server + dbloc: 'local' for PCH internal harddrive. 
'network' for PCH network shares + instance: Allows for selection of different DB in case of multiple databases + + Returns: True if the settings were retrieved successfully, False otherwise + """ + try: + url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2="+instance+"&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false" + req = urllib2.Request(url_loc) + handle1 = urllib2.urlopen(req) + response1 = handle1.read() + xml = parseString(response1) + time.sleep (300.0 / 1000.0) + for node in xml.getElementsByTagName('path'): + xmlTag=node.toxml(); + xmlData=xmlTag.replace('','').replace('','').replace('[=]','') + url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1="+ xmlData + reqdb = urllib2.Request(url_db) + handledb = urllib2.urlopen(reqdb) + responsedb = handledb.read() + xmldb = parseString(responsedb) + returnvalue=xmldb.getElementsByTagName('returnValue')[0].toxml().replace('','').replace('','') + if returnvalue=="0": + DB_path=xmldb.getElementsByTagName('database_path')[0].toxml().replace('','').replace('','').replace('[=]','') + if dbloc=="local" and DB_path.find("localhost")>-1: + sickbeard.NMJv2_HOST=host + sickbeard.NMJv2_DATABASE=DB_path + return True + if dbloc=="network" and DB_path.find("://")>-1: + sickbeard.NMJv2_HOST=host + sickbeard.NMJv2_DATABASE=DB_path + return True + + except IOError, e: + logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e)) + return False + return False + + def _sendNMJ(self, host): + """ + Sends a NMJ update command to the specified machine + + host: The hostname/IP to send the request to (no port) + database: The database to send the requst to + mount: The mount URL to use (optional) + + Returns: True if the request succeeded, False otherwise + """ + + #if a host is provided then attempt to open a handle to that URL + try: + url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=&arg3=update_all" + logger.log(u"NMJ scan update command send to host: %s" % (host)) + url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=background&arg3=" + logger.log(u"Try to mount network drive via url: %s" % (host), logger.DEBUG) + prereq = urllib2.Request(url_scandir) + req = urllib2.Request(url_updatedb) + handle1 = urllib2.urlopen(prereq) + response1 = handle1.read() + time.sleep (300.0 / 1000.0) + handle2 = urllib2.urlopen(req) + response2 = handle2.read() + except IOError, e: + logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e)) + return False + try: + et = etree.fromstring(response1) + result1 = et.findtext("returnValue") + except SyntaxError, e: + logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR) + return False + try: + et = etree.fromstring(response2) + result2 = et.findtext("returnValue") + except SyntaxError, e: + logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR) + return False + + # if the result was a number then consider that an error + error_codes=["8","11","22","49","50","51","60"] + error_messages=["Invalid parameter(s)/argument(s)", + "Invalid database path", + "Insufficient size", + "Database write error", + "Database read error", + "Open fifo pipe failed", + "Read only file system"] + if int(result1) > 0: + index=error_codes.index(result1) + logger.log(u"Popcorn 
Hour returned an error: %s" % (error_messages[index])) + return False + else: + if int(result2) > 0: + index=error_codes.index(result2) + logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index])) + return False + else: + logger.log(u"NMJv2 started background scan") + return True + + def _notifyNMJ(self, host=None, force=False): + """ + Sends a NMJ update command based on the SB config settings + + host: The host to send the command to (optional, defaults to the host in the config) + database: The database to use (optional, defaults to the database in the config) + mount: The mount URL (optional, defaults to the mount URL in the config) + force: If True then the notification will be sent even if NMJ is disabled in the config + """ + if not sickbeard.USE_NMJv2 and not force: + logger.log("Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG) + return False + + # fill in omitted parameters + if not host: + host = sickbeard.NMJv2_HOST + + logger.log(u"Sending scan command for NMJ ", logger.DEBUG) + + return self._sendNMJ(host) + +notifier = NMJv2Notifier diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 6f7fc0514e..7d368a8e8e 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1150,6 +1150,7 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, pushover_userkey=None, use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None, + use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None, use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, pytivo_update_library=None, pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None, @@ -1296,6 +1297,11 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif use_synoindex = 1 else: use_synoindex = 0 + + if use_nmjv2 == "on": + use_nmjv2 = 1 + else: + use_nmjv2 = 0 if use_trakt == "on": use_trakt = 1 @@ -1398,6 +1404,11 @@ def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notif sickbeard.USE_SYNOINDEX = use_synoindex + sickbeard.USE_NMJv2 = use_nmjv2 + sickbeard.NMJv2_HOST = nmjv2_host + sickbeard.NMJv2_DATABASE = nmjv2_database + sickbeard.NMJv2_DBLOC = nmjv2_dbloc + sickbeard.USE_TRAKT = use_trakt sickbeard.TRAKT_USERNAME = trakt_username sickbeard.TRAKT_PASSWORD = trakt_password @@ -2113,6 +2124,26 @@ def settingsNMJ(self, host=None): else: return '{"message": "Failed! Make sure your Popcorn is on and NMJ is running. 
(see Log & Errors -> Debug for detailed info)", "database": "", "mount": ""}' + @cherrypy.expose + def testNMJv2(self, host=None): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + + result = notifiers.nmjv2_notifier.test_notify(urllib.unquote_plus(host)) + if result: + return "Test notice sent successfully to "+urllib.unquote_plus(host) + else: + return "Test notice failed to "+urllib.unquote_plus(host) + + @cherrypy.expose + def settingsNMJv2(self, host=None, dbloc=None,instance=None): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + result = notifiers.nmjv2_notifier.notify_settings(urllib.unquote_plus(host),dbloc,instance) + if result: + return '{"message": "NMJ Database found at: %(host)s", "database": "%(database)s"}' % {"host": host, "database": sickbeard.NMJv2_DATABASE} + else: + return '{"message": "Unable to find NMJ Database at location: %(dbloc)s. Is the right location selected and PCH running?", "database": ""}' % {"dbloc": dbloc} + + @cherrypy.expose def testTrakt(self, api=None, username=None, password=None): cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" From 82d204b598fefca7f7fb24149c094508f0f776a4 Mon Sep 17 00:00:00 2001 From: Jordon Smith Date: Thu, 21 Feb 2013 12:46:47 +0000 Subject: [PATCH 34/67] Cleaned up, removed duplicate code --- sickbeard/providers/nzbx.py | 67 +++++++------------------------------ 1 file changed, 13 insertions(+), 54 deletions(-) diff --git a/sickbeard/providers/nzbx.py b/sickbeard/providers/nzbx.py index e0ce363c98..9d0d342a76 100644 --- a/sickbeard/providers/nzbx.py +++ b/sickbeard/providers/nzbx.py @@ -54,15 +54,15 @@ def _get_title_and_url(self, item): url = self.url + 'nzb?' + str(item['guid']) + '*|*' + urllib.quote_plus(title) return (title, url) - def _doSearch(self, search, show=None): + def _doSearch(self, search, show=None, age=0): params = {'age': sickbeard.USENET_RETENTION, 'completion': sickbeard.NZBX_COMPLETION, 'cat': 'tv-hd|tv-sd', 'limit': 250, 'q': search} - if not params['age']: - params['age'] = 500 + if age or not params['age']: + params['age'] = age if not params['completion']: params['completion'] = 100 @@ -74,7 +74,7 @@ def _doSearch(self, search, show=None): try: items = json.loads(data) except ValueError: - logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) + logger.log(u"Error trying to decode nzbX json data", logger.ERROR) return[] results = [] @@ -82,32 +82,15 @@ def _doSearch(self, search, show=None): if item['name'] and item['guid']: results.append(item) else: - logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) + logger.log(u"Partial result from nzbx", logger.DEBUG) return results def findPropers(self, date=None): - params = {'completion': 100, - 'cat': 'tv-hd|tv-sd', - 'age': 4, - 'q': '.proper.|.repack.'} - - url = self.url + 'api/sickbeard?' 
+ urllib.urlencode(params) - logger.log(u"nzbX proper search url: " + url, logger.DEBUG) - - data = self.getURL(url) - try: - items = json.loads(data) - except ValueError: - logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) - return[] - results = [] - for item in items: - if item['name'] and item['guid'] and item['postdate']: + for item in self._doSearch('.proper.|.repack.', age=4): + if item['postdate']: name, url = self._get_title_and_url(item) results.append(classes.Proper(name, url, datetime.fromtimestamp(item['postdate']))) - else: - logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) return results @@ -117,19 +100,6 @@ def __init__(self, provider): tvcache.TVCache.__init__(self, provider) self.minTime = 20 - def _getRSSData(self): - params = {'q': '', - 'completion': sickbeard.NZBX_COMPLETION, - 'cat': 'tv-hd|tv-sd', - 'limit': 250} - - if not params['completion']: - params['completion'] = 100 - - url = self.provider.url + 'api/sickbeard?' + urllib.urlencode(params) - logger.log(u"nzbX cache update URL: " + url, logger.DEBUG) - return self.provider.getURL(url) - def _parseItem(self, item): title, url = self.provider._get_title_and_url(item) logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) @@ -139,28 +109,17 @@ def updateCache(self): if not self.shouldUpdate(): return - data = self._getRSSData() - # as long as the http request worked we count this as an update - if data: - self.setLastUpdate() - else: + items = self.provider._doSearch('') + if not items: return + self.setLastUpdate() - # now that we've loaded the current RSS feed lets delete the old cache - logger.log(u"Clearing " + self.provider.name + " cache and updating with new information") + # now that we've got the latest releases lets delete the old cache + logger.log(u"Clearing nzbX cache and updating with new information") self._clearCache() - try: - items = json.loads(data) - except ValueError: - logger.log(u"Error trying to decode " + self.provider.name + " RSS feed", logger.ERROR) - return - for item in items: - if item['name'] and item['guid']: - self._parseItem(item) - else: - logger.log(u"Partial result from " + self.provider.name, logger.DEBUG) + self._parseItem(item) provider = NzbXProvider() From de11e1b03a8ac9f8dcaefac5ccc4c60bd364548e Mon Sep 17 00:00:00 2001 From: Jordon Smith Date: Wed, 20 Feb 2013 16:16:50 +0000 Subject: [PATCH 35/67] Added provider for omgwtfnzbs.org --- data/images/providers/omgwtfnzbs.png | Bin 0 -> 338 bytes data/interfaces/default/config_providers.tmpl | 17 ++- sickbeard/__init__.py | 18 ++- sickbeard/providers/__init__.py | 1 + sickbeard/providers/omgwtfnzbs.py | 121 ++++++++++++++++++ sickbeard/webserve.py | 6 + 6 files changed, 160 insertions(+), 3 deletions(-) create mode 100644 data/images/providers/omgwtfnzbs.png create mode 100644 sickbeard/providers/omgwtfnzbs.py diff --git a/data/images/providers/omgwtfnzbs.png b/data/images/providers/omgwtfnzbs.png new file mode 100644 index 0000000000000000000000000000000000000000..3de01dcf10bf94f13f431506982115c99b5c721b GIT binary patch literal 338 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!61|;P_|4#%`EX7WqAsj$Z!;#Vf4nJ z@ErkR#;MwT(m=s4o-U3d7QJ&X8hSAYinKn|KPgf0Cg)Cz633#ju(GDsB?W?y;_fs= zxOBEI5#Z*wp1z>>cfbsZ^+#- zahmqk;A(<{;%v#j(H^Q{*+&T`L&XjM?)VlidX0>p0!$_Qy{zh hg)Z|6Lyv!5@?u4y7felGOb7a(!PC{xWt~$(696AtiwXb$ literal 0 HcmV?d00001 diff --git a/data/interfaces/default/config_providers.tmpl b/data/interfaces/default/config_providers.tmpl index 6f11ab6fbe..6ef8efeb5b 100755 --- 
a/data/interfaces/default/config_providers.tmpl +++ b/data/interfaces/default/config_providers.tmpl @@ -85,7 +85,7 @@ Configure Provider: #set $provider_config_list = [] - #for $cur_provider in ("nzbs_r_us", "tvtorrents", "torrentleech", "btn"): + #for $cur_provider in ("nzbs_r_us", "omgwtfnzbs", "tvtorrents", "torrentleech", "btn"): #set $cur_provider_obj = $sickbeard.providers.getProviderClass($cur_provider) $provider_config_list.append($cur_provider_obj) #end for @@ -141,6 +141,21 @@
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index be349cadac..dd7fcd8048 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -30,7 +30,7 @@ # apparently py2exe won't build these unless they're imported somewhere from sickbeard import providers, metadata -from providers import ezrss, tvtorrents, torrentleech, btn, nzbsrus, newznab, womble +from providers import ezrss, tvtorrents, torrentleech, btn, nzbsrus, newznab, womble, omgwtfnzbs from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser @@ -180,6 +180,10 @@ WOMBLE = False +OMGWTFNZBS = False +OMGWTFNZBS_UID = None +OMGWTFNZBS_KEY = None + NZBSRUS = False NZBSRUS_UID = None NZBSRUS_HASH = None @@ -333,7 +337,7 @@ def initialize(consoleLogging=True): showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TVDB_API_PARMS, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, \ RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ - NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, providerList, newznabProviderList, \ + NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, OMGWTFNZBS, OMGWTFNZBS_UID, OMGWTFNZBS_KEY, providerList, newznabProviderList, \ EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \ USE_NOTIFO, NOTIFO_USERNAME, NOTIFO_APISECRET, NOTIFO_NOTIFY_ONDOWNLOAD, NOTIFO_NOTIFY_ONSNATCH, \ USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ @@ -565,6 +569,11 @@ def initialize(consoleLogging=True): CheckSection(CFG, 'Womble') WOMBLE = bool(check_setting_int(CFG, 'Womble', 'womble', 1)) + CheckSection(CFG, 'omgwtfnzbs') + OMGWTFNZBS = bool(check_setting_int(CFG, 'omgwtfnzbs', 'omgwtfnzbs', 0)) + OMGWTFNZBS_UID = check_setting_str(CFG, 'omgwtfnzbs', 'omgwtfnzbs_uid', '') + OMGWTFNZBS_KEY = check_setting_str(CFG, 'omgwtfnzbs', 'omgwtfnzbs_key', '') + CheckSection(CFG, 'SABnzbd') SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '') SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '') @@ -1042,6 +1051,11 @@ def save_config(): new_config['Womble'] = {} new_config['Womble']['womble'] = int(WOMBLE) + new_config['omgwtfnzbs'] = {} + new_config['omgwtfnzbs']['omgwtfnzbs'] = int(OMGWTFNZBS) + new_config['omgwtfnzbs']['omgwtfnzbs_uid'] = OMGWTFNZBS_UID + new_config['omgwtfnzbs']['omgwtfnzbs_key'] = OMGWTFNZBS_KEY + new_config['SABnzbd'] = {} new_config['SABnzbd']['sab_username'] = SAB_USERNAME new_config['SABnzbd']['sab_password'] = SAB_PASSWORD diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 9b2f39fd82..1fc4cdd474 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -22,6 +22,7 @@ 'nzbsrus', 'womble', 'btn', + 'omgwtfnzbs' ] import sickbeard diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py new file mode 100644 index 0000000000..a7f129bb27 --- /dev/null +++ b/sickbeard/providers/omgwtfnzbs.py @@ -0,0 +1,121 @@ +# Author: Jordon Smith +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of Sick Beard. +# +# Sick Beard is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# Sick Beard is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Sick Beard. If not, see . + +import urllib +import generic +import sickbeard + +from sickbeard import tvcache +from sickbeard import classes +from sickbeard import logger +from sickbeard import exceptions +from sickbeard import show_name_helpers +from datetime import datetime + +try: + import json +except ImportError: + from lib import simplejson as json + + +class OmgwtfnzbsProvider(generic.NZBProvider): + + def __init__(self): + generic.NZBProvider.__init__(self, "omgwtfnzbs") + self.cache = OmgwtfnzbsCache(self) + self.url = 'https://api.omgwtfnzbs.org/' + self.supportsBacklog = True + + def isEnabled(self): + return sickbeard.OMGWTFNZBS + + def _checkAuth(self): + if sickbeard.OMGWTFNZBS_UID in (None, "") or sickbeard.OMGWTFNZBS_KEY in (None, ""): + raise exceptions.AuthException("omgwtfnzbs authentication details are empty, check your config") + + def _get_season_search_strings(self, show, season): + return [x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] + + def _get_episode_search_strings(self, ep_obj): + return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)] + + def _get_title_and_url(self, item): + return (item['release'], item['getnzb']) + + def _doSearch(self, search, show=None, retention=0): + params = {'user': sickbeard.OMGWTFNZBS_UID, + 'api': sickbeard.OMGWTFNZBS_KEY, + 'eng': 1, + 'catid': '19,20', # SD,HD + 'retention': sickbeard.USENET_RETENTION, + 'search': search} + + if retention or not params['retention']: + params['retention'] = retention + + url = self.url + 'json?' + urllib.urlencode(params) + logger.log(u"omgwtfnzbs search url: " + url, logger.DEBUG) + data = self.getURL(url) + try: + items = json.loads(data) + except ValueError: + logger.log(u"Error trying to decode omgwtfnzbs json response", logger.ERROR) + return [] + + results = [] + if 'notice' in items: + if 'api information is incorrect' in items.get('notice'): + raise exceptions.AuthException("omgwtfnzbs authentication details are incorrect") + else: + logger.log(u"omgwtfnzbs notice: " + items.get('notice'), logger.DEBUG) + else: + for item in items: + if 'release' in item and 'getnzb' in item: + results.append(item) + return results + + def findPropers(self, date=None): + search_terms = ['.PROPER.', '.REPACK.'] + results = [] + + for term in search_terms: + for item in self._doSearch(term, retention=4): + if 'usenetage' in item: + name, url = self._get_title_and_url(item) + results.append(classes.Proper(name, url, datetime.fromtimestamp(item['usenetage']))) + return results + + +class OmgwtfnzbsCache(tvcache.TVCache): + + def __init__(self, provider): + tvcache.TVCache.__init__(self, provider) + self.minTime = 20 + + def _getRSSData(self): + params = {'user': sickbeard.OMGWTFNZBS_UID, + 'api': sickbeard.OMGWTFNZBS_KEY, + 'eng': 1, + 'catid': '19,20'} # SD,HD + + url = 'http://rss.omgwtfnzbs.org/rss-download.php?' 
+ urllib.urlencode(params) + logger.log(u"omgwtfnzbs cache update URL: " + url, logger.DEBUG) + return self.provider.getURL(url) + +provider = OmgwtfnzbsProvider() + diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 6f7fc0514e..f2dc99329a 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1031,6 +1031,7 @@ def deleteNewznabProvider(self, id): @cherrypy.expose def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string='', + omgwtfnzbs_uid=None, omgwtfnzbs_key=None, tvtorrents_digest=None, tvtorrents_hash=None, torrentleech_key=None, btn_api_key=None, @@ -1092,6 +1093,8 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.BINREQ = curEnabled elif curProvider == 'womble_s_index': sickbeard.WOMBLE = curEnabled + elif curProvider == 'omgwtfnzbs': + sickbeard.OMGWTFNZBS = curEnabled elif curProvider == 'ezrss': sickbeard.EZRSS = curEnabled elif curProvider == 'tvtorrents': @@ -1115,6 +1118,9 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.NZBSRUS_UID = nzbs_r_us_uid.strip() sickbeard.NZBSRUS_HASH = nzbs_r_us_hash.strip() + sickbeard.OMGWTFNZBS_UID = omgwtfnzbs_uid.strip() + sickbeard.OMGWTFNZBS_KEY = omgwtfnzbs_key.strip() + sickbeard.PROVIDER_ORDER = provider_list sickbeard.save_config() From 8c06fbf13ba6277ba6b69d515e20c3df4b39cb4f Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 02:22:49 -0700 Subject: [PATCH 36/67] Don't bother restricting the RSS by age --- sickbeard/providers/newznab.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index b696889d7a..21bdcb1e0f 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -283,7 +283,6 @@ def __init__(self, provider): def _getRSSData(self): params = {"t": "tvsearch", - "age": sickbeard.USENET_RETENTION, "cat": '5040,5030'} # hack this in for now From 07fb5581ae203aa85aac88aa33bfc17f434f6627 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 11:56:00 -0700 Subject: [PATCH 37/67] Pulled omgwtfnzbs, fixed merge conflicts --- data/images/providers/omgwtfnzbs.png | Bin 0 -> 338 bytes data/interfaces/default/config_providers.tmpl | 17 +++++++++++++++- sickbeard/__init__.py | 19 +++++++++++++++--- sickbeard/providers/__init__.py | 2 +- sickbeard/webserve.py | 6 ++++++ 5 files changed, 39 insertions(+), 5 deletions(-) create mode 100644 data/images/providers/omgwtfnzbs.png diff --git a/data/images/providers/omgwtfnzbs.png b/data/images/providers/omgwtfnzbs.png new file mode 100644 index 0000000000000000000000000000000000000000..3de01dcf10bf94f13f431506982115c99b5c721b GIT binary patch literal 338 zcmeAS@N?(olHy`uVBq!ia0vp^0wB!61|;P_|4#%`EX7WqAsj$Z!;#Vf4nJ z@ErkR#;MwT(m=s4o-U3d7QJ&X8hSAYinKn|KPgf0Cg)Cz633#ju(GDsB?W?y;_fs= zxOBEI5#Z*wp1z>>cfbsZ^+#- zahmqk;A(<{;%v#j(H^Q{*+&T`L&XjM?)VlidX0>p0!$_Qy{zh hg)Z|6Lyv!5@?u4y7felGOb7a(!PC{xWt~$(696AtiwXb$ literal 0 HcmV?d00001 diff --git a/data/interfaces/default/config_providers.tmpl b/data/interfaces/default/config_providers.tmpl index 6f11ab6fbe..6ef8efeb5b 100755 --- a/data/interfaces/default/config_providers.tmpl +++ b/data/interfaces/default/config_providers.tmpl @@ -85,7 +85,7 @@ Configure Provider: #set $provider_config_list = [] - #for $cur_provider in ("nzbs_r_us", "tvtorrents", "torrentleech", "btn"): + #for $cur_provider in ("nzbs_r_us", "omgwtfnzbs", "tvtorrents", "torrentleech", "btn"): #set $cur_provider_obj = 
$sickbeard.providers.getProviderClass($cur_provider) $provider_config_list.append($cur_provider_obj) #end for @@ -141,6 +141,21 @@
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 313f083d09..21e2f8c53f 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -30,7 +30,7 @@ # apparently py2exe won't build these unless they're imported somewhere from sickbeard import providers, metadata -from providers import ezrss, tvtorrents, torrentleech, btn, nzbsrus, newznab, womble, nzbx +from providers import ezrss, tvtorrents, torrentleech, btn, nzbsrus, newznab, womble, nzbx, omgwtfnzbs from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser @@ -183,6 +183,10 @@ NZBX = False NZBX_COMPLETION = 100 +OMGWTFNZBS = False +OMGWTFNZBS_UID = None +OMGWTFNZBS_KEY = None + NZBSRUS = False NZBSRUS_UID = None NZBSRUS_HASH = None @@ -336,7 +340,7 @@ def initialize(consoleLogging=True): showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TVDB_API_PARMS, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, \ RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ - NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, NZBX, NZBX_COMPLETION, providerList, newznabProviderList, \ + NZBSRUS, NZBSRUS_UID, NZBSRUS_HASH, WOMBLE, NZBX, NZBX_COMPLETION, OMGWTFNZBS, OMGWTFNZBS_UID, OMGWTFNZBS_KEY, providerList, newznabProviderList, \ EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \ USE_NOTIFO, NOTIFO_USERNAME, NOTIFO_APISECRET, NOTIFO_NOTIFY_ONDOWNLOAD, NOTIFO_NOTIFY_ONSNATCH, \ USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ @@ -571,7 +575,11 @@ def initialize(consoleLogging=True): CheckSection(CFG, 'nzbX') NZBX = bool(check_setting_int(CFG, 'nzbX', 'nzbx', 0)) NZBX_COMPLETION = check_setting_int(CFG, 'nzbX', 'nzbx_completion', 100) - + CheckSection(CFG, 'omgwtfnzbs') + OMGWTFNZBS = bool(check_setting_int(CFG, 'omgwtfnzbs', 'omgwtfnzbs', 0)) + OMGWTFNZBS_UID = check_setting_str(CFG, 'omgwtfnzbs', 'omgwtfnzbs_uid', '') + OMGWTFNZBS_KEY = check_setting_str(CFG, 'omgwtfnzbs', 'omgwtfnzbs_key', '') + CheckSection(CFG, 'SABnzbd') SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '') SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '') @@ -1053,6 +1061,11 @@ def save_config(): new_config['nzbX']['nzbx'] = int(NZBX) new_config['nzbX']['nzbx_completion'] = int(NZBX_COMPLETION) + new_config['omgwtfnzbs'] = {} + new_config['omgwtfnzbs']['omgwtfnzbs'] = int(OMGWTFNZBS) + new_config['omgwtfnzbs']['omgwtfnzbs_uid'] = OMGWTFNZBS_UID + new_config['omgwtfnzbs']['omgwtfnzbs_key'] = OMGWTFNZBS_KEY + new_config['SABnzbd'] = {} new_config['SABnzbd']['sab_username'] = SAB_USERNAME new_config['SABnzbd']['sab_password'] = SAB_PASSWORD diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index b5c52152d0..e21f4eb978 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -22,7 +22,7 @@ 'nzbsrus', 'womble', 'btn', - 'nzbx' + 'nzbx', 'omgwtfnzbs', ] import sickbeard diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index a63225b9ac..e22ebb6fd0 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -1031,6 +1031,7 @@ def deleteNewznabProvider(self, id): @cherrypy.expose def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string='', + omgwtfnzbs_uid=None, 
omgwtfnzbs_key=None, tvtorrents_digest=None, tvtorrents_hash=None, torrentleech_key=None, btn_api_key=None, @@ -1094,6 +1095,8 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.WOMBLE = curEnabled elif curProvider == 'nzbx': sickbeard.NZBX = curEnabled + elif curProvider == 'omgwtfnzbs': + sickbeard.OMGWTFNZBS = curEnabled elif curProvider == 'ezrss': sickbeard.EZRSS = curEnabled elif curProvider == 'tvtorrents': @@ -1117,6 +1120,9 @@ def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None, sickbeard.NZBSRUS_UID = nzbs_r_us_uid.strip() sickbeard.NZBSRUS_HASH = nzbs_r_us_hash.strip() + sickbeard.OMGWTFNZBS_UID = omgwtfnzbs_uid.strip() + sickbeard.OMGWTFNZBS_KEY = omgwtfnzbs_key.strip() + sickbeard.PROVIDER_ORDER = provider_list sickbeard.save_config() From f07f5c040c9a309a42067caf5160c3ebbd87346e Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 12:38:14 -0700 Subject: [PATCH 38/67] Removed the "beta" from beta.nzbs.org --- sickbeard/providers/__init__.py | 2 +- sickbeard/webserve.py | 2 +- tests/test_lib.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index b5c52152d0..afacb0e154 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -96,7 +96,7 @@ def makeNewznabProvider(configString): return newProvider def getDefaultNewznabProviders(): - return 'Sick Beard Index|http://lolo.sickbeard.com/|0|0!!!NZBs.org|http://beta.nzbs.org/||0!!!NZBGeek|https://index.nzbgeek.info/||0!!!NZBFinder|http://www.nzbfinder.ws/||0!!!Usenet-Crawler|http://www.usenet-crawler.com/||0' + return 'Sick Beard Index|http://lolo.sickbeard.com/|0|0!!!NZBs.org|http://nzbs.org/||0!!!NZBGeek|https://index.nzbgeek.info/||0!!!NZBFinder|http://www.nzbfinder.ws/||0!!!Usenet-Crawler|http://www.usenet-crawler.com/||0' def getProviderModule(name): diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 345ce863cb..43330c0533 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -78,7 +78,7 @@ def __init__(self, *args, **KWs): if sickbeard.NZBS and sickbeard.NZBS_UID and sickbeard.NZBS_HASH: logger.log(u"NZBs.org has been replaced, please check the config to configure the new provider!", logger.ERROR) - ui.notifications.error("NZBs.org Config Update", "NZBs.org has a new site. Please update your config with the api key from http://beta.nzbs.org and then disable the old NZBs.org provider.") + ui.notifications.error("NZBs.org Config Update", "NZBs.org has a new site. 
Please update your config with the api key from http://nzbs.org and then disable the old NZBs.org provider.") if "X-Forwarded-Host" in cherrypy.request.headers: self.sbHost = cherrypy.request.headers['X-Forwarded-Host'] diff --git a/tests/test_lib.py b/tests/test_lib.py index b004146ead..b335ca3722 100644 --- a/tests/test_lib.py +++ b/tests/test_lib.py @@ -80,7 +80,7 @@ def createTestLogFolder(): sickbeard.NAMING_DATES = 1 sickbeard.PROVIDER_ORDER = ["sick_beard_index"] -sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://momo.sickbeard.com/||1!!!NZBs.org|http://beta.nzbs.org/||0") +sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://momo.sickbeard.com/||1!!!NZBs.org|http://nzbs.org/||0") sickbeard.providerList = providers.makeProviderList() sickbeard.PROG_DIR = os.path.abspath('..') From 3dba7ad75b655b2be5a13ed445678a5afa86154a Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 13:19:23 -0700 Subject: [PATCH 39/67] Differentiate snatched and downloaded on the show page --- data/css/style.css | 4 +++- sickbeard/common.py | 6 +++++- sickbeard/tv.py | 6 ++++-- sickbeard/webserve.py | 2 ++ 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/data/css/style.css b/data/css/style.css index 967959eade..28e58d3b99 100644 --- a/data/css/style.css +++ b/data/css/style.css @@ -531,7 +531,9 @@ displayShow.tmpl + manage_backlogOverview.tmpl .wanted { background-color: #ffb0b0; } - +.snatched { + background-color: #ebc1ea; +} /* ======================================================================= manage_backlogOverview.tmpl ========================================================================== */ diff --git a/sickbeard/common.py b/sickbeard/common.py index 94819b477d..f557cfa7af 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -243,11 +243,15 @@ class Overview: GOOD = 4 SKIPPED = SKIPPED # 5 + # For both snatched statuses. Note: SNATCHED/QUAL have same value and break dict. + SNATCHED = SNATCHED_PROPER # 9 + overviewStrings = {SKIPPED: "skipped", WANTED: "wanted", QUAL: "qual", GOOD: "good", - UNAIRED: "unaired"} + UNAIRED: "unaired", + SNATCHED: "snatched"} # Get our xml namespaces correct for lxml XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance', diff --git a/sickbeard/tv.py b/sickbeard/tv.py index e4527cdc06..9822a41533 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -917,9 +917,11 @@ def getOverview(self, epStatus): maxBestQuality = None epStatus, curQuality = Quality.splitCompositeStatus(epStatus) - + + if epStatus in (SNATCHED, SNATCHED_PROPER): + return Overview.SNATCHED # if they don't want re-downloads then we call it good if they have anything - if maxBestQuality == None: + elif maxBestQuality == None: return Overview.GOOD # if they have one but it's not the best they want then mark it as qual elif curQuality < maxBestQuality: diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 43330c0533..31c6f24bf6 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -338,6 +338,7 @@ def backlogOverview(self): epCounts[Overview.QUAL] = 0 epCounts[Overview.GOOD] = 0 epCounts[Overview.UNAIRED] = 0 + epCounts[Overview.SNATCHED] = 0 sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? 
ORDER BY season DESC, episode DESC", [curShow.tvdbid]) @@ -2277,6 +2278,7 @@ def displayShow(self, show=None): epCounts[Overview.QUAL] = 0 epCounts[Overview.GOOD] = 0 epCounts[Overview.UNAIRED] = 0 + epCounts[Overview.SNATCHED] = 0 for curResult in sqlResults: From e10e8d4556aa1cb2ca130dc450ea127abf1518e3 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 14:02:23 -0700 Subject: [PATCH 40/67] A possible fix for a newznab provider bug --- sickbeard/providers/newznab.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 21bdcb1e0f..554100f632 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -202,7 +202,7 @@ def _doSearch(self, search_params, show=None, max_age=0): return [] # hack this in until it's fixed server side - if not data.startswith('' + data try: From c00688b0a0c525477a1aa189cb7412447343f073 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 14:12:25 -0700 Subject: [PATCH 41/67] OK, I won't bother. --- sickbeard/providers/newznab.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 554100f632..21bdcb1e0f 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -202,7 +202,7 @@ def _doSearch(self, search_params, show=None, max_age=0): return [] # hack this in until it's fixed server side - if not data.strip().startswith('' + data try: From b5c3cc044f41874d0f8ce91fb59f6442a7b158c6 Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sat, 23 Feb 2013 16:06:23 -0700 Subject: [PATCH 42/67] Added missed file from the last commit --- sickbeard/providers/omgwtfnzbs.py | 121 ++++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 sickbeard/providers/omgwtfnzbs.py diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py new file mode 100644 index 0000000000..dc68530973 --- /dev/null +++ b/sickbeard/providers/omgwtfnzbs.py @@ -0,0 +1,121 @@ +# Author: Jordon Smith +# URL: http://code.google.com/p/sickbeard/ +# +# This file is part of Sick Beard. +# +# Sick Beard is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Sick Beard is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Sick Beard. If not, see . 
+ +import urllib +import generic +import sickbeard + +from sickbeard import tvcache +from sickbeard import classes +from sickbeard import logger +from sickbeard import exceptions +from sickbeard import show_name_helpers +from datetime import datetime + +try: + import json +except ImportError: + from lib import simplejson as json + + +class OmgwtfnzbsProvider(generic.NZBProvider): + + def __init__(self): + generic.NZBProvider.__init__(self, "omgwtfnzbs") + self.cache = OmgwtfnzbsCache(self) + self.url = 'https://api.omgwtfnzbs.org/' + self.supportsBacklog = True + + def isEnabled(self): + return sickbeard.OMGWTFNZBS + + def _checkAuth(self): + if not sickbeard.OMGWTFNZBS_UID or not sickbeard.OMGWTFNZBS_KEY: + raise exceptions.AuthException("omgwtfnzbs authentication details are empty, check your config") + + def _get_season_search_strings(self, show, season): + return [x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)] + + def _get_episode_search_strings(self, ep_obj): + return [x for x in show_name_helpers.makeSceneSearchString(ep_obj)] + + def _get_title_and_url(self, item): + return (item['release'], item['getnzb']) + + def _doSearch(self, search, show=None, retention=0): + params = {'user': sickbeard.OMGWTFNZBS_UID, + 'api': sickbeard.OMGWTFNZBS_KEY, + 'eng': 1, + 'catid': '19,20', # SD,HD + 'retention': sickbeard.USENET_RETENTION, + 'search': search} + + if retention or not params['retention']: + params['retention'] = retention + + url = self.url + 'json?' + urllib.urlencode(params) + logger.log(u"omgwtfnzbs search url: " + url, logger.DEBUG) + data = self.getURL(url) + try: + items = json.loads(data) + except ValueError: + logger.log(u"Error trying to decode omgwtfnzbs json response", logger.ERROR) + return [] + + results = [] + if 'notice' in items: + if 'api information is incorrect' in items.get('notice'): + raise exceptions.AuthException("omgwtfnzbs authentication details are incorrect") + else: + logger.log(u"omgwtfnzbs notice: " + items.get('notice'), logger.DEBUG) + else: + for item in items: + if 'release' in item and 'getnzb' in item: + results.append(item) + return results + + def findPropers(self, date=None): + search_terms = ['.PROPER.', '.REPACK.'] + results = [] + + for term in search_terms: + for item in self._doSearch(term, retention=4): + if 'usenetage' in item: + name, url = self._get_title_and_url(item) + results.append(classes.Proper(name, url, datetime.fromtimestamp(item['usenetage']))) + return results + + +class OmgwtfnzbsCache(tvcache.TVCache): + + def __init__(self, provider): + tvcache.TVCache.__init__(self, provider) + self.minTime = 20 + + def _getRSSData(self): + params = {'user': sickbeard.OMGWTFNZBS_UID, + 'api': sickbeard.OMGWTFNZBS_KEY, + 'eng': 1, + 'catid': '19,20'} # SD,HD + + url = 'http://rss.omgwtfnzbs.org/rss-download.php?' + urllib.urlencode(params) + logger.log(u"omgwtfnzbs cache update URL: " + url, logger.DEBUG) + return self.provider.getURL(url) + +provider = OmgwtfnzbsProvider() + From 207ac22d286ca612b6ad004693fc1886e3b9c0fc Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 24 Feb 2013 00:01:38 -0700 Subject: [PATCH 43/67] Fixed a bug when updating the URL of a custom newznab provider. 
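For context on the omgwtfnzbs provider added above, the sketch below isolates the JSON-handling pattern its _doSearch method relies on: error or auth failures come back as an object carrying a 'notice' message, while normal replies are a list of release objects. This is a hedged, self-contained illustration only; the sample payloads are made up, the parse_omgwtfnzbs_response helper is hypothetical, and a plain ValueError stands in for the provider's exceptions.AuthException.

import json

def parse_omgwtfnzbs_response(data):
    """Sketch of the response handling in OmgwtfnzbsProvider._doSearch."""
    try:
        items = json.loads(data)
    except (TypeError, ValueError):
        # unparsable or empty body -> treat as no results
        return []

    # error/auth replies arrive as a dict with a 'notice' message
    if isinstance(items, dict) and 'notice' in items:
        if 'api information is incorrect' in items['notice']:
            raise ValueError("omgwtfnzbs authentication details are incorrect")
        return []

    # normal replies are a list of release dicts; keep only usable entries
    return [item for item in items if 'release' in item and 'getnzb' in item]

# hypothetical payloads, for illustration only
good = json.dumps([{'release': 'Show.Name.S01E01.720p', 'getnzb': 'http://example.com/nzb/1'}])
bad = json.dumps({'notice': 'api information is incorrect'})

print parse_omgwtfnzbs_response(good)   # one usable result
print parse_omgwtfnzbs_response('')     # []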
--- data/js/configProviders.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/js/configProviders.js b/data/js/configProviders.js index bde935dcc4..b19068ee61 100644 --- a/data/js/configProviders.js +++ b/data/js/configProviders.js @@ -136,7 +136,7 @@ $(document).ready(function(){ }); - $('#newznab_key').change(function(){ + $('#newznab_key,#newznab_url').change(function(){ var selectedProvider = $('#editANewznabProvider :selected').val(); From 3fad61a63cca12b07090e9678c45390ede64c6ce Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 24 Feb 2013 01:36:13 -0700 Subject: [PATCH 44/67] Fix a rootDir error --- data/interfaces/default/inc_rootDirs.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/interfaces/default/inc_rootDirs.tmpl b/data/interfaces/default/inc_rootDirs.tmpl index b92639a9bd..27f995daab 100644 --- a/data/interfaces/default/inc_rootDirs.tmpl +++ b/data/interfaces/default/inc_rootDirs.tmpl @@ -12,7 +12,7 @@
- #for $cur_dir in $backend_dirs: #end for From 03136c66b4ac5edf66554afbd51d4ab47648a22e Mon Sep 17 00:00:00 2001 From: Nic Wolfe Date: Sun, 24 Feb 2013 01:42:56 -0700 Subject: [PATCH 45/67] Fixed a bug with the synoindex notifications on show add --- sickbeard/helpers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 64b8f1a3bd..f135f6b4e3 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -37,6 +37,7 @@ from sickbeard import db from sickbeard import encodingKludge as ek +from sickbeard import notifiers from lib.tvdb_api import tvdb_api, tvdb_exceptions From 5dda6a3a2be00b26fd73c42b71aa66db0d3a3cf7 Mon Sep 17 00:00:00 2001 From: Jonathon Saine Date: Fri, 8 Feb 2013 19:23:28 -0600 Subject: [PATCH 46/67] Added GUI/Config option to update only the first host in the XBMC host list. This should appease everyone now. Cleaned up configNotifications.js to make JSLint/JSHint happy, and updated the code for jQuery 1.9.x, as `.complete` is deprecated in favor of `.done` for the $.get calls. --- .../default/config_notifications.tmpl | 29 ++-- data/js/configNotifications.js | 155 +++++++++--------- sickbeard/__init__.py | 4 +- sickbeard/webserve.py | 26 ++- 4 files changed, 118 insertions(+), 96 deletions(-) diff --git a/data/interfaces/default/config_notifications.tmpl b/data/interfaces/default/config_notifications.tmpl index c5a336cf63..d2e13e4a40 100755 --- a/data/interfaces/default/config_notifications.tmpl +++ b/data/interfaces/default/config_notifications.tmpl @@ -66,6 +66,13 @@ Fall back to a full library update if per-show fails?
+
+ + +
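The config_notifications.tmpl hunk from PATCH 46 is truncated above, but the behaviour the commit message describes, optionally notifying only the first configured XBMC host instead of every host, can be summarised with a small sketch. The flag and helper names below are hypothetical stand-ins for illustration; the actual setting and call sites live in sickbeard/__init__.py, webserve.py, and the XBMC notifier and may be named differently.

# Hypothetical sketch of the "update only the first XBMC host" option from PATCH 46;
# real names in the codebase may differ.
def hosts_to_update(host_csv, update_only_first):
    """Pick which XBMC hosts should receive a library update."""
    hosts = [h.strip() for h in host_csv.split(',') if h.strip()]
    if update_only_first:
        return hosts[:1]
    return hosts

print hosts_to_update('192.168.1.10:8080, 192.168.1.11:8080', True)    # ['192.168.1.10:8080']
print hosts_to_update('192.168.1.10:8080, 192.168.1.11:8080', False)   # both hosts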