queue.py
# coding=utf-8
"""Module with different types of Queue Items for searching and snatching."""
from __future__ import unicode_literals
import datetime
import logging
import operator
import re
import threading
import time
import traceback
from builtins import map
from builtins import str
from medusa import app, common, db, failed_history, helpers, history, ui, ws
from medusa.common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, SUBTITLED
from medusa.helper.common import enabled_providers
from medusa.helper.exceptions import AuthException, ex
from medusa.helpers import pretty_file_size
from medusa.logger.adapters.style import BraceAdapter
from medusa.name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from medusa.queues import generic_queue
from medusa.search import BACKLOG_SEARCH, DAILY_SEARCH, FAILED_SEARCH, MANUAL_SEARCH, PROPER_SEARCH, SNATCH_RESULT, SearchType
from medusa.search.core import (
filter_results,
pick_result,
search_for_needed_episodes,
search_providers,
snatch_result,
)
from medusa.show.history import History
from six import itervalues, text_type
log = BraceAdapter(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())
SEARCH_HISTORY = []
SEARCH_HISTORY_SIZE = 100
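# SEARCH_HISTORY keeps the most recent queue items and is trimmed by the
# module-level fifo() helper defined further down in this file. A minimal
# sketch of the assumed behaviour (names chosen for illustration):
#
#     def fifo(my_list, item, max_size=100):
#         if len(my_list) >= max_size:
#             my_list.pop(0)
#         my_list.append(item)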
class SearchQueue(generic_queue.GenericQueue):
"""Search queue class."""
def __init__(self):
"""Initialize the class."""
generic_queue.GenericQueue.__init__(self)
self.queue_name = 'SEARCHQUEUE'
self.force = False
def is_in_queue(self, show, segment):
"""Check if item is in queue."""
for cur_item in self.queue:
if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem,
SnatchQueueItem, ManualSearchQueueItem)) \
and cur_item.show == show and cur_item.segment == segment:
return True
return False
def pause_backlog(self):
"""Pause the backlog."""
self.min_priority = generic_queue.QueuePriorities.HIGH
def unpause_backlog(self):
"""Unpause the backlog."""
self.min_priority = 0
def is_backlog_paused(self):
"""Check if backlog is paused."""
# backlog priorities are NORMAL, this should be done properly somewhere
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
def is_backlog_in_progress(self):
"""Check is backlog is in progress."""
for cur_item in self.queue + [self.current_item]:
if isinstance(cur_item, BacklogQueueItem):
return True
return False
def is_dailysearch_in_progress(self):
"""Check if daily search is in progress."""
for cur_item in self.queue + [self.current_item]:
if isinstance(cur_item, DailySearchQueueItem):
return True
return False
def is_proper_search_in_progress(self):
"""Check if proper search is in progress."""
for cur_item in self.queue + [self.current_item]:
if isinstance(cur_item, ProperSearchQueueItem):
return True
return False
def queue_length(self):
"""Get queue lenght."""
length = {'backlog': 0, 'daily': 0}
for cur_item in self.queue:
if isinstance(cur_item, DailySearchQueueItem):
length['daily'] += 1
elif isinstance(cur_item, BacklogQueueItem):
length['backlog'] += 1
return length
def add_item(self, item):
"""Add item to queue."""
if isinstance(item, (DailySearchQueueItem, ProperSearchQueueItem)):
# daily searches and proper searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, (BacklogQueueItem, FailedQueueItem,
SnatchQueueItem, ManualSearchQueueItem)) \
and not self.is_in_queue(item.show, item.segment):
generic_queue.GenericQueue.add_item(self, item)
else:
log.debug('Item already in the queue, skipping')
def force_daily(self):
"""Force daily searched."""
if not self.is_dailysearch_in_progress and not self.current_item.amActive:
self.force = True
return True
return False
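# Illustrative usage (a hedged sketch; the actual scheduler wiring lives
# elsewhere in Medusa): a daily search is queued by adding a
# DailySearchQueueItem to the SearchQueue, e.g.
#
#     search_queue = SearchQueue()
#     search_queue.add_item(DailySearchQueueItem(scheduler_start_time, force=False))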
class ForcedSearchQueue(generic_queue.GenericQueue):
"""Search Queue used for Manual, (forced) Backlog and Failed Search."""
def __init__(self):
"""Initialize ForcedSearch Queue."""
generic_queue.GenericQueue.__init__(self)
self.queue_name = 'FORCEDSEARCHQUEUE'
def is_in_queue(self, show, segment):
"""Verify if the show and segment (episode or number of episodes) are scheduled."""
for cur_item in self.queue:
if cur_item.show == show and cur_item.segment == segment:
return True
return False
def is_ep_in_queue(self, segment):
"""Verify if the show and segment (episode or number of episodes) are scheduled."""
for cur_item in self.queue:
if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, ManualSearchQueueItem)) and cur_item.segment == segment:
return True
return False
def is_show_in_queue(self, show):
"""Verify if the show is queued in this queue as a BacklogQueueItem, ManualSearchQueueItem or FailedQueueItem."""
for cur_item in self.queue:
if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, ManualSearchQueueItem)) and cur_item.show.indexerid == show:
return True
return False
def get_all_ep_from_queue(self, series_obj):
"""
Get QueueItems from the queue if the queue item is scheduled to search for the passed Show.
@param series_obj: Series object.
        @return: A list of BacklogQueueItem, FailedQueueItem or ManualSearchQueueItem items
"""
ep_obj_list = []
for cur_item in self.queue:
if isinstance(cur_item, (BacklogQueueItem, FailedQueueItem, ManualSearchQueueItem)):
if series_obj and cur_item.show.identifier != series_obj.identifier:
continue
ep_obj_list.append(cur_item)
return ep_obj_list
    def is_backlog_paused(self):
        """
        Verify if the ForcedSearchQueue's min_priority has been changed.
        This indicates that the queue has been paused.
        """
        # backlog priorities are NORMAL, this should be done properly somewhere
        return self.min_priority >= generic_queue.QueuePriorities.NORMAL
def is_forced_search_in_progress(self):
"""Test of a forced search is currently running (can be backlog, manual or failed search).
It doesn't check what's in queue.
"""
if isinstance(self.current_item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)):
return True
return False
def queue_length(self):
"""Get queue length."""
length = {'backlog_search': 0, 'manual_search': 0, 'failed': 0}
for cur_item in self.queue:
if isinstance(cur_item, FailedQueueItem):
length['failed'] += 1
elif isinstance(cur_item, ManualSearchQueueItem):
length['manual_search'] += 1
elif isinstance(cur_item, BacklogQueueItem):
length['backlog_search'] += 1
return length
def add_item(self, item):
"""Add a new ManualSearchQueueItem or FailedQueueItem to the ForcedSearchQueue."""
if isinstance(item, (ManualSearchQueueItem, FailedQueueItem, BacklogQueueItem)) and not self.is_ep_in_queue(item.segment):
            # manual, backlog and failed searches
generic_queue.GenericQueue.add_item(self, item)
else:
log.debug('Item already in the queue, skipping')
class SnatchQueue(generic_queue.GenericQueue):
"""Queue for queuing SnatchQueueItem objects (snatch jobs)."""
def __init__(self):
"""Initialize the SnatchQueue object."""
generic_queue.GenericQueue.__init__(self)
self.queue_name = 'SNATCHQUEUE'
def is_in_queue(self, show, segment):
"""
        Check if the passed show and segment (episode or list of episodes) is in the queue.
@param show: show object
@param segment: list of episode objects
@return: True or False
"""
for cur_item in self.queue:
if cur_item.show == show and cur_item.segment == segment:
return True
return False
def is_ep_in_queue(self, segment):
"""
        Check if the passed segment (episode or list of episodes) is in the queue.
@param segment: list of episode objects
@return: True or False
"""
for cur_item in self.queue:
if cur_item.segment == segment:
return True
return False
def queue_length(self):
"""
Get the length of the current queue.
@return: length of queue
"""
return {'manual_snatch': len(self.queue)}
def add_item(self, item):
"""
Add a SnatchQueueItem queue item.
        @param item: SnatchQueueItem queue object
"""
if not self.is_in_queue(item.show, item.segment):
            # snatch jobs
generic_queue.GenericQueue.add_item(self, item)
else:
log.debug("Not adding item, it's already in the queue")
class DailySearchQueueItem(generic_queue.QueueItem):
"""Daily search queue item class."""
def __init__(self, scheduler_start_time, force):
"""Initialize the class."""
generic_queue.QueueItem.__init__(self, u'Daily Search', DAILY_SEARCH)
self.success = None
self.started = None
self.scheduler_start_time = scheduler_start_time
self.force = force
self.to_json.update({
'success': self.success,
'force': self.force
})
def run(self):
"""Run daily search thread."""
generic_queue.QueueItem.run(self)
self.started = True
try:
log.info('Beginning daily search for new episodes')
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
found_results = search_for_needed_episodes(self.scheduler_start_time, force=self.force)
if not found_results:
log.info('No needed episodes found')
else:
for result in found_results:
# just use the first result for now
if result.seeders not in (-1, None) and result.leechers not in (-1, None):
log.info(
'Downloading {name} with {seeders} seeders and {leechers} leechers'
' and size {size} from {provider}', {
'name': result.name,
'seeders': result.seeders,
'leechers': result.leechers,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
else:
log.info(
'Downloading {name} with size: {size} from {provider}', {
'name': result.name,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
# Set the search_type for the result.
result.search_type = SearchType.DAILY_SEARCH
# Create the queue item
snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)
# Add the queue item to the queue
app.manual_snatch_scheduler.action.add_item(snatch_queue_item)
                    # Wait for the snatch queue item to finish, then mirror its outcome.
                    while snatch_queue_item.success is None:
                        time.sleep(1)
                    self.success = bool(snatch_queue_item.success)
# give the CPU a break
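                    # (common.cpu_presets maps the configured CPU preset to a sleep interval in seconds.)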
time.sleep(common.cpu_presets[app.CPU_PRESET])
except Exception as error:
self.success = False
log.exception('DailySearchQueueItem Exception, error: {error!r}', {'error': error})
if self.success is None:
self.success = False
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
self.finish()
class ManualSearchQueueItem(generic_queue.QueueItem):
"""Manual search queue item class."""
def __init__(self, show, segment, manual_search_type='episode'):
"""
        Initialize a QueueItem used to queue forced and manual searches.
:param show: A show object
:param segment: A list of episode objects.
:param manual_search_type: Used to switch between episode and season search. Options are 'episode' or 'season'.
        :return: The run() method searches and snatches the episode(s) if possible, or it only searches and saves the results to the cache tables.
"""
generic_queue.QueueItem.__init__(self, u'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = '{search_type}-{indexerid}'.format(
search_type='MANUAL',
indexerid=show.indexerid
)
self.success = None
self.started = None
self.results = None
self.show = show
self.segment = segment
self.manual_search_type = manual_search_type
self.to_json.update({
'show': self.show.to_json(),
'segment': [ep.to_json() for ep in self.segment],
'success': self.success,
'manualSearchType': self.manual_search_type
})
def run(self):
"""Run manual search thread."""
generic_queue.QueueItem.run(self)
self.started = True
try:
log.info(
'Beginning {search_type} {season_pack}search for: {ep}', {
'search_type': 'manual',
                    'season_pack': 'season pack ' if self.manual_search_type == 'season' else '',
'ep': self.segment[0].pretty_name()
}
)
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
search_result = search_providers(self.show, self.segment, forced_search=True, down_cur_quality=True,
manual_search=True, manual_search_type=self.manual_search_type)
if search_result:
self.results = search_result
self.success = True
if self.manual_search_type == 'season':
ui.notifications.message('We have found season packs for {show_name}'
.format(show_name=self.show.name),
'These should become visible in the manual select page.')
else:
ui.notifications.message('We have found results for {ep}'
.format(ep=self.segment[0].pretty_name()),
'These should become visible in the manual select page.')
else:
ui.notifications.message('No results were found')
log.info(
'Unable to find {search_type} {season_pack}results for: {ep}', {
'search_type': 'manual',
                        'season_pack': 'season pack ' if self.manual_search_type == 'season' else '',
'ep': self.segment[0].pretty_name()
}
)
# TODO: Remove catch all exception.
except Exception:
self.success = False
log.debug(traceback.format_exc())
# Keep a list with the 100 last executed searches
fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
# Push an update to any open Web UIs through the WebSocket
msg = ws.Message('QueueItemUpdate', self.to_json)
msg.push()
self.finish()
class SnatchQueueItem(generic_queue.QueueItem):
"""
    A queue item that can be used to queue the snatch of a search result.
    @param show: A show object
    @param segment: A list of episode objects
    @param search_result: The search result to snatch, as produced by a provider search or built from a cached result in the provider's cache table.
    @return: The run() method snatches the episode(s) if possible.
"""
def __init__(self, show, segment, search_result):
"""Initialize the class."""
generic_queue.QueueItem.__init__(self, u'Snatch Result', SNATCH_RESULT)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'SNATCH-{indexer_id}'.format(indexer_id=search_result.series.indexerid)
self.success = None
self.started = None
self.segment = segment
self.show = show
self.results = None
self.search_result = search_result
self.to_json.update({
'show': self.show.to_json(),
'segment': [ep.to_json() for ep in self.segment],
'success': self.success,
'searchResult': self.search_result.to_json()
})
def run(self):
"""Run manual snatch job."""
generic_queue.QueueItem.run(self)
self.started = True
result = self.search_result
try:
log.info('Beginning to snatch release: {name}',
{'name': result.name})
# Push an update to any open Web UIs through the WebSocket
msg = ws.Message('QueueItemUpdate', self.to_json)
msg.push()
if result:
if result.seeders not in (-1, None) and result.leechers not in (-1, None):
log.info(
'Downloading {name} with {seeders} seeders and {leechers} leechers'
' and size {size} from {provider}, through a {search_type} search', {
'name': result.name,
'seeders': result.seeders,
'leechers': result.leechers,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
'search_type': result.search_type
}
)
else:
log.info(
'Downloading {name} with size: {size} from {provider}, through a {search_type} search', {
'name': result.name,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
'search_type': result.search_type
}
)
self.success = snatch_result(result)
else:
log.info('Unable to snatch release: {name}',
{'name': result.name})
# give the CPU a break
time.sleep(common.cpu_presets[app.CPU_PRESET])
except Exception:
self.success = False
log.exception('Snatch failed! For result: {name}', {'name': result.name})
ui.notifications.message('Error while snatching selected result',
'Unable to snatch the result for <i>{name}</i>'.format(name=result.name))
if self.success is None:
self.success = False
# Push an update to any open Web UIs through the WebSocket
msg = ws.Message('QueueItemUpdate', self.to_json)
msg.push()
self.finish()
class BacklogQueueItem(generic_queue.QueueItem):
"""Backlog queue item class."""
def __init__(self, show, segment):
"""Initialize the class."""
generic_queue.QueueItem.__init__(self, u'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
self.name = 'BACKLOG-{indexer_id}'.format(indexer_id=show.indexerid)
self.started = None
self.show = show
self.segment = segment
self.to_json.update({
'show': self.show.to_json(),
'segment': [ep.to_json() for ep in self.segment],
})
def run(self):
"""Run backlog search thread."""
generic_queue.QueueItem.run(self)
self.started = True
if not self.show.paused:
try:
log.info('Beginning backlog search for: {name}',
{'name': self.show.name})
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
search_result = search_providers(self.show, self.segment)
if search_result:
for result in search_result:
# just use the first result for now
if result.seeders not in (-1, None) and result.leechers not in (-1, None):
log.info(
'Downloading {name} with {seeders} seeders and {leechers} leechers '
'and size {size} from {provider}', {
'name': result.name,
'seeders': result.seeders,
'leechers': result.leechers,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
else:
log.info(
'Downloading {name} with size: {size} from {provider}', {
'name': result.name,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
# Set the search_type for the result.
result.search_type = SearchType.BACKLOG_SEARCH
# Create the queue item
snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)
# Add the queue item to the queue
app.manual_snatch_scheduler.action.add_item(snatch_queue_item)
                        # Wait for the snatch queue item to finish, then mirror its outcome.
                        while snatch_queue_item.success is None:
                            time.sleep(1)
                        self.success = bool(snatch_queue_item.success)
# give the CPU a break
time.sleep(common.cpu_presets[app.CPU_PRESET])
else:
log.info('No needed episodes found during backlog search for: {name}',
{'name': self.show.name})
# TODO: Remove the catch all exception.
except Exception:
self.success = False
log.debug(traceback.format_exc())
# Keep a list with the 100 last executed searches
fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
self.finish()
class FailedQueueItem(generic_queue.QueueItem):
"""Failed queue item class."""
def __init__(self, show, segment, down_cur_quality=False):
"""Initialize the class."""
generic_queue.QueueItem.__init__(self, u'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'RETRY-{indexer_id}'.format(indexer_id=show.indexerid)
self.success = None
self.started = None
self.show = show
self.segment = segment
self.down_cur_quality = down_cur_quality
self.to_json.update({
'show': self.show.to_json(),
'segment': [ep.to_json() for ep in self.segment],
'success': self.success,
'downloadCurrentQuality': self.down_cur_quality
})
def run(self):
"""Run failed thread."""
generic_queue.QueueItem.run(self)
self.started = True
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
try:
for ep_obj in self.segment:
log.info('Marking episode as failed: {ep}', {'ep': ep_obj.pretty_name()})
failed_history.mark_failed(ep_obj)
(release, provider) = failed_history.find_release(ep_obj)
if release:
failed_history.log_failed(release)
history.log_failed(ep_obj, release, provider)
failed_history.revert_episode(ep_obj)
log.info('Beginning failed download search for: {ep}',
{'ep': ep_obj.pretty_name()})
            # If it is wanted, self.down_cur_quality doesn't matter
# if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
search_result = search_providers(self.show, self.segment, forced_search=True)
if search_result:
for result in search_result:
# just use the first result for now
if result.seeders not in (-1, None) and result.leechers not in (-1, None):
log.info(
'Downloading {name} with {seeders} seeders and {leechers} leechers '
'and size {size} from {provider}', {
'name': result.name,
'seeders': result.seeders,
'leechers': result.leechers,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
else:
log.info(
'Downloading {name} with size: {size} from {provider}', {
'name': result.name,
'size': pretty_file_size(result.size),
'provider': result.provider.name,
}
)
# Set the search_type for the result.
result.search_type = SearchType.FAILED_SEARCH
# Create the queue item
snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)
# Add the queue item to the queue
app.manual_snatch_scheduler.action.add_item(snatch_queue_item)
                    # Wait for the snatch queue item to finish, then mirror its outcome.
                    while snatch_queue_item.success is None:
                        time.sleep(1)
                    self.success = bool(snatch_queue_item.success)
# give the CPU a break
time.sleep(common.cpu_presets[app.CPU_PRESET])
else:
log.info('No needed episodes found during failed search for: {name}',
{'name': self.show.name})
# TODO: Replace the catch all exception with a more specific one.
except Exception:
self.success = False
log.info(traceback.format_exc())
# Keep a list with the 100 last executed searches
fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
self.finish()
class ProperSearchQueueItem(generic_queue.QueueItem):
"""Proper search queue item class."""
def __init__(self, force, processed_propers, ignore_processed_propers):
"""Initialize the class."""
generic_queue.QueueItem.__init__(self, u'Proper Search', PROPER_SEARCH)
self.success = None
self.started = None
self.force = force
self.processed_propers = processed_propers
self.ignore_processed_propers = ignore_processed_propers
self.to_json.update({
'success': self.success,
'force': self.force
})
def run(self):
"""Run proper search thread."""
generic_queue.QueueItem.run(self)
self.started = True
try:
log.info('Beginning proper search for new episodes')
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
            # If forced, we should ignore existing processed propers
self.ignore_processed_propers = False
if self.force:
self.ignore_processed_propers = True
log.debug("Ignoring already processed propers as it's a forced search")
log.info('Using proper search days: {search_days}', {'search_days': app.PROPERS_SEARCH_DAYS})
propers = self._get_proper_results()
if propers:
self._download_propers(propers)
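            # toordinal() stores today's date as a proleptic Gregorian ordinal (an int);
            # e.g. datetime.date(2024, 1, 1).toordinal() == 738886.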
self._set_last_proper_search(datetime.datetime.today().toordinal())
run_at = ''
if app.proper_finder_scheduler.start_time is None:
run_in = app.proper_finder_scheduler.lastRun + \
app.proper_finder_scheduler.cycleTime - datetime.datetime.now()
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at = ', next check in approx. {0}'.format(
'{0}h, {1}m'.format(hours, minutes) if 0 < hours else '{0}m, {1}s'.format(minutes, seconds))
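            # Worked example: run_in.seconds == 5400 gives divmod(5400, 3600) == (1, 1800)
            # and divmod(1800, 60) == (30, 0), i.e. ', next check in approx. 1h, 30m'.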
log.info('Completed the search for new propers{run_at}', {'run_at': run_at})
# Push an update to any open Web UIs through the WebSocket
ws.Message('QueueItemUpdate', self.to_json).push()
# TODO: Remove the catch all exception.
except Exception:
self.success = False
log.debug(traceback.format_exc())
def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-branches, too-many-statements
"""Retrieve a list of recently aired episodes, and search for these episodes in the different providers."""
propers = {}
# For each provider get the list of propers
original_thread_name = threading.currentThread().name
providers = enabled_providers('backlog')
search_date = datetime.datetime.today() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
main_db_con = db.DBConnection()
if not app.POSTPONE_IF_NO_SUBS:
# Get the recently aired (last 2 days) shows from DB
recently_aired = main_db_con.select(
'SELECT indexer, showid, season, episode, status, airdate'
' FROM tv_episodes'
' WHERE airdate >= ?'
' AND status = ?',
[search_date.toordinal(), DOWNLOADED]
)
else:
# Get recently subtitled episodes (last 2 days) from DB
# Episode status becomes downloaded only after found subtitles
last_subtitled = search_date.strftime(History.date_format)
recently_aired = main_db_con.select('SELECT indexer_id AS indexer, showid, season, episode FROM history '
'WHERE date >= ? AND action = ?', [last_subtitled, SUBTITLED])
if not recently_aired:
log.info('No recently aired new episodes, nothing to search for')
return []
# Loop through the providers, and search for releases
for cur_provider in providers:
threading.currentThread().name = '{thread} :: [{provider}]'.format(thread=original_thread_name,
provider=cur_provider.name)
log.info('Searching for any new PROPER releases from {provider}', {'provider': cur_provider.name})
try:
cur_propers = cur_provider.find_propers(recently_aired)
except AuthException as e:
log.debug('Authentication error: {error}', {'error': ex(e)})
continue
            # If it hasn't already been added by a different provider, add the proper to the list
for proper in cur_propers:
name = self._sanitize_name(proper.name)
if name not in propers:
log.debug('Found new possible proper result: {name}', {'name': proper.name})
propers[name] = proper
threading.currentThread().name = original_thread_name
        # Take the list of unique propers and sort it by date, newest first
sorted_propers = sorted(list(itervalues(propers)), key=operator.attrgetter('date'), reverse=True)
final_propers = []
        # Keep only items from the last PROPERS_SEARCH_DAYS days in processed propers:
latest_proper = datetime.datetime.now() - datetime.timedelta(days=app.PROPERS_SEARCH_DAYS)
self.processed_propers = [p for p in self.processed_propers if p.get('date') >= latest_proper]
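        # Each entry in processed_propers is a dict of the form
        # {'name': <release name>, 'date': <datetime>}, as appended further down.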
# Get proper names from processed propers
processed_propers_names = [proper.get('name') for proper in self.processed_propers if proper.get('name')]
for cur_proper in sorted_propers:
if not self.ignore_processed_propers and cur_proper.name in processed_propers_names:
log.debug(u'Proper already processed. Skipping: {proper_name}', {'proper_name': cur_proper.name})
continue
try:
cur_proper.parse_result = NameParser().parse(cur_proper.name)
except (InvalidNameException, InvalidShowException) as error:
log.debug('{error}', {'error': error})
continue
if not cur_proper.parse_result.proper_tags:
log.info('Skipping non-proper: {name}', {'name': cur_proper.name})
continue
if not cur_proper.series.episodes.get(cur_proper.parse_result.season_number) or \
any([ep for ep in cur_proper.parse_result.episode_numbers
if not cur_proper.series.episodes[cur_proper.parse_result.season_number].get(ep)]):
log.info('Skipping proper for wrong season/episode: {name}', {'name': cur_proper.name})
continue
log.debug('Proper tags for {proper}: {tags}', {
'proper': cur_proper.name,
'tags': cur_proper.parse_result.proper_tags
})
if not cur_proper.parse_result.series_name:
log.debug('Ignoring invalid show: {name}', {'name': cur_proper.name})
if cur_proper.name not in processed_propers_names:
self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
continue
if not cur_proper.parse_result.episode_numbers:
log.debug('Ignoring full season instead of episode: {name}', {'name': cur_proper.name})
if cur_proper.name not in processed_propers_names:
self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
continue
log.debug('Successful match! Matched {original_name} to show {new_name}',
{'original_name': cur_proper.parse_result.original_name,
'new_name': cur_proper.parse_result.series.name
})
# Map the indexerid in the db to the show's indexerid
cur_proper.indexerid = cur_proper.parse_result.series.indexerid
# Map the indexer in the db to the show's indexer
cur_proper.indexer = cur_proper.parse_result.series.indexer
# Map our Proper instance
cur_proper.series = cur_proper.parse_result.series
cur_proper.actual_season = cur_proper.parse_result.season_number
cur_proper.actual_episodes = cur_proper.parse_result.episode_numbers
cur_proper.release_group = cur_proper.parse_result.release_group
cur_proper.version = cur_proper.parse_result.version
cur_proper.quality = cur_proper.parse_result.quality
cur_proper.proper_tags = cur_proper.parse_result.proper_tags
cur_proper.update_search_result()
# filter release, in this case, it's just a quality gate. As we only send one result.
wanted_results = filter_results(cur_proper)
best_result = pick_result(wanted_results)
if not best_result:
log.info('Rejected proper: {name}', {'name': cur_proper.name})
if cur_proper.name not in processed_propers_names:
self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
continue
# only get anime proper if it has release group and version
if best_result.series.is_anime:
if not best_result.release_group and best_result.version == -1:
log.info('Ignoring proper without release group and version: {name}', {'name': best_result.name})
if cur_proper.name not in processed_propers_names:
self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
continue
# check if we have the episode as DOWNLOADED
main_db_con = db.DBConnection()
sql_results = main_db_con.select('SELECT quality, release_name '
'FROM tv_episodes WHERE indexer = ? '
'AND showid = ? AND season = ? '
'AND episode = ? AND status = ?',
[best_result.indexer,
best_result.series.indexerid,
best_result.actual_season,
best_result.actual_episodes[0],
DOWNLOADED])
if not sql_results:
log.info("Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}", {
'name': best_result.name
})
continue
# only keep the proper if we have already downloaded an episode with the same quality
old_quality = int(sql_results[0]['quality'])
if old_quality != best_result.quality:
log.info('Ignoring proper because quality is different: {name}', {'name': best_result.name})
if cur_proper.name not in processed_propers_names:
self.processed_propers.append({'name': cur_proper.name, 'date': cur_proper.date})
continue
# only keep the proper if we have already downloaded an episode with the same codec
release_name = sql_results[0]['release_name']
if release_name:
release_name_guess = NameParser()._parse_string(release_name)
current_codec = release_name_guess.video_codec
# Ignore proper if codec differs from downloaded release codec
if all([current_codec, best_result.parse_result.video_codec,
best_result.parse_result.video_codec != current_codec]):
log.info('Ignoring proper because codec is different: {name}', {'name': best_result.name})
if best_result.name not in processed_propers_names:
self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
continue
streaming_service = release_name_guess.guess.get(u'streaming_service')
# Ignore proper if streaming service differs from downloaded release streaming service
if best_result.parse_result.guess.get(u'streaming_service') != streaming_service:
log.info('Ignoring proper because streaming service is different: {name}',
{'name': best_result.name})
if best_result.name not in processed_propers_names:
self.processed_propers.append({'name': best_result.name, 'date': best_result.date})
continue
else:
log.debug("Coudn't find a release name in database. Skipping codec comparison for: {name}", {