test_job_queue.py
import sys
from cStringIO import StringIO

from nose.tools import *

from job_queue import Job_Queue, try_using


class Capturing(list):
    """Context manager that captures everything written to stdout as a list of lines."""

    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = StringIO()
        return self

    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        sys.stdout = self._stdout


class Test_Job_Queue():
    # Each queued job is wrapped in a Bucket; the base class runs jobs as processes.
    from multiprocessing import Process as Bucket

    def test_init(self):
        jobs = Job_Queue(5)
        assert_equal(jobs._queued, [])
        assert_equal(jobs._running, [])
        assert_equal(jobs._completed, [])
        assert_equal(jobs._num_of_jobs, 0)
        assert_equal(jobs._max, 5)
        assert_false(jobs._finished)
        assert_false(jobs._closed)
        assert_false(jobs._debug)
        jobs._debug = True
        assert_true(jobs._debug)

    def populate(self, queue_size=5, job_size=10, debug=False):
        """Build a Job_Queue of the given size and append `job_size` trivial jobs."""
        jobs = Job_Queue(queue_size)
        jobs._debug = debug

        def foo():
            return 10

        for x in range(job_size):
            jobs.append(self.Bucket(
                target=foo,
                args=[],
                kwargs={},
            ))
        return jobs

    def test_empty(self):
        """Covers the case where fewer jobs are added than the size of the queue."""
        jobs = self.populate(2, 1)
        jobs.close()
        jobs.start()

    @raises(Exception)
    def test_some(self):
        # Calling start() on a queue that was never close()d is expected to raise.
        jobs = self.populate()
        jobs.start()

    def test_length(self):
        jobs = self.populate()
        assert_equal(jobs._num_of_jobs, 10)
        assert_equal(len(jobs._queued), 10)
        assert_equal(len(jobs), 10)

    def test_closed(self):
        jobs = self.populate()
        assert_false(jobs._closed)
        jobs.close()
        assert_true(jobs._closed)

    def test_runs(self):
        jobs = self.populate()
        jobs.close()
        assert_false(jobs._finished)
        jobs.start()
        assert_true(jobs._finished)
        assert_false(jobs._all_alive())
        assert_equal(jobs._running, [])

    def test_runs_debug(self):
        # With debug enabled the queue prints progress; capture stdout and
        # check that something was actually written.
        with Capturing() as output:
            jobs = self.populate(debug=True)
            jobs.close()
            assert_false(jobs._finished)
            jobs.start()
            assert_true(jobs._finished)
            assert_false(jobs._all_alive())
            assert_equal(jobs._running, [])
        assert_true(len(output) > 0)

    def test_runs_try_using(self):
        try_using("multiprocessing")
        try_using("threading")


class Test_Job_Queue_Threads(Test_Job_Queue):
    # Re-run the whole suite with threads instead of processes as the Bucket.
    from threading import Thread as Bucket
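
For reference, here is a minimal usage sketch of the Job_Queue API that these tests exercise. It assumes job_queue.py provides Job_Queue(max), append(), close(), and start() behaving as asserted above; the greet() job and the argument values are made up for illustration.

from multiprocessing import Process

from job_queue import Job_Queue


def greet(name):
    # Hypothetical job body; any callable passed as a Process target will do.
    print("hello, %s" % name)


if __name__ == "__main__":
    queue = Job_Queue(3)  # queue size, presumably the number of jobs run concurrently
    for name in ["alice", "bob", "carol"]:
        queue.append(Process(target=greet, args=[name], kwargs={}))
    queue.close()   # no more jobs will be appended
    queue.start()   # per test_runs, returns once every job has finished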