fix message when trying new table due to mismatch in SQLiteDB...
MinRK
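The change below is in the table-renaming loop of SQLiteDB._init_db: when an existing table doesn't match the expected schema, the backend retries with <table>_1, <table>_2, and so on, but the warning previously always named the original table rather than the one just rejected. A minimal standalone sketch of the corrected behaviour follows; the stub check_table and the table names are illustrative, only the loop shape and the message text come from the diff.

import logging

logging.basicConfig(format="%(levelname)s: %(message)s")
log = logging.getLogger("sqlitedb-sketch")

def check_table(name, bad_tables):
    """Stand-in for SQLiteDB._check_table(): True if `name` is usable."""
    return name not in bad_tables

# pretend two stale tables with an old schema already exist
bad_tables = set(["_abc123", "_abc123_1"])

table = first_table = previous_table = "_abc123"
i = 0
while not check_table(table, bad_tables):
    i += 1
    table = "%s_%i" % (first_table, i)
    # the fix: warn about the table that was actually just rejected
    # (previous_table), not always the original first_table
    log.warning("Table %s exists and doesn't match db format, trying %s",
                previous_table, table)
    previous_table = table

print("using table: %s" % table)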
@@ -1,411 +1,412 @@
1 1 """A TaskRecord backend using sqlite3
2 2
3 3 Authors:
4 4
5 5 * Min RK
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 # Copyright (C) 2011 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-----------------------------------------------------------------------------
13 13
14 14 import json
15 15 import os
16 16 import cPickle as pickle
17 17 from datetime import datetime
18 18
19 19 try:
20 20 import sqlite3
21 21 except ImportError:
22 22 sqlite3 = None
23 23
24 24 from zmq.eventloop import ioloop
25 25
26 26 from IPython.utils.traitlets import Unicode, Instance, List, Dict
27 27 from .dictdb import BaseDB
28 28 from IPython.utils.jsonutil import date_default, extract_dates, squash_dates
29 29
30 30 #-----------------------------------------------------------------------------
31 31 # SQLite operators, adapters, and converters
32 32 #-----------------------------------------------------------------------------
33 33
34 34 try:
35 35 buffer
36 36 except NameError:
37 37 # py3k
38 38 buffer = memoryview
39 39
40 40 operators = {
41 41 '$lt' : "<",
42 42 '$gt' : ">",
43 43 # null is handled weird with ==,!=
44 44 '$eq' : "=",
45 45 '$ne' : "!=",
46 46 '$lte': "<=",
47 47 '$gte': ">=",
48 48 '$in' : ('=', ' OR '),
49 49 '$nin': ('!=', ' AND '),
50 50 # '$all': None,
51 51 # '$mod': None,
52 52 # '$exists' : None
53 53 }
54 54 null_operators = {
55 55 '=' : "IS NULL",
56 56 '!=' : "IS NOT NULL",
57 57 }
58 58
59 59 def _adapt_dict(d):
60 60 return json.dumps(d, default=date_default)
61 61
62 62 def _convert_dict(ds):
63 63 if ds is None:
64 64 return ds
65 65 else:
66 66 if isinstance(ds, bytes):
67 67 # If I understand the sqlite doc correctly, this will always be utf8
68 68 ds = ds.decode('utf8')
69 69 return extract_dates(json.loads(ds))
70 70
71 71 def _adapt_bufs(bufs):
72 72 # this is *horrible*
73 73 # copy buffers into single list and pickle it:
74 74 if bufs and isinstance(bufs[0], (bytes, buffer)):
75 75 return sqlite3.Binary(pickle.dumps(map(bytes, bufs),-1))
76 76 elif bufs:
77 77 return bufs
78 78 else:
79 79 return None
80 80
81 81 def _convert_bufs(bs):
82 82 if bs is None:
83 83 return []
84 84 else:
85 85 return pickle.loads(bytes(bs))
86 86
87 87 #-----------------------------------------------------------------------------
88 88 # SQLiteDB class
89 89 #-----------------------------------------------------------------------------
90 90
91 91 class SQLiteDB(BaseDB):
92 92 """SQLite3 TaskRecord backend."""
93 93
94 94 filename = Unicode('tasks.db', config=True,
95 95 help="""The filename of the sqlite task database. [default: 'tasks.db']""")
96 96 location = Unicode('', config=True,
97 97 help="""The directory containing the sqlite task database. The default
98 98 is to use the cluster_dir location.""")
99 99 table = Unicode("", config=True,
100 100 help="""The SQLite Table to use for storing tasks for this session. If unspecified,
101 101 a new table will be created with the Hub's IDENT. Specifying the table will result
102 102 in tasks from previous sessions being available via Clients' db_query and
103 103 get_result methods.""")
104 104
105 105 if sqlite3 is not None:
106 106 _db = Instance('sqlite3.Connection')
107 107 else:
108 108 _db = None
109 109 # the ordered list of column names
110 110 _keys = List(['msg_id' ,
111 111 'header' ,
112 112 'content',
113 113 'buffers',
114 114 'submitted',
115 115 'client_uuid' ,
116 116 'engine_uuid' ,
117 117 'started',
118 118 'completed',
119 119 'resubmitted',
120 120 'received',
121 121 'result_header' ,
122 122 'result_content' ,
123 123 'result_buffers' ,
124 124 'queue' ,
125 125 'pyin' ,
126 126 'pyout',
127 127 'pyerr',
128 128 'stdout',
129 129 'stderr',
130 130 ])
131 131 # sqlite datatypes for checking that db is current format
132 132 _types = Dict({'msg_id' : 'text' ,
133 133 'header' : 'dict text',
134 134 'content' : 'dict text',
135 135 'buffers' : 'bufs blob',
136 136 'submitted' : 'timestamp',
137 137 'client_uuid' : 'text',
138 138 'engine_uuid' : 'text',
139 139 'started' : 'timestamp',
140 140 'completed' : 'timestamp',
141 141 'resubmitted' : 'timestamp',
142 142 'received' : 'timestamp',
143 143 'result_header' : 'dict text',
144 144 'result_content' : 'dict text',
145 145 'result_buffers' : 'bufs blob',
146 146 'queue' : 'text',
147 147 'pyin' : 'text',
148 148 'pyout' : 'text',
149 149 'pyerr' : 'text',
150 150 'stdout' : 'text',
151 151 'stderr' : 'text',
152 152 })
153 153
154 154 def __init__(self, **kwargs):
155 155 super(SQLiteDB, self).__init__(**kwargs)
156 156 if sqlite3 is None:
157 157 raise ImportError("SQLiteDB requires sqlite3")
158 158 if not self.table:
159 159 # use session, and prefix _, since starting with # is illegal
160 160 self.table = '_'+self.session.replace('-','_')
161 161 if not self.location:
162 162 # get current profile
163 163 from IPython.core.application import BaseIPythonApplication
164 164 if BaseIPythonApplication.initialized():
165 165 app = BaseIPythonApplication.instance()
166 166 if app.profile_dir is not None:
167 167 self.location = app.profile_dir.location
168 168 else:
169 169 self.location = u'.'
170 170 else:
171 171 self.location = u'.'
172 172 self._init_db()
173 173
174 174 # register db commit as 2s periodic callback
175 175 # to prevent clogging pipes
176 176 # assumes we are being run in a zmq ioloop app
177 177 loop = ioloop.IOLoop.instance()
178 178 pc = ioloop.PeriodicCallback(self._db.commit, 2000, loop)
179 179 pc.start()
180 180
181 181 def _defaults(self, keys=None):
182 182 """create an empty record"""
183 183 d = {}
184 184 keys = self._keys if keys is None else keys
185 185 for key in keys:
186 186 d[key] = None
187 187 return d
188 188
189 189 def _check_table(self):
190 190 """Ensure that an incorrect table doesn't exist
191 191
192 192 If a bad (old) table does exist, return False
193 193 """
194 194 cursor = self._db.execute("PRAGMA table_info(%s)"%self.table)
195 195 lines = cursor.fetchall()
196 196 if not lines:
197 197 # table does not exist
198 198 return True
199 199 types = {}
200 200 keys = []
201 201 for line in lines:
202 202 keys.append(line[1])
203 203 types[line[1]] = line[2]
204 204 if self._keys != keys:
205 205 # key mismatch
206 206 self.log.warn('keys mismatch')
207 207 return False
208 208 for key in self._keys:
209 209 if types[key] != self._types[key]:
210 210 self.log.warn(
211 211 'type mismatch: %s: %s != %s'%(key,types[key],self._types[key])
212 212 )
213 213 return False
214 214 return True
215 215
216 216 def _init_db(self):
217 217 """Connect to the database and get new session number."""
218 218 # register adapters
219 219 sqlite3.register_adapter(dict, _adapt_dict)
220 220 sqlite3.register_converter('dict', _convert_dict)
221 221 sqlite3.register_adapter(list, _adapt_bufs)
222 222 sqlite3.register_converter('bufs', _convert_bufs)
223 223 # connect to the db
224 224 dbfile = os.path.join(self.location, self.filename)
225 225 self._db = sqlite3.connect(dbfile, detect_types=sqlite3.PARSE_DECLTYPES,
226 226 # isolation_level = None)#,
227 227 cached_statements=64)
228 228 # print dir(self._db)
229 first_table = self.table
229 first_table = previous_table = self.table
230 230 i=0
231 231 while not self._check_table():
232 232 i+=1
233 233 self.table = first_table+'_%i'%i
234 234 self.log.warn(
235 235 "Table %s exists and doesn't match db format, trying %s"%
236 (first_table,self.table)
236 (previous_table, self.table)
237 237 )
238 previous_table = self.table
238 239
239 240 self._db.execute("""CREATE TABLE IF NOT EXISTS %s
240 241 (msg_id text PRIMARY KEY,
241 242 header dict text,
242 243 content dict text,
243 244 buffers bufs blob,
244 245 submitted timestamp,
245 246 client_uuid text,
246 247 engine_uuid text,
247 248 started timestamp,
248 249 completed timestamp,
249 250 resubmitted timestamp,
250 251 received timestamp,
251 252 result_header dict text,
252 253 result_content dict text,
253 254 result_buffers bufs blob,
254 255 queue text,
255 256 pyin text,
256 257 pyout text,
257 258 pyerr text,
258 259 stdout text,
259 260 stderr text)
260 261 """%self.table)
261 262 self._db.commit()
262 263
263 264 def _dict_to_list(self, d):
264 265 """turn a mongodb-style record dict into a list."""
265 266
266 267 return [ d[key] for key in self._keys ]
267 268
268 269 def _list_to_dict(self, line, keys=None):
269 270 """Inverse of dict_to_list"""
270 271 keys = self._keys if keys is None else keys
271 272 d = self._defaults(keys)
272 273 for key,value in zip(keys, line):
273 274 d[key] = value
274 275
275 276 return d
276 277
277 278 def _render_expression(self, check):
278 279 """Turn a mongodb-style search dict into an SQL query."""
279 280 expressions = []
280 281 args = []
281 282
282 283 skeys = set(check.keys())
283 284 skeys.difference_update(set(self._keys))
284 285 skeys.difference_update(set(['buffers', 'result_buffers']))
285 286 if skeys:
286 287 raise KeyError("Illegal testing key(s): %s"%skeys)
287 288
288 289 for name,sub_check in check.iteritems():
289 290 if isinstance(sub_check, dict):
290 291 for test,value in sub_check.iteritems():
291 292 try:
292 293 op = operators[test]
293 294 except KeyError:
294 295 raise KeyError("Unsupported operator: %r"%test)
295 296 if isinstance(op, tuple):
296 297 op, join = op
297 298
298 299 if value is None and op in null_operators:
299 300 expr = "%s %s" % (name, null_operators[op])
300 301 else:
301 302 expr = "%s %s ?"%(name, op)
302 303 if isinstance(value, (tuple,list)):
303 304 if op in null_operators and any([v is None for v in value]):
304 305 # equality tests don't work with NULL
305 306 raise ValueError("Cannot use %r test with NULL values on SQLite backend"%test)
306 307 expr = '( %s )'%( join.join([expr]*len(value)) )
307 308 args.extend(value)
308 309 else:
309 310 args.append(value)
310 311 expressions.append(expr)
311 312 else:
312 313 # it's an equality check
313 314 if sub_check is None:
314 315 expressions.append("%s IS NULL" % name)
315 316 else:
316 317 expressions.append("%s = ?"%name)
317 318 args.append(sub_check)
318 319
319 320 expr = " AND ".join(expressions)
320 321 return expr, args
321 322
322 323 def add_record(self, msg_id, rec):
323 324 """Add a new Task Record, by msg_id."""
324 325 d = self._defaults()
325 326 d.update(rec)
326 327 d['msg_id'] = msg_id
327 328 line = self._dict_to_list(d)
328 329 tups = '(%s)'%(','.join(['?']*len(line)))
329 330 self._db.execute("INSERT INTO %s VALUES %s"%(self.table, tups), line)
330 331 # self._db.commit()
331 332
332 333 def get_record(self, msg_id):
333 334 """Get a specific Task Record, by msg_id."""
334 335 cursor = self._db.execute("""SELECT * FROM %s WHERE msg_id==?"""%self.table, (msg_id,))
335 336 line = cursor.fetchone()
336 337 if line is None:
337 338 raise KeyError("No such msg: %r"%msg_id)
338 339 return self._list_to_dict(line)
339 340
340 341 def update_record(self, msg_id, rec):
341 342 """Update the data in an existing record."""
342 343 query = "UPDATE %s SET "%self.table
343 344 sets = []
344 345 keys = sorted(rec.keys())
345 346 values = []
346 347 for key in keys:
347 348 sets.append('%s = ?'%key)
348 349 values.append(rec[key])
349 350 query += ', '.join(sets)
350 351 query += ' WHERE msg_id == ?'
351 352 values.append(msg_id)
352 353 self._db.execute(query, values)
353 354 # self._db.commit()
354 355
355 356 def drop_record(self, msg_id):
356 357 """Remove a record from the DB."""
357 358 self._db.execute("""DELETE FROM %s WHERE msg_id==?"""%self.table, (msg_id,))
358 359 # self._db.commit()
359 360
360 361 def drop_matching_records(self, check):
361 362 """Remove a record from the DB."""
362 363 expr,args = self._render_expression(check)
363 364 query = "DELETE FROM %s WHERE %s"%(self.table, expr)
364 365 self._db.execute(query,args)
365 366 # self._db.commit()
366 367
367 368 def find_records(self, check, keys=None):
368 369 """Find records matching a query dict, optionally extracting subset of keys.
369 370
370 371 Returns list of matching records.
371 372
372 373 Parameters
373 374 ----------
374 375
375 376 check: dict
376 377 mongodb-style query argument
377 378 keys: list of strs [optional]
378 379 if specified, the subset of keys to extract. msg_id will *always* be
379 380 included.
380 381 """
381 382 if keys:
382 383 bad_keys = [ key for key in keys if key not in self._keys ]
383 384 if bad_keys:
384 385 raise KeyError("Bad record key(s): %s"%bad_keys)
385 386
386 387 if keys:
387 388 # ensure msg_id is present and first:
388 389 if 'msg_id' in keys:
389 390 keys.remove('msg_id')
390 391 keys.insert(0, 'msg_id')
391 392 req = ', '.join(keys)
392 393 else:
393 394 req = '*'
394 395 expr,args = self._render_expression(check)
395 396 query = """SELECT %s FROM %s WHERE %s"""%(req, self.table, expr)
396 397 cursor = self._db.execute(query, args)
397 398 matches = cursor.fetchall()
398 399 records = []
399 400 for line in matches:
400 401 rec = self._list_to_dict(line, keys)
401 402 records.append(rec)
402 403 return records
403 404
404 405 def get_history(self):
405 406 """get all msg_ids, ordered by time submitted."""
406 407 query = """SELECT msg_id FROM %s ORDER by submitted ASC"""%self.table
407 408 cursor = self._db.execute(query)
408 409 # will be a list of length 1 tuples
409 410 return [ tup[0] for tup in cursor.fetchall()]
410 411
411 412 __all__ = ['SQLiteDB'] No newline at end of file
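Between the two files, a note on how this backend is typically selected: the table, location, and filename traits above are config=True, so they can be set from a controller profile. A hedged sketch of an ipcontroller_config.py fragment follows; get_config() is provided by IPython when it executes the file, and HubFactory.db_class is assumed to be the option that picks the Hub's database backend (it is not shown in this diff).

# ipcontroller_config.py -- hedged sketch; only the SQLiteDB trait names are
# taken from the diff above, the rest is assumed.
c = get_config()  # injected by IPython when it loads this config file

# select the SQLite task-record backend for the Hub (assumed option name)
c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'

# where the database file lives, and its name
c.SQLiteDB.location = u'/path/to/profile_dir'   # hypothetical path
c.SQLiteDB.filename = u'tasks.db'

# pin the table name: per the help string above, this makes records from
# previous sessions visible to Clients' db_query/get_result
c.SQLiteDB.table = u'_my_cluster_tasks'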
@@ -1,240 +1,243 @@
1 1 """Tests for db backends
2 2
3 3 Authors:
4 4
5 5 * Min RK
6 6 """
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 from __future__ import division
20 20
21 import logging
21 22 import os
22 23 import tempfile
23 24 import time
24 25
25 26 from datetime import datetime, timedelta
26 27 from unittest import TestCase
27 28
28 29 from IPython.parallel import error
29 30 from IPython.parallel.controller.dictdb import DictDB
30 31 from IPython.parallel.controller.sqlitedb import SQLiteDB
31 32 from IPython.parallel.controller.hub import init_record, empty_record
32 33
33 34 from IPython.testing import decorators as dec
34 35 from IPython.zmq.session import Session
35 36
36 37
37 38 #-------------------------------------------------------------------------------
38 39 # TestCases
39 40 #-------------------------------------------------------------------------------
40 41
41 42
42 43 def setup():
43 44 global temp_db
44 45 temp_db = tempfile.NamedTemporaryFile(suffix='.db').name
45 46
46 47
47 48 class TestDictBackend(TestCase):
48 49 def setUp(self):
49 50 self.session = Session()
50 51 self.db = self.create_db()
51 52 self.load_records(16)
52 53
53 54 def create_db(self):
54 55 return DictDB()
55 56
56 57 def load_records(self, n=1):
57 58 """load n records for testing"""
58 59 #sleep 1/10 s, to ensure timestamp is different to previous calls
59 60 time.sleep(0.1)
60 61 msg_ids = []
61 62 for i in range(n):
62 63 msg = self.session.msg('apply_request', content=dict(a=5))
63 64 msg['buffers'] = []
64 65 rec = init_record(msg)
65 66 msg_id = msg['header']['msg_id']
66 67 msg_ids.append(msg_id)
67 68 self.db.add_record(msg_id, rec)
68 69 return msg_ids
69 70
70 71 def test_add_record(self):
71 72 before = self.db.get_history()
72 73 self.load_records(5)
73 74 after = self.db.get_history()
74 75 self.assertEquals(len(after), len(before)+5)
75 76 self.assertEquals(after[:-5],before)
76 77
77 78 def test_drop_record(self):
78 79 msg_id = self.load_records()[-1]
79 80 rec = self.db.get_record(msg_id)
80 81 self.db.drop_record(msg_id)
81 82 self.assertRaises(KeyError,self.db.get_record, msg_id)
82 83
83 84 def _round_to_millisecond(self, dt):
84 85 """necessary because mongodb rounds microseconds"""
85 86 micro = dt.microsecond
86 87 extra = int(str(micro)[-3:])
87 88 return dt - timedelta(microseconds=extra)
88 89
89 90 def test_update_record(self):
90 91 now = self._round_to_millisecond(datetime.now())
91 92 #
92 93 msg_id = self.db.get_history()[-1]
93 94 rec1 = self.db.get_record(msg_id)
94 95 data = {'stdout': 'hello there', 'completed' : now}
95 96 self.db.update_record(msg_id, data)
96 97 rec2 = self.db.get_record(msg_id)
97 98 self.assertEquals(rec2['stdout'], 'hello there')
98 99 self.assertEquals(rec2['completed'], now)
99 100 rec1.update(data)
100 101 self.assertEquals(rec1, rec2)
101 102
102 103 # def test_update_record_bad(self):
103 104 # """test updating nonexistant records"""
104 105 # msg_id = str(uuid.uuid4())
105 106 # data = {'stdout': 'hello there'}
106 107 # self.assertRaises(KeyError, self.db.update_record, msg_id, data)
107 108
108 109 def test_find_records_dt(self):
109 110 """test finding records by date"""
110 111 hist = self.db.get_history()
111 112 middle = self.db.get_record(hist[len(hist)//2])
112 113 tic = middle['submitted']
113 114 before = self.db.find_records({'submitted' : {'$lt' : tic}})
114 115 after = self.db.find_records({'submitted' : {'$gte' : tic}})
115 116 self.assertEquals(len(before)+len(after),len(hist))
116 117 for b in before:
117 118 self.assertTrue(b['submitted'] < tic)
118 119 for a in after:
119 120 self.assertTrue(a['submitted'] >= tic)
120 121 same = self.db.find_records({'submitted' : tic})
121 122 for s in same:
122 123 self.assertTrue(s['submitted'] == tic)
123 124
124 125 def test_find_records_keys(self):
125 126 """test extracting subset of record keys"""
126 127 found = self.db.find_records({'msg_id': {'$ne' : ''}},keys=['submitted', 'completed'])
127 128 for rec in found:
128 129 self.assertEquals(set(rec.keys()), set(['msg_id', 'submitted', 'completed']))
129 130
130 131 def test_find_records_msg_id(self):
131 132 """ensure msg_id is always in found records"""
132 133 found = self.db.find_records({'msg_id': {'$ne' : ''}},keys=['submitted', 'completed'])
133 134 for rec in found:
134 135 self.assertTrue('msg_id' in rec.keys())
135 136 found = self.db.find_records({'msg_id': {'$ne' : ''}},keys=['submitted'])
136 137 for rec in found:
137 138 self.assertTrue('msg_id' in rec.keys())
138 139 found = self.db.find_records({'msg_id': {'$ne' : ''}},keys=['msg_id'])
139 140 for rec in found:
140 141 self.assertTrue('msg_id' in rec.keys())
141 142
142 143 def test_find_records_in(self):
143 144 """test finding records with '$in','$nin' operators"""
144 145 hist = self.db.get_history()
145 146 even = hist[::2]
146 147 odd = hist[1::2]
147 148 recs = self.db.find_records({ 'msg_id' : {'$in' : even}})
148 149 found = [ r['msg_id'] for r in recs ]
149 150 self.assertEquals(set(even), set(found))
150 151 recs = self.db.find_records({ 'msg_id' : {'$nin' : even}})
151 152 found = [ r['msg_id'] for r in recs ]
152 153 self.assertEquals(set(odd), set(found))
153 154
154 155 def test_get_history(self):
155 156 msg_ids = self.db.get_history()
156 157 latest = datetime(1984,1,1)
157 158 for msg_id in msg_ids:
158 159 rec = self.db.get_record(msg_id)
159 160 newt = rec['submitted']
160 161 self.assertTrue(newt >= latest)
161 162 latest = newt
162 163 msg_id = self.load_records(1)[-1]
163 164 self.assertEquals(self.db.get_history()[-1],msg_id)
164 165
165 166 def test_datetime(self):
166 167 """get/set timestamps with datetime objects"""
167 168 msg_id = self.db.get_history()[-1]
168 169 rec = self.db.get_record(msg_id)
169 170 self.assertTrue(isinstance(rec['submitted'], datetime))
170 171 self.db.update_record(msg_id, dict(completed=datetime.now()))
171 172 rec = self.db.get_record(msg_id)
172 173 self.assertTrue(isinstance(rec['completed'], datetime))
173 174
174 175 def test_drop_matching(self):
175 176 msg_ids = self.load_records(10)
176 177 query = {'msg_id' : {'$in':msg_ids}}
177 178 self.db.drop_matching_records(query)
178 179 recs = self.db.find_records(query)
179 180 self.assertEquals(len(recs), 0)
180 181
181 182 def test_null(self):
182 183 """test None comparison queries"""
183 184 msg_ids = self.load_records(10)
184 185
185 186 query = {'msg_id' : None}
186 187 recs = self.db.find_records(query)
187 188 self.assertEquals(len(recs), 0)
188 189
189 190 query = {'msg_id' : {'$ne' : None}}
190 191 recs = self.db.find_records(query)
191 192 self.assertTrue(len(recs) >= 10)
192 193
193 194 def test_pop_safe_get(self):
194 195 """editing query results shouldn't affect record [get]"""
195 196 msg_id = self.db.get_history()[-1]
196 197 rec = self.db.get_record(msg_id)
197 198 rec.pop('buffers')
198 199 rec['garbage'] = 'hello'
199 200 rec2 = self.db.get_record(msg_id)
200 201 self.assertTrue('buffers' in rec2)
201 202 self.assertFalse('garbage' in rec2)
202 203
203 204 def test_pop_safe_find(self):
204 205 """editing query results shouldn't affect record [find]"""
205 206 msg_id = self.db.get_history()[-1]
206 207 rec = self.db.find_records({'msg_id' : msg_id})[0]
207 208 rec.pop('buffers')
208 209 rec['garbage'] = 'hello'
209 210 rec2 = self.db.find_records({'msg_id' : msg_id})[0]
210 211 self.assertTrue('buffers' in rec2)
211 212 self.assertFalse('garbage' in rec2)
212 213
213 214 def test_pop_safe_find_keys(self):
214 215 """editing query results shouldn't affect record [find+keys]"""
215 216 msg_id = self.db.get_history()[-1]
216 217 rec = self.db.find_records({'msg_id' : msg_id}, keys=['buffers'])[0]
217 218 rec.pop('buffers')
218 219 rec['garbage'] = 'hello'
219 220 rec2 = self.db.find_records({'msg_id' : msg_id})[0]
220 221 self.assertTrue('buffers' in rec2)
221 222 self.assertFalse('garbage' in rec2)
222 223
223 224
224 225 class TestSQLiteBackend(TestDictBackend):
225 226
226 227 @dec.skip_without('sqlite3')
227 228 def create_db(self):
228 229 location, fname = os.path.split(temp_db)
229 return SQLiteDB(location=location, fname=fname)
230 log = logging.getLogger('test')
231 log.setLevel(logging.CRITICAL)
232 return SQLiteDB(location=location, fname=fname, log=log)
230 233
231 234 def tearDown(self):
232 235 self.db._db.close()
233 236
234 237
235 238 def teardown():
236 239 """cleanup task db file after all tests have run"""
237 240 try:
238 241 os.remove(temp_db)
239 242 except:
240 243 pass
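As a companion to the tests above, a hedged standalone sketch of driving the backend directly; the import path, trait names, and record API come from the diffs, while the ids and values are made up.

import logging
import os
import tempfile

from IPython.parallel.controller.sqlitedb import SQLiteDB

# silence the "Table ... doesn't match db format" warnings,
# just as TestSQLiteBackend.create_db now does
log = logging.getLogger('sqlitedb-demo')
log.setLevel(logging.CRITICAL)

# throwaway db file, mirroring the tests' setup()
path = tempfile.NamedTemporaryFile(suffix='.db').name
location, filename = os.path.split(path)

db = SQLiteDB(location=location, filename=filename, log=log)

# minimal round trip through the TaskRecord API shown in the first file
db.add_record('demo-msg-id', {'stdout': 'hello there'})
rec = db.get_record('demo-msg-id')
print(rec['stdout'])                      # -> hello there

# mongodb-style query, translated to SQL by _render_expression
found = db.find_records({'stdout': {'$ne': None}}, keys=['stdout'])
print(len(found))                         # -> 1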