#!/usr/bin/env python
from __future__ import absolute_import
import datetime
import json
import os
import re
import shutil
import sqlite3
import warnings
from subprocess import Popen, PIPE
from textwrap import dedent
from unittest import TestCase, main
import scraperwiki
import six
import sys

# scraperwiki.sql._State.echo = True

DB_NAME = 'scraperwiki.sqlite'


class Setup(TestCase):
    def test_setup(self):
        try:
            os.remove('scraperwiki.sqlite')
        except OSError:
            pass


# called TestAAAWarning so that it gets run first by nosetests,
# which we need, otherwise the warning has already happened.
class TestAAAWarning(TestCase):
    def test_save_no_warn(self):
        with warnings.catch_warnings():
            warnings.simplefilter("error")
            scraperwiki.sql.save(['id'], dict(id=4, tumble='weed'),
                                 table_name="warning_test")


class TestSaveGetVar(TestCase):
    def savegetvar(self, var):
        scraperwiki.sql.save_var(u"weird\u1234", var)
        self.assertEqual(scraperwiki.sql.get_var(u"weird\u1234"), var)

    def test_string(self):
        self.savegetvar(u"asdio\u1234")

    def test_int(self):
        self.savegetvar(1)

    def test_float(self):
        self.savegetvar(1.1)

    def test_bool(self):
        self.savegetvar(False)

    def test_bool2(self):
        self.savegetvar(True)

    def test_bytes(self):
        self.savegetvar(b"asodpa\x00\x22")

    def test_date(self):
        date1 = datetime.datetime.now()
        date2 = datetime.date.today()
        scraperwiki.sql.save_var(u"weird\u1234", date1)
        self.assertEqual(scraperwiki.sql.get_var(u"weird\u1234"), six.text_type(date1))
        scraperwiki.sql.save_var(u"weird\u1234", date2)
        self.assertEqual(scraperwiki.sql.get_var(u"weird\u1234"), six.text_type(date2))

    def test_save_multiple_values(self):
        scraperwiki.sql.save_var(u'foo\xc3', u'hello')
        scraperwiki.sql.save_var(u'bar', u'goodbye\u1234')
        self.assertEqual(u'hello', scraperwiki.sql.get_var(u'foo\xc3'))
        self.assertEqual(u'goodbye\u1234', scraperwiki.sql.get_var(u'bar'))


class TestGetNonexistantVar(TestCase):
    def test_get(self):
        self.assertIsNone(scraperwiki.sql.get_var(u'meatball\xff'))


class TestSaveVar(TestCase):
    def setUp(self):
        super(TestSaveVar, self).setUp()
        scraperwiki.sql.save_var(u"birthday\xfe", u"\u1234November 30, 1888")
        connection = sqlite3.connect(DB_NAME)
        self.cursor = connection.cursor()

    def test_insert(self):
        self.cursor.execute(u"""
          SELECT name, value_blob, type
          FROM `swvariables`
          WHERE name == "birthday\xfe"
        """)
        ((colname, value, _type),) = self.cursor.fetchall()
        expected = [(u"birthday\xfe", u"\u1234November 30, 1888", "text",)]
        observed = [(colname, type(b'')(value).decode('utf-8'), _type)]
        self.assertEqual(observed, expected)


class SaveAndCheck(TestCase):
    def save_and_check(self, dataIn, tableIn, dataOut, tableOut=None, twice=True):
        if tableOut is None:
            tableOut = '[' + tableIn + ']'

        # Insert
        with scraperwiki.sql.Transaction():
            scraperwiki.sql.save([], dataIn, tableIn)

        # Observe with pysqlite
        connection = sqlite3.connect(DB_NAME)
        cursor = connection.cursor()
        cursor.execute(u"SELECT * FROM %s" % tableOut)
        observed1 = cursor.fetchall()
        connection.close()

        if twice:
            # Observe using this module
            observed2 = scraperwiki.sql.select(u'* FROM %s' % tableOut)

            # Check
            expected1 = dataOut
            expected2 = [dataIn] if isinstance(dataIn, dict) else dataIn

            self.assertListEqual(observed1, expected1)
            self.assertListEqual(observed2, expected2)


class SaveAndSelect(TestCase):
    def save_and_select(self, d):
        scraperwiki.sql.save([], {u"foo\xdd": d})
        observed = scraperwiki.sql.select(u'* FROM swdata')[0][u'foo\xdd']
        self.assertEqual(d, observed)


class TestUniqueKeys(SaveAndSelect):
    def test_empty(self):
        scraperwiki.sql.save([], {u"foo\xde": 3}, table_name=u"Chico\xcc")
        observed = scraperwiki.sql.execute(u'PRAGMA index_list(Chico\xcc)')
        self.assertEqual(observed, {u'data': [], u'keys': []})

    def test_two(self):
        scraperwiki.sql.save([u'foo\xdc', u'bar\xcd'], {u'foo\xdc': 3, u'bar\xcd': 9}, u'Harpo\xbb')
        observed = scraperwiki.sql.execute(
            u'PRAGMA index_info(Harpo_foo_bar_unique)')

        # Indexness
        self.assertIsNotNone(observed)

        # Indexed columns
        expected1 = {
            u'keys': [u'seqno', u'cid', u'name'],
            u'data': [
                (0, 0, u'foo\xdc'),
                (1, 1, u'bar\xcd'),
            ]
        }
        expected2 = {
            u'keys': [u'seqno', u'cid', u'name'],
            u'data': [
                (0, 1, u'foo\xdc'),
                (1, 0, u'bar\xcd'),
            ]
        }
        try:
            self.assertDictEqual(observed, expected1)
        except Exception:
            self.assertDictEqual(observed, expected2)

        # Uniqueness
        indices = scraperwiki.sql.execute(u'PRAGMA index_list(Harpo\xbb)')
        namecol = indices[u"keys"].index(u'name')
        for index in indices[u"data"]:
            if index[namecol] == u'Harpo_foo_bar_unique':
                break
        else:
            index = {}

        uniquecol = indices[u"keys"].index(u'unique')
        self.assertEqual(index[uniquecol], 1)


class TestSaveColumn(TestCase):
    def test_add_column(self):
        # Regression test for
        # https://github.com/scraperwiki/scraperwiki-python/issues/64
        # The bug is that in the first .save() of a process, a
        # new column cannot be added if the table already exists.
        # Because it's only the first .save() of a process, we
        # need to run a subprocess. (See the illustrative sketch
        # after this class.)
        connection = sqlite3.connect(DB_NAME)
        cursor = connection.cursor()
        cursor.execute(u'CREATE TABLE frigled\xaa (a TEXT);')
        cursor.execute(u'INSERT INTO frigled\xaa VALUES ("boo\xaa")')
        connection.close()

        script = dedent(u"""
            import scraperwiki
            scraperwiki.sql.save(['id'], dict(id=1, a=u"bar\xaa", b=u"foo\xaa"))
        """)

        with open("/dev/null") as null:
            process = Popen([sys.executable, "-c", script],
                            stdout=PIPE, stderr=PIPE, stdin=null)
            stdout, stderr = process.communicate()
        assert process.returncode == 0
        self.assertEqual(stdout, "".encode('utf-8'))
        self.assertEqual(stderr, "".encode('utf-8'))
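

# Illustrative sketch (not part of the original suite) of the scenario the
# comment in TestSaveColumn describes: a table created outside the library
# gains extra columns on the first .save() of a process. The table and
# column names below are hypothetical; the helper is defined only for
# illustration and is never called by the tests.
def _example_add_column_on_first_save():
    # Pre-create a table with a single column, as an external tool might.
    connection = sqlite3.connect(DB_NAME)
    connection.execute('CREATE TABLE IF NOT EXISTS example_widgets (a TEXT)')
    connection.commit()
    connection.close()

    # The first save of the process supplies extra columns ('id' and 'b');
    # the library is expected to add them to the existing table.
    scraperwiki.sql.save(['id'], dict(id=1, a='bar', b='foo'),
                         table_name='example_widgets')
    return scraperwiki.sql.select('* FROM example_widgets')

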
class TestSave(SaveAndCheck):
    def test_save_int(self):
        self.save_and_check(
            {u"model-number\xaa": 293}, u"model-numbers\xaa", [(293,)]
        )

    def test_save_string(self):
        self.save_and_check(
            {u"lastname\xaa": u"LeTourneau\u1234"}, u"diesel-engineers\xaa", [
                (u'LeTourneau\u1234',)]
        )
        # Ensure we can round-trip a string and then json encode it.
        # https://github.com/scraperwiki/scraperwiki-python/pull/85
        scraperwiki.sql.save([], {"test": "teststring"}, table_name="teststring")
        data = scraperwiki.sql.select("* FROM teststring")
        json.dumps(data)

    def test_save_twice(self):
        self.save_and_check(
            {u"modelNumber\xaa": 293}, u"modelNumbers", [(293,)]
        )
        self.save_and_check(
            {u"modelNumber\xaa": 293}, u"modelNumbers\xaa", [(293,), (293,)], twice=False
        )

    def test_save_true(self):
        self.save_and_check(
            {u"a": True}, u"true", [(1,)]
        )

    def test_save_false(self):
        self.save_and_check(
            {u"a": False}, u"false", [(0,)]
        )

    def test_save_table_name(self):
        """
        Test that after we use table_name= in one .save() a
        subsequent .save() without table_name= uses the `swdata`
        table again. (A usage sketch appears after this class.)
        """
        scraperwiki.sql.save(['id'], dict(id=1, stuff=1),
                             table_name=u'sticky\u1234')
        scraperwiki.sql.save(['id'], dict(id=2, stuff=2))
        results = scraperwiki.sql.select(u'* FROM sticky\u1234')
        self.assertEqual(1, len(results))
        (row, ) = results
        self.assertDictEqual(dict(id=1, stuff=1), row)

    def test_lxml_string(self):
        """Save lxml string."""
        import lxml.html
        # See https://github.com/scraperwiki/scraperwiki-python/issues/65
        # Careful, this looks like a string (eg, when printed or
        # repr()d), but is actually an instance of some class
        # internal to lxml.
        s = lxml.html.fromstring(b'<b>Hello&#1234;</b>').xpath(b'//b')[0].text_content()
        self.save_and_check(
            {"text": s},
            "lxml",
            [(six.text_type(s),)]
        )

    def test_save_and_drop(self):
        scraperwiki.sql.save([], dict(foo=7), table_name=u"dropper\xaa")
        scraperwiki.sql.execute(u"DROP TABLE dropper\xaa")
        scraperwiki.sql.save([], dict(foo=9), table_name=u"dropper\xaa")
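

# Illustrative sketch (not part of the original suite) of the behaviour the
# test_save_table_name docstring describes: table_name= applies only to that
# call, and a later .save() without it writes to the default 'swdata' table.
# The table and column names below are hypothetical; the helper is never
# called by the tests.
def _example_default_table_name():
    scraperwiki.sql.save(['id'], dict(id=1, colour='red'),
                         table_name='example_named')
    scraperwiki.sql.save(['id'], dict(id=2, colour='blue'))  # goes to 'swdata'
    return (scraperwiki.sql.select('* FROM example_named'),
            scraperwiki.sql.select('* FROM swdata'))

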
class TestQuestionMark(TestCase):
    def test_one_question_mark_with_nonlist(self):
        scraperwiki.sql.execute(u'CREATE TABLE zhuozi\xaa (\xaa TEXT);')
        scraperwiki.sql.execute(u'INSERT INTO zhuozi\xaa VALUES (?)', u'apple\xff')
        observed = scraperwiki.sql.select(u'* FROM zhuozi\xaa')
        self.assertListEqual(observed, [{u'\xaa': u'apple\xff'}])
        scraperwiki.sql.execute(u'DROP TABLE zhuozi\xaa')

    def test_one_question_mark_with_list(self):
        scraperwiki.sql.execute(u'CREATE TABLE zhuozi\xaa (\xaa TEXT);')
        scraperwiki.sql.execute(u'INSERT INTO zhuozi\xaa VALUES (?)', [u'apple\xff'])
        observed = scraperwiki.sql.select(u'* FROM zhuozi\xaa')
        self.assertListEqual(observed, [{u'\xaa': u'apple\xff'}])
        scraperwiki.sql.execute(u'DROP TABLE zhuozi\xaa')

    def test_multiple_question_marks(self):
        scraperwiki.sql.execute('CREATE TABLE zhuozi (a TEXT, b TEXT);')
        scraperwiki.sql.execute(
            'INSERT INTO zhuozi VALUES (?, ?)', ['apple', 'banana'])
        observed = scraperwiki.sql.select('* FROM zhuozi')
        self.assertListEqual(observed, [{'a': 'apple', 'b': 'banana'}])
        scraperwiki.sql.execute('DROP TABLE zhuozi')


class TestDateTime(TestCase):
    def rawdate(self, table="swdata", column="datetime"):
        connection = sqlite3.connect(DB_NAME)
        cursor = connection.cursor()
        cursor.execute(u"SELECT {} FROM {}".format(column, table))
        rawdate = cursor.fetchall()[0][0]
        connection.close()
        return rawdate

    def test_save_date(self):
        d = datetime.datetime.strptime('1991-03-30', '%Y-%m-%d').date()
        with scraperwiki.sql.Transaction():
            scraperwiki.sql.save([], {u"birthday\xaa": d})
        self.assertEqual(
            [{u'birthday\xaa': str(d)}],
            scraperwiki.sql.select("* FROM swdata"))
        self.assertEqual(
            {u'keys': [u'birthday\xaa'], u'data': [(six.text_type(d),)]},
            scraperwiki.sql.execute("SELECT * FROM swdata"))
        self.assertEqual(str(d), self.rawdate(column=u"birthday\xaa"))

    def test_save_datetime(self):
        d = datetime.datetime.strptime('1990-03-30', '%Y-%m-%d')
        with scraperwiki.sql.Transaction():
            scraperwiki.sql.save([], {"birthday": d},
                                 table_name="datetimetest")
        exemplar = six.text_type(d)
        # SQLAlchemy appears to convert with extended (microsecond)
        # precision. (A sketch of reading the value back follows this class.)
        exemplar += ".000000"
        self.assertEqual(
            [{u'birthday': exemplar}],
            scraperwiki.sql.select("* FROM datetimetest"))
        self.assertDictEqual(
            {u'keys': [u'birthday'], u'data': [(exemplar,)]},
            scraperwiki.sql.execute("SELECT * FROM datetimetest"))
        self.assertEqual(exemplar, self.rawdate(table="datetimetest", column="birthday"))
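

# Illustrative sketch (not part of the original suite): because datetimes are
# stored as text with microsecond precision (see the comment in
# test_save_datetime above), a caller can turn a selected value back into a
# datetime with strptime. The 'birthday' key mirrors the test above and is
# otherwise hypothetical; the helper is never called by the tests.
def _example_read_back_datetime(row):
    # row is a dict as returned by scraperwiki.sql.select(),
    # e.g. {'birthday': '1990-03-30 00:00:00.000000'}
    return datetime.datetime.strptime(row['birthday'], '%Y-%m-%d %H:%M:%S.%f')

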
class TestStatus(TestCase):
    'Test that the status endpoint works.'

    def test_does_nothing_if_called_outside_box(self):
        scraperwiki.status('ok')

    def test_raises_exception_with_invalid_type_field(self):
        self.assertRaises(AssertionError, scraperwiki.status, 'hello')

    # XXX need some mocking tests for the case of running inside a box


class TestUnicodeColumns(TestCase):
    maxDiff = None

    def test_add_column_once_only(self):
        scraperwiki.sqlite.save(data={"i": 1, u"a\xa0b": 1}, unique_keys=['i'])
        scraperwiki.sqlite.save(data={"i": 2, u"a\xa0b": 2}, unique_keys=['i'])


class TestImports(TestCase):
    'Test that all module contents are imported.'

    def setUp(self):
        self.sw = __import__('scraperwiki')

    def test_import_scraperwiki_root(self):
        self.sw.scrape

    def test_import_scraperwiki_sqlite(self):
        self.sw.sqlite

    def test_import_scraperwiki_sql(self):
        self.sw.sql

    def test_import_scraperwiki_status(self):
        self.sw.status

    def test_import_scraperwiki_utils(self):
        self.sw.utils

    def test_import_scraperwiki_special_utils(self):
        self.sw.pdftoxml


if __name__ == '__main__':
    main()