# -*- coding: utf-8 -*-
# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr -- mailto:contact@logilab.fr
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Massive store test case"""
|
import itertools

from cubicweb.dataimport import ucsvreader
from cubicweb.devtools import testlib, PostgresApptestConfiguration
from cubicweb.devtools import startpgcluster, stoppgcluster
from cubicweb.dataimport.massive_store import MassiveObjectStore, PGHelper

|
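# The massive store is PostgreSQL-specific: the whole module runs against a
# throwaway cluster started in setUpModule and stopped in tearDownModule.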
def setUpModule():
    startpgcluster(__file__)


def tearDownModule(*args):
    stoppgcluster(__file__)

|
class MassImportSimpleTC(testlib.CubicWebTC):
    configcls = PostgresApptestConfiguration
    appid = 'data-massimport'
|
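    # Store workflow exercised throughout these tests (a sketch based on the
    # calls made below, not a full description of the MassiveObjectStore API;
    # 'somewhere' is just a placeholder value):
    #   store = MassiveObjectStore(cnx)
    #   eid = store.prepare_insert_entity('Location', name=u'somewhere')
    #   store.flush()    # push buffered rows to the database
    #   store.commit()
    #   store.finish()   # recreate the indexes dropped at import time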
    def cast(self, _type, value):
        # Coerce a CSV field, returning None when the value cannot be parsed.
        try:
            return _type(value)
        except ValueError:
            return None
|
    def push_geonames_data(self, dumpname, store):
        """Import the timezone fixture through the ORM, then feed the geonames
        dump at `dumpname` to `store`."""
        # Push timezones
        cnx = store._cnx
        for code, gmt, dst, raw_offset in ucsvreader(open(self.datapath('timeZones.txt'), 'rb'),
                                                     delimiter='\t'):
            cnx.create_entity('TimeZone', code=code, gmt=float(gmt),
                              dst=float(dst), raw_offset=float(raw_offset))
        timezone_code = dict(cnx.execute('Any C, X WHERE X is TimeZone, X code C'))
        # Push data
        for ind, infos in enumerate(ucsvreader(open(dumpname, 'rb'),
                                               delimiter='\t',
                                               ignore_errors=True)):
            latitude = self.cast(float, infos[4])
            longitude = self.cast(float, infos[5])
            population = self.cast(int, infos[14])
            elevation = self.cast(int, infos[15])
            gtopo = self.cast(int, infos[16])
            feature_class = infos[6]
            if len(infos[6]) != 1:
                feature_class = None
            entity = {'name': infos[1],
                      'asciiname': infos[2],
                      'alternatenames': infos[3],
                      'latitude': latitude, 'longitude': longitude,
                      'feature_class': feature_class,
                      'alternate_country_code': infos[9],
                      'admin_code_3': infos[12],
                      'admin_code_4': infos[13],
                      'population': population, 'elevation': elevation,
                      'gtopo30': gtopo, 'timezone': timezone_code.get(infos[17]),
                      'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]),
                      'geonameid': int(infos[0]),
                      }
            store.prepare_insert_entity('Location', **entity)
|
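    # Rows pushed through the store must not show up in the 'entities' metadata
    # table before the store has flushed, committed and finished.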
    def test_autoflush_metadata(self):
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s',
                                 {'t': 'Location'})
            self.assertEqual(len(crs.fetchall()), 0)
            store = MassiveObjectStore(cnx)
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            store.commit()
            store.finish()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s',
                                 {'t': 'Location'})
            self.assertEqual(len(crs.fetchall()), 1)
|
    def test_massimport_etype_metadata(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            timezone_eid = store.prepare_insert_entity('TimeZone')
            store.prepare_insert_entity('Location', timezone=timezone_eid)
            store.flush()
            store.commit()
            eid, etname = cnx.execute('Any X, TN WHERE X timezone TZ, X is T, '
                                      'T name TN')[0]
            self.assertEqual(cnx.entity_from_eid(eid).cw_etype, etname)
|
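    # Instantiating a MassiveObjectStore drops the indexes and constraints that
    # would slow down bulk insertion; they are only recreated by finish() (see
    # test_drop_index_recreation below).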
    def test_drop_index(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertNotIn('entities_pkey', indexes)
            self.assertNotIn('unique_entities_extid_idx', indexes)
            self.assertNotIn('owned_by_relation_pkey', indexes)
            self.assertNotIn('owned_by_relation_to_idx', indexes)
|
    def test_drop_index_recreation(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.finish()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)
|
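    # restart_eid_sequence(50000) moves the eid sequence so that subsequently
    # allocated eids land above 50000; eids_seq_range is, by its name, the size
    # of the eid blocks the store reserves from entities_id_seq (an assumption,
    # not something these tests assert directly).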
    def test_eids_seq_range(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, eids_seq_range=1000)
            store.restart_eid_sequence(50000)
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql("SELECT * FROM entities_id_seq")
            self.assertGreater(crs.fetchone()[0], 50000)
|
    def test_eid_entity(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, eids_seq_range=1000)
            store.restart_eid_sequence(50000)
            eid = store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            self.assertGreater(eid, 50000)
|
    def test_eid_entity_2(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.restart_eid_sequence(50000)
            eid = store.prepare_insert_entity('Location', name=u'toto', eid=10000)
            store.flush()
            self.assertEqual(eid, 10000)
|
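    # Snapshot of the database layout: for each table of the configured schema,
    # the set of its indexes and constraints.  Used to check that an import
    # leaves the schema exactly as it found it.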
    @staticmethod
    def get_db_descr(cnx):
        pg_schema = (
            cnx.repo.config.system_source_config.get('db-namespace')
            or 'public')
        pgh = PGHelper(cnx, pg_schema)
        all_tables = cnx.system_sql('''
            SELECT table_name
            FROM information_schema.tables
            WHERE table_schema = %(s)s''', {'s': pg_schema}).fetchall()
        all_tables_descr = {}
        for tablename, in all_tables:
            all_tables_descr[tablename] = set(pgh.index_list(tablename)).union(
                set(pgh.constraint_list(tablename)))
        return all_tables_descr
|
    def test_identical_schema(self):
        with self.admin_access.repo_cnx() as cnx:
            init_descr = self.get_db_descr(cnx)
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_etype_table('CWUser')
            store.finish()
        with self.admin_access.repo_cnx() as cnx:
            final_descr = self.get_db_descr(cnx)
            self.assertEqual(init_descr, final_descr)
|
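    # The commit/rollback callbacks are plain callables; an itertools counter
    # is enough to check that they were invoked at least once.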
    def test_on_commit_callback(self):
        counter = itertools.count()
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, on_commit_callback=lambda: next(counter))
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            store.commit()
        self.assertGreaterEqual(next(counter), 1)
|
    def test_on_rollback_callback(self):
        counter = itertools.count()
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, on_rollback_callback=lambda *_: next(counter))
            # 'nm' is (presumably deliberately) not a valid Location attribute,
            # so the flush/commit should fail and fire the rollback callback.
            store.prepare_insert_entity('Location', nm='toto')
            store.flush()
            store.commit()
        self.assertGreaterEqual(next(counter), 1)
|
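    # In slave mode the store leaves the database schema alone: dropping and
    # recreating indexes, as well as flushing metadata, is the master store's
    # job (hence the RuntimeErrors checked below).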
    def test_slave_mode_indexes(self):
        with self.admin_access.repo_cnx() as cnx:
            slave_store = MassiveObjectStore(cnx, slave_mode=True)
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)
|
    def test_slave_mode_exception(self):
        with self.admin_access.repo_cnx() as cnx:
            master_store = MassiveObjectStore(cnx, slave_mode=False)
            slave_store = MassiveObjectStore(cnx, slave_mode=True)
            self.assertRaises(RuntimeError, slave_store.flush_meta_data)
            self.assertRaises(RuntimeError, slave_store.finish)
|
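    # End-to-end import of the geonames.csv fixture (4000 rows): every row must
    # end up as a Location with its timezone relation set.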
    def test_simple_insert(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            self.push_geonames_data(self.datapath('geonames.csv'), store)
            store.flush()
            store.commit()
            store.finish()
        with self.admin_access.repo_cnx() as cnx:
            rset = cnx.execute('Any X WHERE X is Location')
            self.assertEqual(len(rset), 4000)
            rset = cnx.execute('Any X WHERE X is Location, X timezone T')
            self.assertEqual(len(rset), 4000)
|
    def test_index_building(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            self.push_geonames_data(self.datapath('geonames.csv'), store)
            store.flush()

            # Check index
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertNotIn('entities_pkey', indexes)
            self.assertNotIn('unique_entities_extid_idx', indexes)
            self.assertNotIn('owned_by_relation_p_key', indexes)
            self.assertNotIn('owned_by_relation_to_idx', indexes)

            # Cleanup -> index
            store.finish()

            # Check index again
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)
|
    def test_multiple_insert(self):
        # Running two stores back to back on the same etype must not raise.
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_etype_table('TestLocation')
            store.finish()
            store = MassiveObjectStore(cnx)
            store.init_etype_table('TestLocation')
            store.finish()
|
    def test_multiple_insert_relation(self):
        # Same as above, for a relation table.
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_relation_table('used_language')
            store.finish()
            store = MassiveObjectStore(cnx)
            store.init_relation_table('used_language')
            store.finish()

|
if __name__ == '__main__':
    from logilab.common.testlib import unittest_main
    unittest_main()