# -*- coding: utf-8 -*-
# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr -- mailto:contact@logilab.fr
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Massive store test case"""

import itertools

from cubicweb.dataimport import ucsvreader
from cubicweb.devtools import testlib, PostgresApptestConfiguration
from cubicweb.devtools import startpgcluster, stoppgcluster
from cubicweb.dataimport.massive_store import MassiveObjectStore, PGHelper


def setUpModule():
    startpgcluster(__file__)


def tearDownModule(*args):
    stoppgcluster(__file__)


class MassImportSimpleTC(testlib.CubicWebTC):
    configcls = PostgresApptestConfiguration
    appid = 'data-massimport'

    def cast(self, _type, value):
        try:
            return _type(value)
        except ValueError:
            return None

    def push_geonames_data(self, dumpname, store):
        # Push timezones
        cnx = store._cnx
        for code, gmt, dst, raw_offset in ucsvreader(open(self.datapath('timeZones.txt'), 'rb'),
                                                     delimiter='\t'):
            cnx.create_entity('TimeZone', code=code, gmt=float(gmt),
                              dst=float(dst), raw_offset=float(raw_offset))
        timezone_code = dict(cnx.execute('Any C, X WHERE X is TimeZone, X code C'))
        # Push data
        for ind, infos in enumerate(ucsvreader(open(dumpname, 'rb'),
                                               delimiter='\t',
                                               ignore_errors=True)):
            latitude = self.cast(float, infos[4])
            longitude = self.cast(float, infos[5])
            population = self.cast(int, infos[14])
            elevation = self.cast(int, infos[15])
            gtopo = self.cast(int, infos[16])
            feature_class = infos[6]
            if len(infos[6]) != 1:
                feature_class = None
            entity = {'name': infos[1],
                      'asciiname': infos[2],
                      'alternatenames': infos[3],
                      'latitude': latitude, 'longitude': longitude,
                      'feature_class': feature_class,
                      'alternate_country_code': infos[9],
                      'admin_code_3': infos[12],
                      'admin_code_4': infos[13],
                      'population': population, 'elevation': elevation,
                      'gtopo30': gtopo, 'timezone': timezone_code.get(infos[17]),
                      'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]),
                      'geonameid': int(infos[0]),
                      }
            store.prepare_insert_entity('Location', **entity)

    def test_autoflush_metadata(self):
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s',
                                 {'t': 'Location'})
            self.assertEqual(len(crs.fetchall()), 0)
            store = MassiveObjectStore(cnx)
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            store.commit()
            store.finish()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s',
                                 {'t': 'Location'})
            self.assertEqual(len(crs.fetchall()), 1)

    def test_massimport_etype_metadata(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            timezone_eid = store.prepare_insert_entity('TimeZone')
            store.prepare_insert_entity('Location', timezone=timezone_eid)
            store.flush()
            store.commit()
            eid, etname = cnx.execute('Any X, TN WHERE X timezone TZ, X is T, '
                                      'T name TN')[0]
            self.assertEqual(cnx.entity_from_eid(eid).cw_etype, etname)

    def test_drop_index(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertNotIn('entities_pkey', indexes)
            self.assertNotIn('unique_entities_extid_idx', indexes)
            self.assertNotIn('owned_by_relation_pkey', indexes)
            self.assertNotIn('owned_by_relation_to_idx', indexes)

    def test_drop_index_recreation(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.finish()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)

    def test_eids_seq_range(self):
        class MyMassiveObjectStore(MassiveObjectStore):
            eids_seq_range = 1000
            eids_seq_start = 50000

        with self.admin_access.repo_cnx() as cnx:
            store = MyMassiveObjectStore(cnx)
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            cnx.commit()
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql("SELECT * FROM entities_id_seq")
            self.assertGreater(crs.fetchone()[0], 50000)

    def test_eid_entity(self):
        class MyMassiveObjectStore(MassiveObjectStore):
            eids_seq_range = 1000
            eids_seq_start = 50000

        with self.admin_access.repo_cnx() as cnx:
            store = MyMassiveObjectStore(cnx)
            eid = store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            self.assertGreater(eid, 50000)

    def test_eid_entity_2(self):
        class MyMassiveObjectStore(MassiveObjectStore):
            eids_seq_range = 1000
            eids_seq_start = 50000

        with self.admin_access.repo_cnx() as cnx:
            store = MyMassiveObjectStore(cnx)
            eid = store.prepare_insert_entity('Location', name=u'toto', eid=10000)
            store.flush()
            self.assertEqual(eid, 10000)

    @staticmethod
    def get_db_descr(cnx):
        pg_schema = (
            cnx.repo.config.system_source_config.get('db-namespace')
            or 'public')
        pgh = PGHelper(cnx, pg_schema)
        all_tables = cnx.system_sql('''
            SELECT table_name
            FROM information_schema.tables
            where table_schema = %(s)s''', {'s': pg_schema}).fetchall()
        all_tables_descr = {}
        for tablename, in all_tables:
            all_tables_descr[tablename] = set(pgh.index_list(tablename)).union(
                set(pgh.constraint_list(tablename)))
        return all_tables_descr

    def test_identical_schema(self):
        with self.admin_access.repo_cnx() as cnx:
            init_descr = self.get_db_descr(cnx)
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_etype_table('CWUser')
            store.finish()
        with self.admin_access.repo_cnx() as cnx:
            final_descr = self.get_db_descr(cnx)
        self.assertEqual(init_descr, final_descr)

    def test_on_commit_callback(self):
        counter = itertools.count()
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, on_commit_callback=lambda: next(counter))
            store.prepare_insert_entity('Location', name=u'toto')
            store.flush()
            store.commit()
        self.assertGreaterEqual(next(counter), 1)

    def test_on_rollback_callback(self):
        counter = itertools.count()
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx, on_rollback_callback=lambda *_: next(counter))
            # 'nm' is not a Location attribute, so the import fails and the
            # rollback callback must be called
            store.prepare_insert_entity('Location', nm='toto')
            store.flush()
            store.commit()
        self.assertGreaterEqual(next(counter), 1)

    def test_slave_mode_indexes(self):
        with self.admin_access.repo_cnx() as cnx:
            slave_store = MassiveObjectStore(cnx, slave_mode=True)
        with self.admin_access.repo_cnx() as cnx:
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)

    def test_slave_mode_exception(self):
        with self.admin_access.repo_cnx() as cnx:
            master_store = MassiveObjectStore(cnx, slave_mode=False)
            slave_store = MassiveObjectStore(cnx, slave_mode=True)
            self.assertRaises(RuntimeError, slave_store.flush_meta_data)
            self.assertRaises(RuntimeError, slave_store.finish)

    def test_simple_insert(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            self.push_geonames_data(self.datapath('geonames.csv'), store)
            store.flush()
            store.commit()
            store.finish()
        with self.admin_access.repo_cnx() as cnx:
            rset = cnx.execute('Any X WHERE X is Location')
            self.assertEqual(len(rset), 4000)
            rset = cnx.execute('Any X WHERE X is Location, X timezone T')
            self.assertEqual(len(rset), 4000)

    def test_index_building(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            self.push_geonames_data(self.datapath('geonames.csv'), store)
            store.flush()

            # Check index
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertNotIn('entities_pkey', indexes)
            self.assertNotIn('unique_entities_extid_idx', indexes)
            self.assertNotIn('owned_by_relation_p_key', indexes)
            self.assertNotIn('owned_by_relation_to_idx', indexes)

            # Cleanup -> index
            store.finish()

            # Check index again
            crs = cnx.system_sql('SELECT indexname FROM pg_indexes')
            indexes = [r[0] for r in crs.fetchall()]
            self.assertIn('entities_pkey', indexes)
            self.assertIn('unique_entities_extid_idx', indexes)
            self.assertIn('owned_by_relation_p_key', indexes)
            self.assertIn('owned_by_relation_to_idx', indexes)

    def test_multiple_insert(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_etype_table('TestLocation')
            store.finish()
            store = MassiveObjectStore(cnx)
            store.init_etype_table('TestLocation')
            store.finish()

    def test_multiple_insert_relation(self):
        with self.admin_access.repo_cnx() as cnx:
            store = MassiveObjectStore(cnx)
            store.init_relation_table('used_language')
            store.finish()
            store = MassiveObjectStore(cnx)
            store.init_relation_table('used_language')
            store.finish()


if __name__ == '__main__':
    from logilab.common.testlib import unittest_main
    unittest_main()