[repo] straightforward bootstrap sequence. Closes #2841188
author Sylvain Thénault <sylvain.thenault@logilab.fr>
Fri, 26 Apr 2013 18:06:44 +0200
changeset 8947 3bbd416b09ec
parent 8946 fae2f561b0f5
child 8949 e01cd8a8c26b
[repo] straightforward bootstrap sequence. Closes #2841188

* kill the fill_schema method, which was doing too many different things, in favor of a deserialize_schema method which simply reads the schema from the db and returns it
* set_schema no longer bothers with cubes

This makes it easy to follow what's going on, and hence to clean up init_cnxset_pool to get the following bootstrap order:

1. load cubes (implies loading site_cubicweb)
2. load schema
3. init data sources
4. init cnx sets
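The call-site change is small. A minimal sketch of the new pattern, using only the repository API shown in this changeset (the wrapper function below is hypothetical, added for illustration):

    def reload_schema_from_db(repo):
        """Hypothetical helper mirroring the new call sites in this
        changeset: read the schema from the database and install it
        without resetting the vreg."""
        schema = repo.deserialize_schema()  # replaces the old repo.fill_schema()
        repo.set_schema(schema, resetvreg=False)
        return schema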
hooks/test/unittest_syncschema.py
server/repository.py
server/sources/native.py
server/test/unittest_migractions.py
server/test/unittest_repository.py
--- a/hooks/test/unittest_syncschema.py	Wed Apr 24 14:49:49 2013 +0200
+++ b/hooks/test/unittest_syncschema.py	Fri Apr 26 18:06:44 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -34,7 +34,7 @@
 
     def setUp(self):
         super(SchemaModificationHooksTC, self).setUp()
-        self.repo.fill_schema()
+        self.repo.set_schema(self.repo.deserialize_schema(), resetvreg=False)
         self.__class__.schema_eids = schema_eids_idx(self.repo.schema)
 
     def index_exists(self, etype, attr, unique=False):
--- a/server/repository.py	Wed Apr 24 14:49:49 2013 +0200
+++ b/server/repository.py	Fri Apr 26 18:06:44 2013 +0200
@@ -212,45 +212,52 @@
         """should be called bootstrap_repository, as this is what it does"""
         config = self.config
         self._cnxsets_pool = Queue.Queue()
+        # 0. init a cnxset that will be used to fetch bootstrap information from
+        #    the database
         self._cnxsets_pool.put_nowait(pool.ConnectionsSet(self.sources))
+        # 1. set used cubes
+        if config.creating or not config.read_instance_schema:
+            config.bootstrap_cubes()
+        else:
+            self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False)
+            config.init_cubes(self.get_cubes())
+        # 2. load schema
         if config.quick_start:
             # quick start: only to get a minimal repository to get cubes
             # information (eg dump/restore/...)
-            config._cubes = ()
-            # only load hooks and entity classes in the registry
+            #
+            # restrict appobject_path to only load hooks and entity classes in
+            # the registry
             config.cube_appobject_path = set(('hooks', 'entities'))
             config.cubicweb_appobject_path = set(('hooks', 'entities'))
-            self.set_schema(config.load_schema())
+            # limit connections pool to 1
             config['connections-pool-size'] = 1
-            # will be reinitialized later from cubes found in the database
-            config._cubes = None
-        elif config.creating or not config.read_instance_schema:
+        if config.quick_start or config.creating or not config.read_instance_schema:
+            # load schema from the file system
             if not config.creating:
-                # test start: use the file system schema (quicker)
                 self.warning("set fs instance'schema")
-            config.bootstrap_cubes()
             self.set_schema(config.load_schema())
         else:
             # normal start: load the instance schema from the database
-            self.fill_schema()
-        if not config.creating:
-            self.init_sources_from_database()
-            if 'CWProperty' in self.schema:
-                self.vreg.init_properties(self.properties())
-        else:
+            self.info('loading schema from the repository')
+            self.set_schema(self.deserialize_schema())
+        # 3. initialize data sources
+        if config.creating:
             # call init_creating so that for instance native source can
             # configurate tsearch according to postgres version
             for source in self.sources:
                 source.init_creating()
-        # close initialization connetions set and reopen fresh ones for proper
-        # initialization now that we know cubes
+        else:
+            self.init_sources_from_database()
+            if 'CWProperty' in self.schema:
+                self.vreg.init_properties(self.properties())
+        # 4. close initialization connection set and reopen fresh ones for
+        #    proper initialization
         self._get_cnxset().close(True)
         self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue)
         for i in xrange(config['connections-pool-size']):
             self.cnxsets.append(pool.ConnectionsSet(self.sources))
             self._cnxsets_pool.put_nowait(self.cnxsets[-1])
-        if config.quick_start:
-            config.init_cubes(self.get_cubes())
 
     # internals ###############################################################
 
@@ -318,8 +325,6 @@
     def set_schema(self, schema, resetvreg=True):
         self.info('set schema %s %#x', schema.name, id(schema))
         if resetvreg:
-            if self.config._cubes is None:
-                self.config.init_cubes(self.get_cubes())
             # trigger full reload of all appobjects
             self.vreg.set_schema(schema)
         else:
@@ -331,12 +336,10 @@
             source.set_schema(schema)
         self.schema = schema
 
-    def fill_schema(self):
-        """load schema from the repository"""
+    def deserialize_schema(self):
+        """load schema from the database"""
         from cubicweb.server.schemaserial import deserialize_schema
-        self.info('loading schema from the repository')
         appschema = schema.CubicWebSchema(self.config.appid)
-        self.set_schema(self.config.load_bootstrap_schema(), resetvreg=False)
         self.debug('deserializing db schema into %s %#x', appschema.name, id(appschema))
         with self.internal_session() as session:
             try:
@@ -349,8 +352,7 @@
                 raise Exception('Is the database initialised ? (cause: %s)' %
                                 (ex.args and ex.args[0].strip() or 'unknown')), \
                                 None, sys.exc_info()[-1]
-        self.set_schema(appschema)
-
+        return appschema
 
     def _prepare_startup(self):
         """Prepare "Repository as a server" for startup.
--- a/server/sources/native.py	Wed Apr 24 14:49:49 2013 +0200
+++ b/server/sources/native.py	Fri Apr 26 18:06:44 2013 +0200
@@ -370,8 +370,12 @@
     def backup(self, backupfile, confirm, format='native'):
         """method called to create a backup of the source's data"""
         if format == 'portable':
-            self.repo.fill_schema()
-            self.set_schema(self.repo.schema)
+            # ensure the schema is the one stored in the database: if the
+            # repository was started in quick_start mode, the file system
+            # schema has been loaded at startup, so force a reload
+            if self.repo.config.quick_start:
+                self.repo.set_schema(self.repo.deserialize_schema(),
+                                     resetvreg=False)
             helper = DatabaseIndependentBackupRestore(self)
             self.close_source_connections()
             try:
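The portable backup path above no longer goes through fill_schema; the reload is only forced when the file-system schema was loaded at startup. A hedged sketch of that guard, reusing names from the diff (the wrapper function is illustrative only):

    def ensure_db_schema_for_backup(repo):
        # a portable backup must describe what is actually stored in the
        # database; only a repository started in quick_start mode has the
        # file system schema loaded, so only then is a reload needed
        if repo.config.quick_start:
            repo.set_schema(repo.deserialize_schema(), resetvreg=False)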
--- a/server/test/unittest_migractions.py	Wed Apr 24 14:49:49 2013 +0200
+++ b/server/test/unittest_migractions.py	Fri Apr 26 18:06:44 2013 +0200
@@ -49,10 +49,9 @@
     def _init_repo(cls):
         super(MigrationCommandsTC, cls)._init_repo()
         # we have to read schema from the database to get eid for schema entities
+        cls.repo.set_schema(cls.repo.deserialize_schema(), resetvreg=False)
+        # hack to read the schema from data/migrschema
         config = cls.config
-        config._cubes = None
-        cls.repo.fill_schema()
-        # hack to read the schema from data/migrschema
         config.appid = join('data', 'migratedapp')
         config._apphome = cls.datapath('migratedapp')
         global migrschema
--- a/server/test/unittest_repository.py	Wed Apr 24 14:49:49 2013 +0200
+++ b/server/test/unittest_repository.py	Fri Apr 26 18:06:44 2013 +0200
@@ -571,13 +571,17 @@
         helper.TYPE_CONVERTERS.pop('BabarTestType', None)
         super(SchemaDeserialTC, cls).tearDownClass()
 
-    def test_fill_schema(self):
+    def test_deserialization_base(self):
+        """Check the following points of schema deserialization:
+
+        * all CWEType entities have a name
+        * final types
+        * CWUniqueTogetherConstraint
+        * _unique_together__ content"""
         origshema = self.repo.schema
         try:
-            self.repo.schema = CubicWebSchema(self.repo.config.appid)
-            self.repo.config._cubes = None # avoid assertion error
             self.repo.config.repairing = True # avoid versions checking
-            self.repo.fill_schema()
+            self.repo.set_schema(self.repo.deserialize_schema())
             table = SQL_PREFIX + 'CWEType'
             namecol = SQL_PREFIX + 'name'
             finalcol = SQL_PREFIX + 'final'
@@ -617,10 +621,8 @@
     def test_custom_attribute_param(self):
         origshema = self.repo.schema
         try:
-            self.repo.schema = CubicWebSchema(self.repo.config.appid)
-            self.repo.config._cubes = None # avoid assertion error
             self.repo.config.repairing = True # avoid versions checking
-            self.repo.fill_schema()
+            self.repo.set_schema(self.repo.deserialize_schema())
             pes = self.repo.schema['Personne']
             attr = pes.rdef('custom_field_of_jungle')
             self.assertIn('jungle_speed', vars(attr))