[migration] always rebuild inferred relations
Rebuilding was skipped for no good reason (see 12ad88615a12, which
introduced that change).
The fix for #231956 in Yams is necessary to allow this cset: during a
migration we want to always re-infer relations, while still allowing
explicit redefinition of an inferred relation later. Otherwise the
migration may run with parts of the schema missing (the parts that
should have been inferred).
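
For illustration only (hypothetical schema, not part of this cset): with a
wildcard relation definition, Yams infers one relation definition per matching
entity type, and an explicit definition may later override one of the inferred
ones; re-inference during migration must keep both behaviours working:

    # hypothetical Yams schema sketch, names are illustrative only
    from yams.buildobjs import EntityType, RelationDefinition, String

    class Ticket(EntityType):
        title = String()

    class Version(EntityType):
        num = String()

    class concerns(RelationDefinition):
        subject = '*'        # one relation definition inferred per entity type
        object = 'Ticket'

    class concerns_version(RelationDefinition):
        name = 'concerns'    # explicit redefinition of the inferred
        subject = 'Version'  # Version -> Ticket definition
        object = 'Ticket'
        cardinality = '1*'   # e.g. to tighten the inferred cardinality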
Closes #3685463.

# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
#
# CubicWeb is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""SQL utilities functions and classes."""

__docformat__ = "restructuredtext en"

import os
import re
import subprocess
from datetime import datetime, date
from itertools import ifilter

from logilab import database as db, common as lgc
from logilab.common.shellutils import ProgressBar
from logilab.common.date import todate, todatetime, utcdatetime, utctime
from logilab.database.sqlgen import SQLGenerator

from cubicweb import Binary, ConfigurationError
from cubicweb.uilib import remove_html_tags
from cubicweb.schema import PURE_VIRTUAL_RTYPES
from cubicweb.server import SQL_CONNECT_HOOKS
from cubicweb.server.utils import crypt_password
from rql.utils import RQL_FUNCTIONS_REGISTRY

lgc.USE_MX_DATETIME = False
SQL_PREFIX = 'cw_'


def _run_command(cmd):
    print ' '.join(cmd)
    return subprocess.call(cmd)


def sqlexec(sqlstmts, cursor_or_execute, withpb=not os.environ.get('APYCOT_ROOT'),
            pbtitle='', delimiter=';', cnx=None):
    """execute sql statements, ignoring errors on DROP / CREATE GROUP or USER
    statements.

    :sqlstmts: a string or a list of sql statements
    :cursor_or_execute: sql cursor or a callback used to execute statements
    :cnx: if given, commit/rollback at each statement

    :withpb: if True, display a progress bar
    :pbtitle: a string displayed as the progress bar title (if `withpb=True`)

    :delimiter: a string used to split sqlstmts (if it is a string)

    Return the failed statements (same type as sqlstmts)
    """
    if hasattr(cursor_or_execute, 'execute'):
        execute = cursor_or_execute.execute
    else:
        execute = cursor_or_execute
    sqlstmts_as_string = False
    if isinstance(sqlstmts, basestring):
        sqlstmts_as_string = True
        sqlstmts = sqlstmts.split(delimiter)
    if withpb:
        pb = ProgressBar(len(sqlstmts), title=pbtitle)
    failed = []
    for sql in sqlstmts:
        sql = sql.strip()
        if withpb:
            pb.update()
        if not sql:
            continue
        try:
            # some dbapi modules don't accept unicode for sql string
            execute(str(sql))
        except Exception:
            if cnx:
                cnx.rollback()
            failed.append(sql)
        else:
            if cnx:
                cnx.commit()
    if withpb:
        print
    if sqlstmts_as_string:
        failed = delimiter.join(failed)
    return failed

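
# Illustrative usage sketch, not part of the original module: `cursor` is
# assumed to be any DB-API cursor. Failed statements are returned rather than
# raised, so callers can decide to ignore expected failures (e.g. DROP errors
# on a fresh database).
def _example_sqlexec_usage(cursor):
    stmts = 'DROP TABLE foo;CREATE TABLE foo (bar integer);INSERT INTO foo VALUES (1)'
    return sqlexec(stmts, cursor, withpb=False, delimiter=';')
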

def sqlgrants(schema, driver, user,
              text_index=True, set_owner=True,
              skip_relations=(), skip_entities=()):
    """return sql to give all access privileges to the given user on the system
    schema
    """
    from yams.schema2sql import grant_schema
    from cubicweb.server.sources import native
    output = []
    w = output.append
    w(native.grant_schema(user, set_owner))
    w('')
    if text_index:
        dbhelper = db.get_db_helper(driver)
        w(dbhelper.sql_grant_user_on_fti(user))
        w('')
    w(grant_schema(schema, user, set_owner, skip_entities=skip_entities,
                   prefix=SQL_PREFIX))
    return '\n'.join(output)


def sqlschema(schema, driver, text_index=True,
              user=None, set_owner=False,
              skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
    """return the system sql schema, according to the given parameters"""
    from yams.schema2sql import schema2sql
    from cubicweb.server.sources import native
    if set_owner:
        assert user, 'user argument is required when set_owner is true'
    output = []
    w = output.append
    w(native.sql_schema(driver))
    w('')
    dbhelper = db.get_db_helper(driver)
    if text_index:
        w(dbhelper.sql_init_fti().replace(';', ';;'))
        w('')
    w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX,
                 skip_entities=skip_entities,
                 skip_relations=skip_relations).replace(';', ';;'))
    if dbhelper.users_support and user:
        w('')
        w(sqlgrants(schema, driver, user, text_index,
                    set_owner, skip_relations, skip_entities).replace(';', ';;'))
    return '\n'.join(output)


def sqldropschema(schema, driver, text_index=True,
                  skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()):
    """return the sql to drop the schema, according to the given parameters"""
    from yams.schema2sql import dropschema2sql
    from cubicweb.server.sources import native
    output = []
    w = output.append
    # get the helper unconditionally: it is needed by dropschema2sql below,
    # not only when text_index is set
    dbhelper = db.get_db_helper(driver)
    if text_index:
        w(dbhelper.sql_drop_fti())
        w('')
    w(dropschema2sql(dbhelper, schema, prefix=SQL_PREFIX,
                     skip_entities=skip_entities,
                     skip_relations=skip_relations))
    w('')
    w(native.sql_drop_schema(driver))
    return '\n'.join(output)


_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION = re.compile('^(?!(sql|pg)_)').match

def sql_drop_all_user_tables(driver_or_helper, sqlcursor):
    """Return the sql to drop all tables found in the database system."""
    if not getattr(driver_or_helper, 'list_tables', None):
        dbhelper = db.get_db_helper(driver_or_helper)
    else:
        dbhelper = driver_or_helper

    cmds = [dbhelper.sql_drop_sequence('entities_id_seq')]
    # for mssql, we need to drop views before tables
    if hasattr(dbhelper, 'list_views'):
        cmds += ['DROP VIEW %s;' % name
                 for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION,
                                     dbhelper.list_views(sqlcursor))]
    cmds += ['DROP TABLE %s;' % name
             for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION,
                                 dbhelper.list_tables(sqlcursor))]
    return '\n'.join(cmds)

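
# Illustrative sketch, not part of the original module: sqlschema() and
# sqldropschema() double the statement separator (';' -> ';;') so that plain
# ';' occurring inside generated statements survives splitting; such output is
# therefore assumed to be replayed through sqlexec() with delimiter=';;'.
def _example_create_schema(schema, driver, cursor):
    schemasql = sqlschema(schema, driver, text_index=True)
    return sqlexec(schemasql, cursor, withpb=False, delimiter=';;')
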
"""def__init__(self,source_config):try:self.dbdriver=source_config['db-driver'].lower()dbname=source_config['db-name']exceptKeyError:raiseConfigurationError('missing some expected entries in sources file')dbhost=source_config.get('db-host')port=source_config.get('db-port')dbport=portandint(port)orNonedbuser=source_config.get('db-user')dbpassword=source_config.get('db-password')dbencoding=source_config.get('db-encoding','UTF-8')dbextraargs=source_config.get('db-extra-arguments')self.dbhelper=db.get_db_helper(self.dbdriver)self.dbhelper.record_connection_info(dbname,dbhost,dbport,dbuser,dbpassword,dbextraargs,dbencoding)self.sqlgen=SQLGenerator()# copy back some commonly accessed attributesdbapi_module=self.dbhelper.dbapi_moduleself.OperationalError=dbapi_module.OperationalErrorself.InterfaceError=dbapi_module.InterfaceErrorself.DbapiError=dbapi_module.Errorself._binary=self.dbhelper.binary_valueself._process_value=dbapi_module.process_valueself._dbencoding=dbencodingdefget_connection(self):"""open and return a connection to the database"""returnself.dbhelper.get_connection()defbackup_to_file(self,backupfile,confirm):forcmdinself.dbhelper.backup_commands(backupfile,keepownership=False):if_run_command(cmd):ifnotconfirm(' [Failed] Continue anyway?',default='n'):raiseException('Failed command: %s'%cmd)defrestore_from_file(self,backupfile,confirm,drop=True):forcmdinself.dbhelper.restore_commands(backupfile,keepownership=False,drop=drop):if_run_command(cmd):ifnotconfirm(' [Failed] Continue anyway?',default='n'):raiseException('Failed command: %s'%cmd)defmerge_args(self,args,query_args):ifargsisnotNone:newargs={}forkey,valinargs.iteritems():# convert cubicweb binary into db binaryifisinstance(val,Binary):val=self._binary(val.getvalue())newargs[key]=val# should not collidenewargs.update(query_args)returnnewargsreturnquery_argsdefprocess_result(self,cursor,column_callbacks=None,session=None):"""return a list of CubicWeb compliant values from data in the given cursor """returnlist(self.iter_process_result(cursor,column_callbacks,session))defiter_process_result(self,cursor,column_callbacks=None,session=None):"""return a iterator on tuples of CubicWeb compliant values from data in the given cursor """# use two different implementations to avoid paying the price of# callback lookup for each *cell* in results when there is nothing to# lookupifnotcolumn_callbacks:returnself.dbhelper.dbapi_module.process_cursor(cursor,self._dbencoding,Binary)assertsessionreturnself._cb_process_result(cursor,column_callbacks,session)def_cb_process_result(self,cursor,column_callbacks,session):# begin bind to locals for optimizationdescr=cursor.descriptionencoding=self._dbencodingprocess_value=self._process_valuebinary=Binary# /endcursor.arraysize=100whileTrue:results=cursor.fetchmany()ifnotresults:breakforlineinresults:result=[]forcol,valueinenumerate(line):ifvalueisNone:result.append(value)continuecbstack=column_callbacks.get(col,None)ifcbstackisNone:value=process_value(value,descr[col],encoding,binary)else:forcbincbstack:value=cb(self,session,value)result.append(value)yieldresultdefpreprocess_entity(self,entity):"""return a dictionary to use as extra argument to cursor.execute to insert/update an entity into a SQL database """attrs={}eschema=entity.e_schemaconverters=getattr(self.dbhelper,'TYPE_CONVERTERS',{})forattr,valueinentity.cw_edited.iteritems():ifvalueisnotNoneandeschema.subjrels[attr].final:atype=str(entity.e_schema.destination(attr))ifatypeinconverters:# It is easier to modify preprocess_entity rather# than 

from logging import getLogger
from cubicweb import set_log_methods
set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))


def init_sqlite_connexion(cnx):

    class group_concat(object):
        def __init__(self):
            self.values = set()
        def step(self, value):
            if value is not None:
                self.values.add(value)
        def finalize(self):
            return ', '.join(unicode(v) for v in self.values)

    cnx.create_aggregate("GROUP_CONCAT", 1, group_concat)

    def _limit_size(text, maxsize, format='text/plain'):
        if len(text) < maxsize:
            return text
        if format in ('text/html', 'text/xhtml', 'text/xml'):
            text = remove_html_tags(text)
        if len(text) > maxsize:
            text = text[:maxsize] + '...'
        return text

    def limit_size3(text, format, maxsize):
        return _limit_size(text, maxsize, format)
    cnx.create_function("LIMIT_SIZE", 3, limit_size3)

    def limit_size2(text, maxsize):
        return _limit_size(text, maxsize)
    cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)

    from logilab.common.date import strptime
    def weekday(ustr):
        try:
            dt = strptime(ustr, '%Y-%m-%d %H:%M:%S')
        except:
            dt = strptime(ustr, '%Y-%m-%d')
        # expect sunday to be 1, saturday 7, while the weekday method returns
        # 0 for monday
        return (dt.weekday() + 1) % 7
    cnx.create_function("WEEKDAY", 1, weekday)

    import yams.constraints
    yams.constraints.patch_sqlite_decimal()

sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', [])
sqlite_hooks.append(init_sqlite_connexion)


def init_postgres_connexion(cnx):
    cnx.cursor().execute('SET TIME ZONE UTC')
    # commit is needed, else the setting is lost if the connection is first
    # rolled back
    cnx.commit()

postgres_hooks = SQL_CONNECT_HOOKS.setdefault('postgres', [])
postgres_hooks.append(init_postgres_connexion)
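
# Illustrative sketch, not part of the original module: additional per-driver
# hooks can be registered the same way and are assumed to be called with the
# freshly opened DB-API connection, as init_postgres_connexion is above.
def _example_register_postgres_hook():
    def set_application_name(cnx):
        cnx.cursor().execute("SET application_name TO 'cubicweb'")
        cnx.commit()
    SQL_CONNECT_HOOKS.setdefault('postgres', []).append(set_application_name)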