misc/migration/3.8.5_Any.py
author Aurelien Campeas <aurelien.campeas@logilab.fr>
date Tue, 07 Dec 2010 12:18:20 +0100
branch oldstable
changeset 7078 bad26a22fe29
parent 5832 16a4235e031f
child 9460 a2a0bc984863
permissions -rw-r--r--
[test] New handling of databases for tests. This patch adds a new TestDataBaseHandler class. A TestDataBaseHandler is in charge of setup, backup, restore, connection, repository caching and cleanup for the databases used during the tests. TestDataBaseHandler reuses code and logic previously found in the cubicweb.devtools functions and in devtools.testlib.CubicwebTC. TestDataBaseHandler is an abstract class and must be subclassed to implement the functionality specific to each driver. A TestDataBaseHandler can store and restore various database setups. devtools.testlib.CubicwebTC gains a test_db_id class attribute to specify that its TestCase uses a specific database that should be cached. The pre_setup_database class method is used to set up the database that will be cached, while the setup_database method is kept uncached. The same TestDataBaseHandler is reused for every test using the same config object, and TestDataBaseHandler tries to reuse Repository objects as much as possible. All cubicweb tests have been updated.
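
To make the caching behaviour concrete, here is a minimal sketch of a test class using the API described above. It is not part of the migration file below; the import path, the CubicWebTC spelling and the pre_setup_database signature are assumptions based on the description, and the Blog/BlogEntry entities and the 'blog-base' identifier are purely illustrative.

from cubicweb.devtools.testlib import CubicWebTC

class BlogBaseTC(CubicWebTC):
    # test classes sharing this id reuse the same cached database
    test_db_id = 'blog-base'

    @classmethod
    def pre_setup_database(cls, session, config):
        # executed once per test_db_id; the resulting state is snapshotted
        # by the TestDataBaseHandler and restored for later test classes
        session.create_entity('Blog', title=u'cached blog')
        session.commit()

    def setup_database(self):
        # still executed for every test class, on top of the restored cache
        self.request().create_entity('BlogEntry', title=u'per-class entry',
                                     content=u'not cached')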

def migrate_varchar_to_nvarchar():
    """Convert the system database's VARCHAR columns to NVARCHAR (SQL Server
    only), dropping and recreating the affected indexes and unique
    constraints around each ALTER COLUMN statement."""
    dbdriver = config.sources()['system']['db-driver']
    if dbdriver != "sqlserver2005":
        return

    introspection_sql = """\
SELECT table_schema, table_name, column_name, is_nullable, character_maximum_length
FROM information_schema.columns
WHERE data_type = 'VARCHAR' and table_name <> 'SYSDIAGRAMS'
"""
    has_index_sql = """\
SELECT i.name AS index_name,
       i.type_desc,
       i.is_unique,
       i.is_unique_constraint
FROM sys.indexes AS i, sys.index_columns as j, sys.columns as k
WHERE is_hypothetical = 0 AND i.index_id <> 0
AND i.object_id = j.object_id
AND i.index_id = j.index_id
AND i.object_id = OBJECT_ID('%(table)s')
AND k.name = '%(col)s'
AND k.object_id=i.object_id
AND j.column_id = k.column_id;"""

    generated_statements = []
    for schema, table, column, is_nullable, length in sql(introspection_sql, ask_confirm=False):
        qualified_table = '[%s].[%s]' % (schema, table)
        # look up indexes and unique constraints on the column: they must be
        # dropped before ALTER COLUMN and recreated afterwards
        rset = sql(has_index_sql % {'table': qualified_table, 'col': column},
                   ask_confirm=False)
        drops = []
        creates = []
        for idx_name, idx_type, idx_unique, is_unique_constraint in rset:
            if is_unique_constraint:
                drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (qualified_table, idx_name))
                creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (qualified_table, idx_name, column))
            else:
                drops.append('DROP INDEX %s ON %s' % (idx_name, qualified_table))
                if idx_unique:
                    unique = 'UNIQUE'
                else:
                    unique = ''
                creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, qualified_table, column))

        # information_schema reports -1 for VARCHAR(max) columns
        if length == -1:
            length = 'max'
        if is_nullable == 'YES':
            not_null = 'NULL'
        else:
            not_null = 'NOT NULL'
        alter_sql = 'ALTER TABLE %s ALTER COLUMN %s NVARCHAR(%s) %s' % (qualified_table, column, length, not_null)
        generated_statements += drops + [alter_sql] + creates


    for statement in generated_statements:
        print statement
        sql(statement, ask_confirm=False)
    commit()

migrate_varchar_to_nvarchar()