Show More
@@ -1,202 +1,202 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | This module defines standalone schema constraint classes. |
|
3 | 3 | """ |
|
4 | 4 | from sqlalchemy import schema |
|
5 | 5 | |
|
6 | 6 | from rhodecode.lib.dbmigrate.migrate.exceptions import * |
|
7 | 7 | from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_06 |
|
8 | 8 | |
|
9 | 9 | class ConstraintChangeset(object): |
|
10 | 10 | """Base class for Constraint classes.""" |
|
11 | 11 | |
|
12 | 12 | def _normalize_columns(self, cols, table_name=False): |
|
13 | 13 | """Given: column objects or names; return col names and |
|
14 | 14 | (maybe) a table""" |
|
15 | 15 | colnames = [] |
|
16 | 16 | table = None |
|
17 | 17 | for col in cols: |
|
18 | 18 | if isinstance(col, schema.Column): |
|
19 | 19 | if col.table is not None and table is None: |
|
20 | 20 | table = col.table |
|
21 | 21 | if table_name: |
|
22 | 22 | col = '.'.join((col.table.name, col.name)) |
|
23 | 23 | else: |
|
24 | 24 | col = col.name |
|
25 | 25 | colnames.append(col) |
|
26 | 26 | return colnames, table |
|
27 | 27 | |
|
28 | 28 | def __do_imports(self, visitor_name, *a, **kw): |
|
29 | 29 | engine = kw.pop('engine', self.table.bind) |
|
30 | 30 | from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (get_engine_visitor, |
|
31 | run_single_visitor) | |
|
31 | run_single_visitor) | |
|
32 | 32 | visitorcallable = get_engine_visitor(engine, visitor_name) |
|
33 | 33 | run_single_visitor(engine, visitorcallable, self, *a, **kw) |
|
34 | 34 | |
|
35 | 35 | def create(self, *a, **kw): |
|
36 | 36 | """Create the constraint in the database. |
|
37 | 37 | |
|
38 | 38 | :param engine: the database engine to use. If this is \ |
|
39 | 39 | :keyword:`None` the instance's engine will be used |
|
40 | 40 | :type engine: :class:`sqlalchemy.engine.base.Engine` |
|
41 | 41 | :param connection: reuse connection istead of creating new one. |
|
42 | 42 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
43 | 43 | """ |
|
44 | 44 | # TODO: set the parent here instead of in __init__ |
|
45 | 45 | self.__do_imports('constraintgenerator', *a, **kw) |
|
46 | 46 | |
|
47 | 47 | def drop(self, *a, **kw): |
|
48 | 48 | """Drop the constraint from the database. |
|
49 | 49 | |
|
50 | 50 | :param engine: the database engine to use. If this is |
|
51 | 51 | :keyword:`None` the instance's engine will be used |
|
52 | 52 | :param cascade: Issue CASCADE drop if database supports it |
|
53 | 53 | :type engine: :class:`sqlalchemy.engine.base.Engine` |
|
54 | 54 | :type cascade: bool |
|
55 | 55 | :param connection: reuse connection istead of creating new one. |
|
56 | 56 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
57 | 57 | :returns: Instance with cleared columns |
|
58 | 58 | """ |
|
59 | 59 | self.cascade = kw.pop('cascade', False) |
|
60 | 60 | self.__do_imports('constraintdropper', *a, **kw) |
|
61 | 61 | # the spirit of Constraint objects is that they |
|
62 | 62 | # are immutable (just like in a DB. they're only ADDed |
|
63 | 63 | # or DROPped). |
|
64 | 64 | #self.columns.clear() |
|
65 | 65 | return self |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | class PrimaryKeyConstraint(ConstraintChangeset, schema.PrimaryKeyConstraint): |
|
69 | 69 | """Construct PrimaryKeyConstraint |
|
70 | 70 | |
|
71 | 71 | Migrate's additional parameters: |
|
72 | 72 | |
|
73 | 73 | :param cols: Columns in constraint. |
|
74 | 74 | :param table: If columns are passed as strings, this kw is required |
|
75 | 75 | :type table: Table instance |
|
76 | 76 | :type cols: strings or Column instances |
|
77 | 77 | """ |
|
78 | 78 | |
|
79 | 79 | __migrate_visit_name__ = 'migrate_primary_key_constraint' |
|
80 | 80 | |
|
81 | 81 | def __init__(self, *cols, **kwargs): |
|
82 | 82 | colnames, table = self._normalize_columns(cols) |
|
83 | 83 | table = kwargs.pop('table', table) |
|
84 | 84 | super(PrimaryKeyConstraint, self).__init__(*colnames, **kwargs) |
|
85 | 85 | if table is not None: |
|
86 | 86 | self._set_parent(table) |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def autoname(self): |
|
90 | 90 | """Mimic the database's automatic constraint names""" |
|
91 | 91 | return "%s_pkey" % self.table.name |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | class ForeignKeyConstraint(ConstraintChangeset, schema.ForeignKeyConstraint): |
|
95 | 95 | """Construct ForeignKeyConstraint |
|
96 | 96 | |
|
97 | 97 | Migrate's additional parameters: |
|
98 | 98 | |
|
99 | 99 | :param columns: Columns in constraint |
|
100 | 100 | :param refcolumns: Columns that this FK reffers to in another table. |
|
101 | 101 | :param table: If columns are passed as strings, this kw is required |
|
102 | 102 | :type table: Table instance |
|
103 | 103 | :type columns: list of strings or Column instances |
|
104 | 104 | :type refcolumns: list of strings or Column instances |
|
105 | 105 | """ |
|
106 | 106 | |
|
107 | 107 | __migrate_visit_name__ = 'migrate_foreign_key_constraint' |
|
108 | 108 | |
|
109 | 109 | def __init__(self, columns, refcolumns, *args, **kwargs): |
|
110 | 110 | colnames, table = self._normalize_columns(columns) |
|
111 | 111 | table = kwargs.pop('table', table) |
|
112 | 112 | refcolnames, reftable = self._normalize_columns(refcolumns, |
|
113 | 113 | table_name=True) |
|
114 | 114 | super(ForeignKeyConstraint, self).__init__(colnames, refcolnames, *args, |
|
115 | 115 | **kwargs) |
|
116 | 116 | if table is not None: |
|
117 | 117 | self._set_parent(table) |
|
118 | 118 | |
|
119 | 119 | @property |
|
120 | 120 | def referenced(self): |
|
121 | 121 | return [e.column for e in self.elements] |
|
122 | 122 | |
|
123 | 123 | @property |
|
124 | 124 | def reftable(self): |
|
125 | 125 | return self.referenced[0].table |
|
126 | 126 | |
|
127 | 127 | def autoname(self): |
|
128 | 128 | """Mimic the database's automatic constraint names""" |
|
129 | 129 | if hasattr(self.columns, 'keys'): |
|
130 | 130 | # SA <= 0.5 |
|
131 | 131 | firstcol = self.columns[self.columns.keys()[0]] |
|
132 | 132 | ret = "%(table)s_%(firstcolumn)s_fkey" % dict( |
|
133 | 133 | table=firstcol.table.name, |
|
134 | 134 | firstcolumn=firstcol.name,) |
|
135 | 135 | else: |
|
136 | 136 | # SA >= 0.6 |
|
137 | 137 | ret = "%(table)s_%(firstcolumn)s_fkey" % dict( |
|
138 | 138 | table=self.table.name, |
|
139 | 139 | firstcolumn=self.columns[0],) |
|
140 | 140 | return ret |
|
141 | 141 | |
|
142 | 142 | |
|
143 | 143 | class CheckConstraint(ConstraintChangeset, schema.CheckConstraint): |
|
144 | 144 | """Construct CheckConstraint |
|
145 | 145 | |
|
146 | 146 | Migrate's additional parameters: |
|
147 | 147 | |
|
148 | 148 | :param sqltext: Plain SQL text to check condition |
|
149 | 149 | :param columns: If not name is applied, you must supply this kw\ |
|
150 | 150 | to autoname constraint |
|
151 | 151 | :param table: If columns are passed as strings, this kw is required |
|
152 | 152 | :type table: Table instance |
|
153 | 153 | :type columns: list of Columns instances |
|
154 | 154 | :type sqltext: string |
|
155 | 155 | """ |
|
156 | 156 | |
|
157 | 157 | __migrate_visit_name__ = 'migrate_check_constraint' |
|
158 | 158 | |
|
159 | 159 | def __init__(self, sqltext, *args, **kwargs): |
|
160 | 160 | cols = kwargs.pop('columns', []) |
|
161 | 161 | if not cols and not kwargs.get('name', False): |
|
162 | 162 | raise InvalidConstraintError('You must either set "name"' |
|
163 | 163 | 'parameter or "columns" to autogenarate it.') |
|
164 | 164 | colnames, table = self._normalize_columns(cols) |
|
165 | 165 | table = kwargs.pop('table', table) |
|
166 | 166 | schema.CheckConstraint.__init__(self, sqltext, *args, **kwargs) |
|
167 | 167 | if table is not None: |
|
168 | 168 | if not SQLA_06: |
|
169 | 169 | self.table = table |
|
170 | 170 | self._set_parent(table) |
|
171 | 171 | self.colnames = colnames |
|
172 | 172 | |
|
173 | 173 | def autoname(self): |
|
174 | 174 | return "%(table)s_%(cols)s_check" % \ |
|
175 | 175 | dict(table=self.table.name, cols="_".join(self.colnames)) |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | class UniqueConstraint(ConstraintChangeset, schema.UniqueConstraint): |
|
179 | 179 | """Construct UniqueConstraint |
|
180 | 180 | |
|
181 | 181 | Migrate's additional parameters: |
|
182 | 182 | |
|
183 | 183 | :param cols: Columns in constraint. |
|
184 | 184 | :param table: If columns are passed as strings, this kw is required |
|
185 | 185 | :type table: Table instance |
|
186 | 186 | :type cols: strings or Column instances |
|
187 | 187 | |
|
188 | 188 | .. versionadded:: 0.6.0 |
|
189 | 189 | """ |
|
190 | 190 | |
|
191 | 191 | __migrate_visit_name__ = 'migrate_unique_constraint' |
|
192 | 192 | |
|
193 | 193 | def __init__(self, *cols, **kwargs): |
|
194 | 194 | self.colnames, table = self._normalize_columns(cols) |
|
195 | 195 | table = kwargs.pop('table', table) |
|
196 | 196 | super(UniqueConstraint, self).__init__(*self.colnames, **kwargs) |
|
197 | 197 | if table is not None: |
|
198 | 198 | self._set_parent(table) |
|
199 | 199 | |
|
200 | 200 | def autoname(self): |
|
201 | 201 | """Mimic the database's automatic constraint names""" |
|
202 | 202 | return "%s_%s_key" % (self.table.name, self.colnames[0]) |
@@ -1,80 +1,99 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Firebird database specific implementations of changeset classes. |
|
3 | 3 | """ |
|
4 | 4 | from sqlalchemy.databases import firebird as sa_base |
|
5 | ||
|
5 | from sqlalchemy.schema import PrimaryKeyConstraint | |
|
6 | 6 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
7 | 7 | from rhodecode.lib.dbmigrate.migrate.changeset import ansisql, SQLA_06 |
|
8 | 8 | |
|
9 | 9 | |
|
10 | 10 | if SQLA_06: |
|
11 | 11 | FBSchemaGenerator = sa_base.FBDDLCompiler |
|
12 | 12 | else: |
|
13 | 13 | FBSchemaGenerator = sa_base.FBSchemaGenerator |
|
14 | 14 | |
|
15 | 15 | class FBColumnGenerator(FBSchemaGenerator, ansisql.ANSIColumnGenerator): |
|
16 | 16 | """Firebird column generator implementation.""" |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | class FBColumnDropper(ansisql.ANSIColumnDropper): |
|
20 | 20 | """Firebird column dropper implementation.""" |
|
21 | 21 | |
|
22 | 22 | def visit_column(self, column): |
|
23 | 23 | """Firebird supports 'DROP col' instead of 'DROP COLUMN col' syntax |
|
24 | 24 | |
|
25 | 25 | Drop primary key and unique constraints if dropped column is referencing it.""" |
|
26 | 26 | if column.primary_key: |
|
27 | 27 | if column.table.primary_key.columns.contains_column(column): |
|
28 | 28 | column.table.primary_key.drop() |
|
29 | 29 | # TODO: recreate primary key if it references more than this column |
|
30 | if column.unique or getattr(column, 'unique_name', None): | |
|
31 | for cons in column.table.constraints: | |
|
32 | if cons.contains_column(column): | |
|
33 | cons.drop() | |
|
34 | # TODO: recreate unique constraint if it refenrences more than this column | |
|
35 | 30 | |
|
36 | table = self.start_alter_table(column) | |
|
31 | for index in column.table.indexes: | |
|
32 | # "column in index.columns" causes problems as all | |
|
33 | # column objects compare equal and return a SQL expression | |
|
34 | if column.name in [col.name for col in index.columns]: | |
|
35 | index.drop() | |
|
36 | # TODO: recreate index if it references more than this column | |
|
37 | ||
|
38 | for cons in column.table.constraints: | |
|
39 | if isinstance(cons,PrimaryKeyConstraint): | |
|
40 | # will be deleted only when the column its on | |
|
41 | # is deleted! | |
|
42 | continue | |
|
43 | ||
|
44 | if SQLA_06: | |
|
45 | should_drop = column.name in cons.columns | |
|
46 | else: | |
|
47 | should_drop = cons.contains_column(column) and cons.name | |
|
48 | if should_drop: | |
|
49 | self.start_alter_table(column) | |
|
50 | self.append("DROP CONSTRAINT ") | |
|
51 | self.append(self.preparer.format_constraint(cons)) | |
|
52 | self.execute() | |
|
53 | # TODO: recreate unique constraint if it refenrences more than this column | |
|
54 | ||
|
55 | self.start_alter_table(column) | |
|
37 | 56 | self.append('DROP %s' % self.preparer.format_column(column)) |
|
38 | 57 | self.execute() |
|
39 | 58 | |
|
40 | 59 | |
|
41 | 60 | class FBSchemaChanger(ansisql.ANSISchemaChanger): |
|
42 | 61 | """Firebird schema changer implementation.""" |
|
43 | 62 | |
|
44 | 63 | def visit_table(self, table): |
|
45 | 64 | """Rename table not supported""" |
|
46 | 65 | raise exceptions.NotSupportedError( |
|
47 | 66 | "Firebird does not support renaming tables.") |
|
48 | 67 | |
|
49 | 68 | def _visit_column_name(self, table, column, delta): |
|
50 | 69 | self.start_alter_table(table) |
|
51 | 70 | col_name = self.preparer.quote(delta.current_name, table.quote) |
|
52 | 71 | new_name = self.preparer.format_column(delta.result_column) |
|
53 | 72 | self.append('ALTER COLUMN %s TO %s' % (col_name, new_name)) |
|
54 | 73 | |
|
55 | 74 | def _visit_column_nullable(self, table, column, delta): |
|
56 | 75 | """Changing NULL is not supported""" |
|
57 | 76 | # TODO: http://www.firebirdfaq.org/faq103/ |
|
58 | 77 | raise exceptions.NotSupportedError( |
|
59 | 78 | "Firebird does not support altering NULL bevahior.") |
|
60 | 79 | |
|
61 | 80 | |
|
62 | 81 | class FBConstraintGenerator(ansisql.ANSIConstraintGenerator): |
|
63 | 82 | """Firebird constraint generator implementation.""" |
|
64 | 83 | |
|
65 | 84 | |
|
66 | 85 | class FBConstraintDropper(ansisql.ANSIConstraintDropper): |
|
67 | 86 | """Firebird constaint dropper implementation.""" |
|
68 | 87 | |
|
69 | 88 | def cascade_constraint(self, constraint): |
|
70 | 89 | """Cascading constraints is not supported""" |
|
71 | 90 | raise exceptions.NotSupportedError( |
|
72 | 91 | "Firebird does not support cascading constraints") |
|
73 | 92 | |
|
74 | 93 | |
|
75 | 94 | class FBDialect(ansisql.ANSIDialect): |
|
76 | 95 | columngenerator = FBColumnGenerator |
|
77 | 96 | columndropper = FBColumnDropper |
|
78 | 97 | schemachanger = FBSchemaChanger |
|
79 | 98 | constraintgenerator = FBConstraintGenerator |
|
80 | 99 | constraintdropper = FBConstraintDropper |
@@ -1,148 +1,155 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | `SQLite`_ database specific implementations of changeset classes. |
|
3 | 3 | |
|
4 | 4 | .. _`SQLite`: http://www.sqlite.org/ |
|
5 | 5 | """ |
|
6 | 6 | from UserDict import DictMixin |
|
7 | 7 | from copy import copy |
|
8 | 8 | |
|
9 | 9 | from sqlalchemy.databases import sqlite as sa_base |
|
10 | 10 | |
|
11 | 11 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
12 | 12 | from rhodecode.lib.dbmigrate.migrate.changeset import ansisql, SQLA_06 |
|
13 | 13 | |
|
14 | 14 | |
|
15 | 15 | if not SQLA_06: |
|
16 | 16 | SQLiteSchemaGenerator = sa_base.SQLiteSchemaGenerator |
|
17 | 17 | else: |
|
18 | 18 | SQLiteSchemaGenerator = sa_base.SQLiteDDLCompiler |
|
19 | 19 | |
|
20 | 20 | class SQLiteCommon(object): |
|
21 | 21 | |
|
22 | 22 | def _not_supported(self, op): |
|
23 | 23 | raise exceptions.NotSupportedError("SQLite does not support " |
|
24 | 24 | "%s; see http://www.sqlite.org/lang_altertable.html" % op) |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | class SQLiteHelper(SQLiteCommon): |
|
28 | 28 | |
|
29 | 29 | def recreate_table(self,table,column=None,delta=None): |
|
30 | 30 | table_name = self.preparer.format_table(table) |
|
31 | 31 | |
|
32 | 32 | # we remove all indexes so as not to have |
|
33 | 33 | # problems during copy and re-create |
|
34 | 34 | for index in table.indexes: |
|
35 | 35 | index.drop() |
|
36 | 36 | |
|
37 | 37 | self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name) |
|
38 | 38 | self.execute() |
|
39 | 39 | |
|
40 | 40 | insertion_string = self._modify_table(table, column, delta) |
|
41 | 41 | |
|
42 | 42 | table.create() |
|
43 | 43 | self.append(insertion_string % {'table_name': table_name}) |
|
44 | 44 | self.execute() |
|
45 | 45 | self.append('DROP TABLE migration_tmp') |
|
46 | 46 | self.execute() |
|
47 | 47 | |
|
48 | 48 | def visit_column(self, delta): |
|
49 | 49 | if isinstance(delta, DictMixin): |
|
50 | 50 | column = delta.result_column |
|
51 | 51 | table = self._to_table(delta.table) |
|
52 | 52 | else: |
|
53 | 53 | column = delta |
|
54 | 54 | table = self._to_table(column.table) |
|
55 | 55 | self.recreate_table(table,column,delta) |
|
56 | 56 | |
|
57 | 57 | class SQLiteColumnGenerator(SQLiteSchemaGenerator, |
|
58 | 58 | ansisql.ANSIColumnGenerator, |
|
59 | 59 | # at the end so we get the normal |
|
60 | 60 | # visit_column by default |
|
61 | 61 | SQLiteHelper, |
|
62 | 62 | SQLiteCommon |
|
63 | 63 | ): |
|
64 | 64 | """SQLite ColumnGenerator""" |
|
65 | 65 | |
|
66 | 66 | def _modify_table(self, table, column, delta): |
|
67 | 67 | columns = ' ,'.join(map( |
|
68 | 68 | self.preparer.format_column, |
|
69 | 69 | [c for c in table.columns if c.name!=column.name])) |
|
70 | 70 | return ('INSERT INTO %%(table_name)s (%(cols)s) ' |
|
71 | 71 | 'SELECT %(cols)s from migration_tmp')%{'cols':columns} |
|
72 | 72 | |
|
73 | 73 | def visit_column(self,column): |
|
74 | 74 | if column.foreign_keys: |
|
75 | 75 | SQLiteHelper.visit_column(self,column) |
|
76 | 76 | else: |
|
77 | 77 | super(SQLiteColumnGenerator,self).visit_column(column) |
|
78 | 78 | |
|
79 | 79 | class SQLiteColumnDropper(SQLiteHelper, ansisql.ANSIColumnDropper): |
|
80 | 80 | """SQLite ColumnDropper""" |
|
81 | 81 | |
|
82 | 82 | def _modify_table(self, table, column, delta): |
|
83 | ||
|
83 | 84 | columns = ' ,'.join(map(self.preparer.format_column, table.columns)) |
|
84 | 85 | return 'INSERT INTO %(table_name)s SELECT ' + columns + \ |
|
85 | 86 | ' from migration_tmp' |
|
86 | 87 | |
|
88 | def visit_column(self,column): | |
|
89 | # For SQLite, we *have* to remove the column here so the table | |
|
90 | # is re-created properly. | |
|
91 | column.remove_from_table(column.table,unset_table=False) | |
|
92 | super(SQLiteColumnDropper,self).visit_column(column) | |
|
93 | ||
|
87 | 94 | |
|
88 | 95 | class SQLiteSchemaChanger(SQLiteHelper, ansisql.ANSISchemaChanger): |
|
89 | 96 | """SQLite SchemaChanger""" |
|
90 | 97 | |
|
91 | 98 | def _modify_table(self, table, column, delta): |
|
92 | 99 | return 'INSERT INTO %(table_name)s SELECT * from migration_tmp' |
|
93 | 100 | |
|
94 | 101 | def visit_index(self, index): |
|
95 | 102 | """Does not support ALTER INDEX""" |
|
96 | 103 | self._not_supported('ALTER INDEX') |
|
97 | 104 | |
|
98 | 105 | |
|
99 | 106 | class SQLiteConstraintGenerator(ansisql.ANSIConstraintGenerator, SQLiteHelper, SQLiteCommon): |
|
100 | 107 | |
|
101 | 108 | def visit_migrate_primary_key_constraint(self, constraint): |
|
102 | 109 | tmpl = "CREATE UNIQUE INDEX %s ON %s ( %s )" |
|
103 | 110 | cols = ', '.join(map(self.preparer.format_column, constraint.columns)) |
|
104 | 111 | tname = self.preparer.format_table(constraint.table) |
|
105 | 112 | name = self.get_constraint_name(constraint) |
|
106 | 113 | msg = tmpl % (name, tname, cols) |
|
107 | 114 | self.append(msg) |
|
108 | 115 | self.execute() |
|
109 | 116 | |
|
110 | 117 | def _modify_table(self, table, column, delta): |
|
111 | 118 | return 'INSERT INTO %(table_name)s SELECT * from migration_tmp' |
|
112 | 119 | |
|
113 | 120 | def visit_migrate_foreign_key_constraint(self, *p, **k): |
|
114 | 121 | self.recreate_table(p[0].table) |
|
115 | 122 | |
|
116 | 123 | def visit_migrate_unique_constraint(self, *p, **k): |
|
117 | 124 | self.recreate_table(p[0].table) |
|
118 | 125 | |
|
119 | 126 | |
|
120 | 127 | class SQLiteConstraintDropper(ansisql.ANSIColumnDropper, |
|
121 | 128 | SQLiteCommon, |
|
122 | 129 | ansisql.ANSIConstraintCommon): |
|
123 | 130 | |
|
124 | 131 | def visit_migrate_primary_key_constraint(self, constraint): |
|
125 | 132 | tmpl = "DROP INDEX %s " |
|
126 | 133 | name = self.get_constraint_name(constraint) |
|
127 | 134 | msg = tmpl % (name) |
|
128 | 135 | self.append(msg) |
|
129 | 136 | self.execute() |
|
130 | 137 | |
|
131 | 138 | def visit_migrate_foreign_key_constraint(self, *p, **k): |
|
132 | 139 | self._not_supported('ALTER TABLE DROP CONSTRAINT') |
|
133 | 140 | |
|
134 | 141 | def visit_migrate_check_constraint(self, *p, **k): |
|
135 | 142 | self._not_supported('ALTER TABLE DROP CONSTRAINT') |
|
136 | 143 | |
|
137 | 144 | def visit_migrate_unique_constraint(self, *p, **k): |
|
138 | 145 | self._not_supported('ALTER TABLE DROP CONSTRAINT') |
|
139 | 146 | |
|
140 | 147 | |
|
141 | 148 | # TODO: technically primary key is a NOT NULL + UNIQUE constraint, should add NOT NULL to index |
|
142 | 149 | |
|
143 | 150 | class SQLiteDialect(ansisql.ANSIDialect): |
|
144 | 151 | columngenerator = SQLiteColumnGenerator |
|
145 | 152 | columndropper = SQLiteColumnDropper |
|
146 | 153 | schemachanger = SQLiteSchemaChanger |
|
147 | 154 | constraintgenerator = SQLiteConstraintGenerator |
|
148 | 155 | constraintdropper = SQLiteConstraintDropper |
@@ -1,669 +1,651 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Schema module providing common schema operations. |
|
3 | 3 | """ |
|
4 | 4 | import warnings |
|
5 | 5 | |
|
6 | 6 | from UserDict import DictMixin |
|
7 | 7 | |
|
8 | 8 | import sqlalchemy |
|
9 | 9 | |
|
10 | 10 | from sqlalchemy.schema import ForeignKeyConstraint |
|
11 | 11 | from sqlalchemy.schema import UniqueConstraint |
|
12 | 12 | |
|
13 | 13 | from rhodecode.lib.dbmigrate.migrate.exceptions import * |
|
14 | 14 | from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_06 |
|
15 | 15 | from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (get_engine_visitor, |
|
16 | 16 | run_single_visitor) |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | __all__ = [ |
|
20 | 20 | 'create_column', |
|
21 | 21 | 'drop_column', |
|
22 | 22 | 'alter_column', |
|
23 | 23 | 'rename_table', |
|
24 | 24 | 'rename_index', |
|
25 | 25 | 'ChangesetTable', |
|
26 | 26 | 'ChangesetColumn', |
|
27 | 27 | 'ChangesetIndex', |
|
28 | 28 | 'ChangesetDefaultClause', |
|
29 | 29 | 'ColumnDelta', |
|
30 | 30 | ] |
|
31 | 31 | |
|
32 | DEFAULT_ALTER_METADATA = True | |
|
33 | ||
|
34 | ||
|
35 | 32 | def create_column(column, table=None, *p, **kw): |
|
36 | 33 | """Create a column, given the table. |
|
37 | 34 | |
|
38 | 35 | API to :meth:`ChangesetColumn.create`. |
|
39 | 36 | """ |
|
40 | 37 | if table is not None: |
|
41 | 38 | return table.create_column(column, *p, **kw) |
|
42 | 39 | return column.create(*p, **kw) |
|
43 | 40 | |
|
44 | 41 | |
|
45 | 42 | def drop_column(column, table=None, *p, **kw): |
|
46 | 43 | """Drop a column, given the table. |
|
47 | 44 | |
|
48 | 45 | API to :meth:`ChangesetColumn.drop`. |
|
49 | 46 | """ |
|
50 | 47 | if table is not None: |
|
51 | 48 | return table.drop_column(column, *p, **kw) |
|
52 | 49 | return column.drop(*p, **kw) |
|
53 | 50 | |
|
54 | 51 | |
|
55 | 52 | def rename_table(table, name, engine=None, **kw): |
|
56 | 53 | """Rename a table. |
|
57 | 54 | |
|
58 | 55 | If Table instance is given, engine is not used. |
|
59 | 56 | |
|
60 | 57 | API to :meth:`ChangesetTable.rename`. |
|
61 | 58 | |
|
62 | 59 | :param table: Table to be renamed. |
|
63 | 60 | :param name: New name for Table. |
|
64 | 61 | :param engine: Engine instance. |
|
65 | 62 | :type table: string or Table instance |
|
66 | 63 | :type name: string |
|
67 | 64 | :type engine: obj |
|
68 | 65 | """ |
|
69 | 66 | table = _to_table(table, engine) |
|
70 | 67 | table.rename(name, **kw) |
|
71 | 68 | |
|
72 | 69 | |
|
73 | 70 | def rename_index(index, name, table=None, engine=None, **kw): |
|
74 | 71 | """Rename an index. |
|
75 | 72 | |
|
76 | 73 | If Index instance is given, |
|
77 | 74 | table and engine are not used. |
|
78 | 75 | |
|
79 | 76 | API to :meth:`ChangesetIndex.rename`. |
|
80 | 77 | |
|
81 | 78 | :param index: Index to be renamed. |
|
82 | 79 | :param name: New name for index. |
|
83 | 80 | :param table: Table to which Index is reffered. |
|
84 | 81 | :param engine: Engine instance. |
|
85 | 82 | :type index: string or Index instance |
|
86 | 83 | :type name: string |
|
87 | 84 | :type table: string or Table instance |
|
88 | 85 | :type engine: obj |
|
89 | 86 | """ |
|
90 | 87 | index = _to_index(index, table, engine) |
|
91 | 88 | index.rename(name, **kw) |
|
92 | 89 | |
|
93 | 90 | |
|
94 | 91 | def alter_column(*p, **k): |
|
95 | 92 | """Alter a column. |
|
96 | 93 | |
|
97 | 94 | This is a helper function that creates a :class:`ColumnDelta` and |
|
98 | 95 | runs it. |
|
99 | 96 | |
|
100 | 97 | :argument column: |
|
101 | 98 | The name of the column to be altered or a |
|
102 | 99 | :class:`ChangesetColumn` column representing it. |
|
103 | 100 | |
|
104 | 101 | :param table: |
|
105 | 102 | A :class:`~sqlalchemy.schema.Table` or table name to |
|
106 | 103 | for the table where the column will be changed. |
|
107 | 104 | |
|
108 | 105 | :param engine: |
|
109 | 106 | The :class:`~sqlalchemy.engine.base.Engine` to use for table |
|
110 | 107 | reflection and schema alterations. |
|
111 | 108 | |
|
112 | :param alter_metadata: | |
|
113 | If `True`, which is the default, the | |
|
114 | :class:`~sqlalchemy.schema.Column` will also modified. | |
|
115 | If `False`, the :class:`~sqlalchemy.schema.Column` will be left | |
|
116 | as it was. | |
|
117 | ||
|
118 | 109 | :returns: A :class:`ColumnDelta` instance representing the change. |
|
119 | 110 | |
|
120 | 111 | |
|
121 | 112 | """ |
|
122 | ||
|
123 | k.setdefault('alter_metadata', DEFAULT_ALTER_METADATA) | |
|
124 | ||
|
113 | ||
|
125 | 114 | if 'table' not in k and isinstance(p[0], sqlalchemy.Column): |
|
126 | 115 | k['table'] = p[0].table |
|
127 | 116 | if 'engine' not in k: |
|
128 | 117 | k['engine'] = k['table'].bind |
|
129 | 118 | |
|
130 | 119 | # deprecation |
|
131 | 120 | if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column): |
|
132 | 121 | warnings.warn( |
|
133 | 122 | "Passing a Column object to alter_column is deprecated." |
|
134 | 123 | " Just pass in keyword parameters instead.", |
|
135 | 124 | MigrateDeprecationWarning |
|
136 | 125 | ) |
|
137 | 126 | engine = k['engine'] |
|
127 | ||
|
128 | # enough tests seem to break when metadata is always altered | |
|
129 | # that this crutch has to be left in until they can be sorted | |
|
130 | # out | |
|
131 | k['alter_metadata']=True | |
|
132 | ||
|
138 | 133 | delta = ColumnDelta(*p, **k) |
|
139 | 134 | |
|
140 | 135 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
141 | 136 | engine._run_visitor(visitorcallable, delta) |
|
142 | 137 | |
|
143 | 138 | return delta |
|
144 | 139 | |
|
145 | 140 | |
|
146 | 141 | def _to_table(table, engine=None): |
|
147 | 142 | """Return if instance of Table, else construct new with metadata""" |
|
148 | 143 | if isinstance(table, sqlalchemy.Table): |
|
149 | 144 | return table |
|
150 | 145 | |
|
151 | 146 | # Given: table name, maybe an engine |
|
152 | 147 | meta = sqlalchemy.MetaData() |
|
153 | 148 | if engine is not None: |
|
154 | 149 | meta.bind = engine |
|
155 | 150 | return sqlalchemy.Table(table, meta) |
|
156 | 151 | |
|
157 | 152 | |
|
158 | 153 | def _to_index(index, table=None, engine=None): |
|
159 | 154 | """Return if instance of Index, else construct new with metadata""" |
|
160 | 155 | if isinstance(index, sqlalchemy.Index): |
|
161 | 156 | return index |
|
162 | 157 | |
|
163 | 158 | # Given: index name; table name required |
|
164 | 159 | table = _to_table(table, engine) |
|
165 | 160 | ret = sqlalchemy.Index(index) |
|
166 | 161 | ret.table = table |
|
167 | 162 | return ret |
|
168 | 163 | |
|
169 | 164 | |
|
170 | 165 | class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem): |
|
171 | 166 | """Extracts the differences between two columns/column-parameters |
|
172 | 167 | |
|
173 | 168 | May receive parameters arranged in several different ways: |
|
174 | 169 | |
|
175 | 170 | * **current_column, new_column, \*p, \*\*kw** |
|
176 | 171 | Additional parameters can be specified to override column |
|
177 | 172 | differences. |
|
178 | 173 | |
|
179 | 174 | * **current_column, \*p, \*\*kw** |
|
180 | 175 | Additional parameters alter current_column. Table name is extracted |
|
181 | 176 | from current_column object. |
|
182 | 177 | Name is changed to current_column.name from current_name, |
|
183 | 178 | if current_name is specified. |
|
184 | 179 | |
|
185 | 180 | * **current_col_name, \*p, \*\*kw** |
|
186 | 181 | Table kw must specified. |
|
187 | 182 | |
|
188 | 183 | :param table: Table at which current Column should be bound to.\ |
|
189 | 184 | If table name is given, reflection will be used. |
|
190 | 185 | :type table: string or Table instance |
|
191 | :param alter_metadata: If True, it will apply changes to metadata. | |
|
192 | :type alter_metadata: bool | |
|
193 | :param metadata: If `alter_metadata` is true, \ | |
|
194 | metadata is used to reflect table names into | |
|
195 | :type metadata: :class:`MetaData` instance | |
|
186 | ||
|
187 | :param metadata: A :class:`MetaData` instance to store | |
|
188 | reflected table names | |
|
189 | ||
|
196 | 190 | :param engine: When reflecting tables, either engine or metadata must \ |
|
197 | 191 | be specified to acquire engine object. |
|
198 | 192 | :type engine: :class:`Engine` instance |
|
199 | 193 | :returns: :class:`ColumnDelta` instance provides interface for altered attributes to \ |
|
200 | 194 | `result_column` through :func:`dict` alike object. |
|
201 | 195 | |
|
202 | 196 | * :class:`ColumnDelta`.result_column is altered column with new attributes |
|
203 | 197 | |
|
204 | 198 | * :class:`ColumnDelta`.current_name is current name of column in db |
|
205 | 199 | |
|
206 | 200 | |
|
207 | 201 | """ |
|
208 | 202 | |
|
209 | 203 | # Column attributes that can be altered |
|
210 | 204 | diff_keys = ('name', 'type', 'primary_key', 'nullable', |
|
211 | 205 | 'server_onupdate', 'server_default', 'autoincrement') |
|
212 | 206 | diffs = dict() |
|
213 | 207 | __visit_name__ = 'column' |
|
214 | 208 | |
|
215 | 209 | def __init__(self, *p, **kw): |
|
210 | # 'alter_metadata' is not a public api. It exists purely | |
|
211 | # as a crutch until the tests that fail when 'alter_metadata' | |
|
212 | # behaviour always happens can be sorted out | |
|
216 | 213 | self.alter_metadata = kw.pop("alter_metadata", False) |
|
214 | ||
|
217 | 215 | self.meta = kw.pop("metadata", None) |
|
218 | 216 | self.engine = kw.pop("engine", None) |
|
219 | 217 | |
|
220 | 218 | # Things are initialized differently depending on how many column |
|
221 | 219 | # parameters are given. Figure out how many and call the appropriate |
|
222 | 220 | # method. |
|
223 | 221 | if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column): |
|
224 | 222 | # At least one column specified |
|
225 | 223 | if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column): |
|
226 | 224 | # Two columns specified |
|
227 | 225 | diffs = self.compare_2_columns(*p, **kw) |
|
228 | 226 | else: |
|
229 | 227 | # Exactly one column specified |
|
230 | 228 | diffs = self.compare_1_column(*p, **kw) |
|
231 | 229 | else: |
|
232 | 230 | # Zero columns specified |
|
233 | 231 | if not len(p) or not isinstance(p[0], basestring): |
|
234 | 232 | raise ValueError("First argument must be column name") |
|
235 | 233 | diffs = self.compare_parameters(*p, **kw) |
|
236 | 234 | |
|
237 | 235 | self.apply_diffs(diffs) |
|
238 | 236 | |
|
239 | 237 | def __repr__(self): |
|
240 |
return '<ColumnDelta altermetadata=%r, %s>' % ( |
|
|
241 | super(ColumnDelta, self).__repr__()) | |
|
242 | ||
|
238 | return '<ColumnDelta altermetadata=%r, %s>' % ( | |
|
239 | self.alter_metadata, | |
|
240 | super(ColumnDelta, self).__repr__() | |
|
241 | ) | |
|
242 | ||
|
243 | 243 | def __getitem__(self, key): |
|
244 | 244 | if key not in self.keys(): |
|
245 | raise KeyError("No such diff key, available: %s" % self.diffs) | |
|
245 | raise KeyError("No such diff key, available: %s" % self.diffs ) | |
|
246 | 246 | return getattr(self.result_column, key) |
|
247 | 247 | |
|
248 | 248 | def __setitem__(self, key, value): |
|
249 | 249 | if key not in self.keys(): |
|
250 | raise KeyError("No such diff key, available: %s" % self.diffs) | |
|
250 | raise KeyError("No such diff key, available: %s" % self.diffs ) | |
|
251 | 251 | setattr(self.result_column, key, value) |
|
252 | 252 | |
|
253 | 253 | def __delitem__(self, key): |
|
254 | 254 | raise NotImplementedError |
|
255 | 255 | |
|
256 | 256 | def keys(self): |
|
257 | 257 | return self.diffs.keys() |
|
258 | 258 | |
|
259 | 259 | def compare_parameters(self, current_name, *p, **k): |
|
260 | 260 | """Compares Column objects with reflection""" |
|
261 | 261 | self.table = k.pop('table') |
|
262 | 262 | self.result_column = self._table.c.get(current_name) |
|
263 | 263 | if len(p): |
|
264 | 264 | k = self._extract_parameters(p, k, self.result_column) |
|
265 | 265 | return k |
|
266 | 266 | |
|
267 | 267 | def compare_1_column(self, col, *p, **k): |
|
268 | 268 | """Compares one Column object""" |
|
269 | 269 | self.table = k.pop('table', None) |
|
270 | 270 | if self.table is None: |
|
271 | 271 | self.table = col.table |
|
272 | 272 | self.result_column = col |
|
273 | 273 | if len(p): |
|
274 | 274 | k = self._extract_parameters(p, k, self.result_column) |
|
275 | 275 | return k |
|
276 | 276 | |
|
277 | 277 | def compare_2_columns(self, old_col, new_col, *p, **k): |
|
278 | 278 | """Compares two Column objects""" |
|
279 | 279 | self.process_column(new_col) |
|
280 | 280 | self.table = k.pop('table', None) |
|
281 | 281 | # we cannot use bool() on table in SA06 |
|
282 | 282 | if self.table is None: |
|
283 | 283 | self.table = old_col.table |
|
284 | 284 | if self.table is None: |
|
285 | 285 | new_col.table |
|
286 | 286 | self.result_column = old_col |
|
287 | 287 | |
|
288 | 288 | # set differences |
|
289 | 289 | # leave out some stuff for later comp |
|
290 | 290 | for key in (set(self.diff_keys) - set(('type',))): |
|
291 | 291 | val = getattr(new_col, key, None) |
|
292 | 292 | if getattr(self.result_column, key, None) != val: |
|
293 | 293 | k.setdefault(key, val) |
|
294 | 294 | |
|
295 | 295 | # inspect types |
|
296 | 296 | if not self.are_column_types_eq(self.result_column.type, new_col.type): |
|
297 | 297 | k.setdefault('type', new_col.type) |
|
298 | 298 | |
|
299 | 299 | if len(p): |
|
300 | 300 | k = self._extract_parameters(p, k, self.result_column) |
|
301 | 301 | return k |
|
302 | 302 | |
|
303 | 303 | def apply_diffs(self, diffs): |
|
304 | 304 | """Populate dict and column object with new values""" |
|
305 | 305 | self.diffs = diffs |
|
306 | 306 | for key in self.diff_keys: |
|
307 | 307 | if key in diffs: |
|
308 | 308 | setattr(self.result_column, key, diffs[key]) |
|
309 | 309 | |
|
310 | 310 | self.process_column(self.result_column) |
|
311 | 311 | |
|
312 | 312 | # create an instance of class type if not yet |
|
313 | 313 | if 'type' in diffs and callable(self.result_column.type): |
|
314 | 314 | self.result_column.type = self.result_column.type() |
|
315 | 315 | |
|
316 | 316 | # add column to the table |
|
317 | 317 | if self.table is not None and self.alter_metadata: |
|
318 | 318 | self.result_column.add_to_table(self.table) |
|
319 | 319 | |
|
320 | 320 | def are_column_types_eq(self, old_type, new_type): |
|
321 | 321 | """Compares two types to be equal""" |
|
322 | 322 | ret = old_type.__class__ == new_type.__class__ |
|
323 | 323 | |
|
324 | 324 | # String length is a special case |
|
325 | 325 | if ret and isinstance(new_type, sqlalchemy.types.String): |
|
326 | 326 | ret = (getattr(old_type, 'length', None) == \ |
|
327 | 327 | getattr(new_type, 'length', None)) |
|
328 | 328 | return ret |
|
329 | 329 | |
|
330 | 330 | def _extract_parameters(self, p, k, column): |
|
331 | 331 | """Extracts data from p and modifies diffs""" |
|
332 | 332 | p = list(p) |
|
333 | 333 | while len(p): |
|
334 | 334 | if isinstance(p[0], basestring): |
|
335 | 335 | k.setdefault('name', p.pop(0)) |
|
336 | 336 | elif isinstance(p[0], sqlalchemy.types.AbstractType): |
|
337 | 337 | k.setdefault('type', p.pop(0)) |
|
338 | 338 | elif callable(p[0]): |
|
339 | 339 | p[0] = p[0]() |
|
340 | 340 | else: |
|
341 | 341 | break |
|
342 | 342 | |
|
343 | 343 | if len(p): |
|
344 | 344 | new_col = column.copy_fixed() |
|
345 | 345 | new_col._init_items(*p) |
|
346 | 346 | k = self.compare_2_columns(column, new_col, **k) |
|
347 | 347 | return k |
|
348 | 348 | |
|
349 | 349 | def process_column(self, column): |
|
350 | 350 | """Processes default values for column""" |
|
351 | 351 | # XXX: this is a snippet from SA processing of positional parameters |
|
352 | 352 | if not SQLA_06 and column.args: |
|
353 | 353 | toinit = list(column.args) |
|
354 | 354 | else: |
|
355 | 355 | toinit = list() |
|
356 | 356 | |
|
357 | 357 | if column.server_default is not None: |
|
358 | 358 | if isinstance(column.server_default, sqlalchemy.FetchedValue): |
|
359 | 359 | toinit.append(column.server_default) |
|
360 | 360 | else: |
|
361 | 361 | toinit.append(sqlalchemy.DefaultClause(column.server_default)) |
|
362 | 362 | if column.server_onupdate is not None: |
|
363 | 363 | if isinstance(column.server_onupdate, FetchedValue): |
|
364 | 364 | toinit.append(column.server_default) |
|
365 | 365 | else: |
|
366 | 366 | toinit.append(sqlalchemy.DefaultClause(column.server_onupdate, |
|
367 | 367 | for_update=True)) |
|
368 | 368 | if toinit: |
|
369 | 369 | column._init_items(*toinit) |
|
370 | ||
|
370 | ||
|
371 | 371 | if not SQLA_06: |
|
372 | 372 | column.args = [] |
|
373 | 373 | |
|
374 | 374 | def _get_table(self): |
|
375 | 375 | return getattr(self, '_table', None) |
|
376 | 376 | |
|
377 | 377 | def _set_table(self, table): |
|
378 | 378 | if isinstance(table, basestring): |
|
379 | 379 | if self.alter_metadata: |
|
380 | 380 | if not self.meta: |
|
381 | 381 | raise ValueError("metadata must be specified for table" |
|
382 | 382 | " reflection when using alter_metadata") |
|
383 | 383 | meta = self.meta |
|
384 | 384 | if self.engine: |
|
385 | 385 | meta.bind = self.engine |
|
386 | 386 | else: |
|
387 | 387 | if not self.engine and not self.meta: |
|
388 | 388 | raise ValueError("engine or metadata must be specified" |
|
389 | 389 | " to reflect tables") |
|
390 | 390 | if not self.engine: |
|
391 | 391 | self.engine = self.meta.bind |
|
392 | 392 | meta = sqlalchemy.MetaData(bind=self.engine) |
|
393 | 393 | self._table = sqlalchemy.Table(table, meta, autoload=True) |
|
394 | 394 | elif isinstance(table, sqlalchemy.Table): |
|
395 | 395 | self._table = table |
|
396 | 396 | if not self.alter_metadata: |
|
397 | 397 | self._table.meta = sqlalchemy.MetaData(bind=self._table.bind) |
|
398 | ||
|
399 | 398 | def _get_result_column(self): |
|
400 | 399 | return getattr(self, '_result_column', None) |
|
401 | 400 | |
|
402 | 401 | def _set_result_column(self, column): |
|
403 | 402 | """Set Column to Table based on alter_metadata evaluation.""" |
|
404 | 403 | self.process_column(column) |
|
405 | 404 | if not hasattr(self, 'current_name'): |
|
406 | 405 | self.current_name = column.name |
|
407 | 406 | if self.alter_metadata: |
|
408 | 407 | self._result_column = column |
|
409 | 408 | else: |
|
410 | 409 | self._result_column = column.copy_fixed() |
|
411 | 410 | |
|
412 | 411 | table = property(_get_table, _set_table) |
|
413 | 412 | result_column = property(_get_result_column, _set_result_column) |
|
414 | 413 | |
|
415 | 414 | |
|
416 | 415 | class ChangesetTable(object): |
|
417 | 416 | """Changeset extensions to SQLAlchemy tables.""" |
|
418 | 417 | |
|
419 | 418 | def create_column(self, column, *p, **kw): |
|
420 | 419 | """Creates a column. |
|
421 | 420 | |
|
422 | 421 | The column parameter may be a column definition or the name of |
|
423 | 422 | a column in this table. |
|
424 | 423 | |
|
425 | 424 | API to :meth:`ChangesetColumn.create` |
|
426 | 425 | |
|
427 | 426 | :param column: Column to be created |
|
428 | 427 | :type column: Column instance or string |
|
429 | 428 | """ |
|
430 | 429 | if not isinstance(column, sqlalchemy.Column): |
|
431 | 430 | # It's a column name |
|
432 | 431 | column = getattr(self.c, str(column)) |
|
433 | 432 | column.create(table=self, *p, **kw) |
|
434 | 433 | |
|
435 | 434 | def drop_column(self, column, *p, **kw): |
|
436 | 435 | """Drop a column, given its name or definition. |
|
437 | 436 | |
|
438 | 437 | API to :meth:`ChangesetColumn.drop` |
|
439 | 438 | |
|
440 | 439 | :param column: Column to be droped |
|
441 | 440 | :type column: Column instance or string |
|
442 | 441 | """ |
|
443 | 442 | if not isinstance(column, sqlalchemy.Column): |
|
444 | 443 | # It's a column name |
|
445 | 444 | try: |
|
446 | 445 | column = getattr(self.c, str(column)) |
|
447 | 446 | except AttributeError: |
|
448 | 447 | # That column isn't part of the table. We don't need |
|
449 | 448 | # its entire definition to drop the column, just its |
|
450 | 449 | # name, so create a dummy column with the same name. |
|
451 | 450 | column = sqlalchemy.Column(str(column), sqlalchemy.Integer()) |
|
452 | 451 | column.drop(table=self, *p, **kw) |
|
453 | 452 | |
|
454 | 453 | def rename(self, name, connection=None, **kwargs): |
|
455 | 454 | """Rename this table. |
|
456 | 455 | |
|
457 | 456 | :param name: New name of the table. |
|
458 | 457 | :type name: string |
|
459 | :param alter_metadata: If True, table will be removed from metadata | |
|
460 | :type alter_metadata: bool | |
|
461 | 458 | :param connection: reuse connection istead of creating new one. |
|
462 | 459 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
463 | 460 | """ |
|
464 | self.alter_metadata = kwargs.pop('alter_metadata', DEFAULT_ALTER_METADATA) | |
|
465 | 461 | engine = self.bind |
|
466 | 462 | self.new_name = name |
|
467 | 463 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
468 | 464 | run_single_visitor(engine, visitorcallable, self, connection, **kwargs) |
|
469 | 465 | |
|
470 | 466 | # Fix metadata registration |
|
471 |
|
|
|
472 | self.name = name | |
|
473 | self.deregister() | |
|
474 | self._set_parent(self.metadata) | |
|
467 | self.name = name | |
|
468 | self.deregister() | |
|
469 | self._set_parent(self.metadata) | |
|
475 | 470 | |
|
476 | 471 | def _meta_key(self): |
|
477 | 472 | return sqlalchemy.schema._get_table_key(self.name, self.schema) |
|
478 | 473 | |
|
479 | 474 | def deregister(self): |
|
480 | 475 | """Remove this table from its metadata""" |
|
481 | 476 | key = self._meta_key() |
|
482 | 477 | meta = self.metadata |
|
483 | 478 | if key in meta.tables: |
|
484 | 479 | del meta.tables[key] |
|
485 | 480 | |
|
486 | 481 | |
|
487 | 482 | class ChangesetColumn(object): |
|
488 | 483 | """Changeset extensions to SQLAlchemy columns.""" |
|
489 | 484 | |
|
490 | 485 | def alter(self, *p, **k): |
|
491 | 486 | """Makes a call to :func:`alter_column` for the column this |
|
492 | 487 | method is called on. |
|
493 | 488 | """ |
|
494 | 489 | if 'table' not in k: |
|
495 | 490 | k['table'] = self.table |
|
496 | 491 | if 'engine' not in k: |
|
497 | 492 | k['engine'] = k['table'].bind |
|
498 | 493 | return alter_column(self, *p, **k) |
|
499 | 494 | |
|
500 | 495 | def create(self, table=None, index_name=None, unique_name=None, |
|
501 | 496 | primary_key_name=None, populate_default=True, connection=None, **kwargs): |
|
502 | 497 | """Create this column in the database. |
|
503 | 498 | |
|
504 | 499 | Assumes the given table exists. ``ALTER TABLE ADD COLUMN``, |
|
505 | 500 | for most databases. |
|
506 | 501 | |
|
507 | 502 | :param table: Table instance to create on. |
|
508 | 503 | :param index_name: Creates :class:`ChangesetIndex` on this column. |
|
509 | 504 | :param unique_name: Creates :class:\ |
|
510 | 505 | `~migrate.changeset.constraint.UniqueConstraint` on this column. |
|
511 | 506 | :param primary_key_name: Creates :class:\ |
|
512 | 507 | `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column. |
|
513 | :param alter_metadata: If True, column will be added to table object. | |
|
514 | 508 | :param populate_default: If True, created column will be \ |
|
515 | 509 | populated with defaults |
|
516 | 510 | :param connection: reuse connection istead of creating new one. |
|
517 | 511 | :type table: Table instance |
|
518 | 512 | :type index_name: string |
|
519 | 513 | :type unique_name: string |
|
520 | 514 | :type primary_key_name: string |
|
521 | :type alter_metadata: bool | |
|
522 | 515 | :type populate_default: bool |
|
523 | 516 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
524 | 517 | |
|
525 | 518 | :returns: self |
|
526 | 519 | """ |
|
527 | 520 | self.populate_default = populate_default |
|
528 | self.alter_metadata = kwargs.pop('alter_metadata', DEFAULT_ALTER_METADATA) | |
|
529 | 521 | self.index_name = index_name |
|
530 | 522 | self.unique_name = unique_name |
|
531 | 523 | self.primary_key_name = primary_key_name |
|
532 | 524 | for cons in ('index_name', 'unique_name', 'primary_key_name'): |
|
533 | 525 | self._check_sanity_constraints(cons) |
|
534 | 526 | |
|
535 |
|
|
|
536 | self.add_to_table(table) | |
|
527 | self.add_to_table(table) | |
|
537 | 528 | engine = self.table.bind |
|
538 | 529 | visitorcallable = get_engine_visitor(engine, 'columngenerator') |
|
539 | 530 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
540 | 531 | |
|
541 | 532 | # TODO: reuse existing connection |
|
542 | 533 | if self.populate_default and self.default is not None: |
|
543 | 534 | stmt = table.update().values({self: engine._execute_default(self.default)}) |
|
544 | 535 | engine.execute(stmt) |
|
545 | 536 | |
|
546 | 537 | return self |
|
547 | 538 | |
|
548 | 539 | def drop(self, table=None, connection=None, **kwargs): |
|
549 | 540 | """Drop this column from the database, leaving its table intact. |
|
550 | 541 | |
|
551 | 542 | ``ALTER TABLE DROP COLUMN``, for most databases. |
|
552 | 543 | |
|
553 | :param alter_metadata: If True, column will be removed from table object. | |
|
554 | :type alter_metadata: bool | |
|
555 | 544 | :param connection: reuse connection istead of creating new one. |
|
556 | 545 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
557 | 546 | """ |
|
558 | self.alter_metadata = kwargs.pop('alter_metadata', DEFAULT_ALTER_METADATA) | |
|
559 | 547 | if table is not None: |
|
560 | 548 | self.table = table |
|
561 | 549 | engine = self.table.bind |
|
562 | if self.alter_metadata: | |
|
563 | self.remove_from_table(self.table, unset_table=False) | |
|
564 | 550 | visitorcallable = get_engine_visitor(engine, 'columndropper') |
|
565 | 551 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
566 | if self.alter_metadata: | |
|
567 |
|
|
|
552 | self.remove_from_table(self.table, unset_table=False) | |
|
553 | self.table = None | |
|
568 | 554 | return self |
|
569 | 555 | |
|
570 | 556 | def add_to_table(self, table): |
|
571 | 557 | if table is not None and self.table is None: |
|
572 | 558 | self._set_parent(table) |
|
573 | 559 | |
|
574 |
def _col_name_in_constraint(self, |
|
|
560 | def _col_name_in_constraint(self,cons,name): | |
|
575 | 561 | return False |
|
576 | ||
|
562 | ||
|
577 | 563 | def remove_from_table(self, table, unset_table=True): |
|
578 | 564 | # TODO: remove primary keys, constraints, etc |
|
579 | 565 | if unset_table: |
|
580 | 566 | self.table = None |
|
581 | ||
|
567 | ||
|
582 | 568 | to_drop = set() |
|
583 | 569 | for index in table.indexes: |
|
584 | 570 | columns = [] |
|
585 | 571 | for col in index.columns: |
|
586 |
if col.name |
|
|
572 | if col.name!=self.name: | |
|
587 | 573 | columns.append(col) |
|
588 | 574 | if columns: |
|
589 |
index.columns |
|
|
575 | index.columns=columns | |
|
590 | 576 | else: |
|
591 | 577 | to_drop.add(index) |
|
592 | 578 | table.indexes = table.indexes - to_drop |
|
593 | ||
|
579 | ||
|
594 | 580 | to_drop = set() |
|
595 | 581 | for cons in table.constraints: |
|
596 | 582 | # TODO: deal with other types of constraint |
|
597 |
if isinstance(cons, |
|
|
583 | if isinstance(cons,(ForeignKeyConstraint, | |
|
598 | 584 | UniqueConstraint)): |
|
599 | 585 | for col_name in cons.columns: |
|
600 |
if not isinstance(col_name, |
|
|
586 | if not isinstance(col_name,basestring): | |
|
601 | 587 | col_name = col_name.name |
|
602 |
if self.name |
|
|
588 | if self.name==col_name: | |
|
603 | 589 | to_drop.add(cons) |
|
604 | 590 | table.constraints = table.constraints - to_drop |
|
605 | ||
|
591 | ||
|
606 | 592 | if table.c.contains_column(self): |
|
607 | 593 | table.c.remove(self) |
|
608 | 594 | |
|
609 | 595 | # TODO: this is fixed in 0.6 |
|
610 | 596 | def copy_fixed(self, **kw): |
|
611 | 597 | """Create a copy of this ``Column``, with all attributes.""" |
|
612 | 598 | return sqlalchemy.Column(self.name, self.type, self.default, |
|
613 | 599 | key=self.key, |
|
614 | 600 | primary_key=self.primary_key, |
|
615 | 601 | nullable=self.nullable, |
|
616 | 602 | quote=self.quote, |
|
617 | 603 | index=self.index, |
|
618 | 604 | unique=self.unique, |
|
619 | 605 | onupdate=self.onupdate, |
|
620 | 606 | autoincrement=self.autoincrement, |
|
621 | 607 | server_default=self.server_default, |
|
622 | 608 | server_onupdate=self.server_onupdate, |
|
623 | 609 | *[c.copy(**kw) for c in self.constraints]) |
|
624 | 610 | |
|
625 | 611 | def _check_sanity_constraints(self, name): |
|
626 | 612 | """Check if constraints names are correct""" |
|
627 | 613 | obj = getattr(self, name) |
|
628 | 614 | if (getattr(self, name[:-5]) and not obj): |
|
629 | 615 | raise InvalidConstraintError("Column.create() accepts index_name," |
|
630 | 616 | " primary_key_name and unique_name to generate constraints") |
|
631 | 617 | if not isinstance(obj, basestring) and obj is not None: |
|
632 | 618 | raise InvalidConstraintError( |
|
633 | 619 | "%s argument for column must be constraint name" % name) |
|
634 | 620 | |
|
635 | 621 | |
|
636 | 622 | class ChangesetIndex(object): |
|
637 | 623 | """Changeset extensions to SQLAlchemy Indexes.""" |
|
638 | 624 | |
|
639 | 625 | __visit_name__ = 'index' |
|
640 | 626 | |
|
641 | 627 | def rename(self, name, connection=None, **kwargs): |
|
642 | 628 | """Change the name of an index. |
|
643 | 629 | |
|
644 | 630 | :param name: New name of the Index. |
|
645 | 631 | :type name: string |
|
646 | :param alter_metadata: If True, Index object will be altered. | |
|
647 | :type alter_metadata: bool | |
|
648 | 632 | :param connection: reuse connection istead of creating new one. |
|
649 | 633 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
650 | 634 | """ |
|
651 | self.alter_metadata = kwargs.pop('alter_metadata', DEFAULT_ALTER_METADATA) | |
|
652 | 635 | engine = self.table.bind |
|
653 | 636 | self.new_name = name |
|
654 | 637 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
655 | 638 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
656 |
|
|
|
657 | self.name = name | |
|
639 | self.name = name | |
|
658 | 640 | |
|
659 | 641 | |
|
660 | 642 | class ChangesetDefaultClause(object): |
|
661 | 643 | """Implements comparison between :class:`DefaultClause` instances""" |
|
662 | 644 | |
|
663 | 645 | def __eq__(self, other): |
|
664 | 646 | if isinstance(other, self.__class__): |
|
665 | 647 | if self.arg == other.arg: |
|
666 | 648 | return True |
|
667 | 649 | |
|
668 | 650 | def __ne__(self, other): |
|
669 | 651 | return not self.__eq__(other) |
@@ -1,253 +1,253 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Code to generate a Python model from a database or differences |
|
3 | 3 | between a model and database. |
|
4 | 4 | |
|
5 | 5 | Some of this is borrowed heavily from the AutoCode project at: |
|
6 | 6 | http://code.google.com/p/sqlautocode/ |
|
7 | 7 | """ |
|
8 | 8 | |
|
9 | 9 | import sys |
|
10 | 10 | import logging |
|
11 | 11 | |
|
12 | 12 | import sqlalchemy |
|
13 | 13 | |
|
14 | 14 | from rhodecode.lib.dbmigrate import migrate |
|
15 | 15 | from rhodecode.lib.dbmigrate.migrate import changeset |
|
16 | 16 | |
|
17 | 17 | log = logging.getLogger(__name__) |
|
18 | 18 | HEADER = """ |
|
19 | 19 | ## File autogenerated by genmodel.py |
|
20 | 20 | |
|
21 | 21 | from sqlalchemy import * |
|
22 | 22 | meta = MetaData() |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | DECLARATIVE_HEADER = """ |
|
26 | 26 | ## File autogenerated by genmodel.py |
|
27 | 27 | |
|
28 | 28 | from sqlalchemy import * |
|
29 | 29 | from sqlalchemy.ext import declarative |
|
30 | 30 | |
|
31 | 31 | Base = declarative.declarative_base() |
|
32 | 32 | """ |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class ModelGenerator(object): |
|
36 | 36 | |
|
37 | 37 | def __init__(self, diff, engine, declarative=False): |
|
38 | 38 | self.diff = diff |
|
39 | 39 | self.engine = engine |
|
40 | 40 | self.declarative = declarative |
|
41 | 41 | |
|
42 | 42 | def column_repr(self, col): |
|
43 | 43 | kwarg = [] |
|
44 | 44 | if col.key != col.name: |
|
45 | 45 | kwarg.append('key') |
|
46 | 46 | if col.primary_key: |
|
47 | 47 | col.primary_key = True # otherwise it dumps it as 1 |
|
48 | 48 | kwarg.append('primary_key') |
|
49 | 49 | if not col.nullable: |
|
50 | 50 | kwarg.append('nullable') |
|
51 | 51 | if col.onupdate: |
|
52 | 52 | kwarg.append('onupdate') |
|
53 | 53 | if col.default: |
|
54 | 54 | if col.primary_key: |
|
55 | 55 | # I found that PostgreSQL automatically creates a |
|
56 | 56 | # default value for the sequence, but let's not show |
|
57 | 57 | # that. |
|
58 | 58 | pass |
|
59 | 59 | else: |
|
60 | 60 | kwarg.append('default') |
|
61 | 61 | ks = ', '.join('%s=%r' % (k, getattr(col, k)) for k in kwarg) |
|
62 | 62 | |
|
63 | 63 | # crs: not sure if this is good idea, but it gets rid of extra |
|
64 | 64 | # u'' |
|
65 | 65 | name = col.name.encode('utf8') |
|
66 | 66 | |
|
67 | 67 | type_ = col.type |
|
68 | 68 | for cls in col.type.__class__.__mro__: |
|
69 | 69 | if cls.__module__ == 'sqlalchemy.types' and \ |
|
70 | 70 | not cls.__name__.isupper(): |
|
71 | 71 | if cls is not type_.__class__: |
|
72 | 72 | type_ = cls() |
|
73 | 73 | break |
|
74 | 74 | |
|
75 | 75 | data = { |
|
76 | 76 | 'name': name, |
|
77 | 77 | 'type': type_, |
|
78 | 78 | 'constraints': ', '.join([repr(cn) for cn in col.constraints]), |
|
79 | 79 | 'args': ks and ks or ''} |
|
80 | 80 | |
|
81 | 81 | if data['constraints']: |
|
82 | 82 | if data['args']: |
|
83 | 83 | data['args'] = ',' + data['args'] |
|
84 | 84 | |
|
85 | 85 | if data['constraints'] or data['args']: |
|
86 | 86 | data['maybeComma'] = ',' |
|
87 | 87 | else: |
|
88 | 88 | data['maybeComma'] = '' |
|
89 | 89 | |
|
90 | 90 | commonStuff = """ %(maybeComma)s %(constraints)s %(args)s)""" % data |
|
91 | 91 | commonStuff = commonStuff.strip() |
|
92 | 92 | data['commonStuff'] = commonStuff |
|
93 | 93 | if self.declarative: |
|
94 | 94 | return """%(name)s = Column(%(type)r%(commonStuff)s""" % data |
|
95 | 95 | else: |
|
96 | 96 | return """Column(%(name)r, %(type)r%(commonStuff)s""" % data |
|
97 | 97 | |
|
98 | 98 | def getTableDefn(self, table): |
|
99 | 99 | out = [] |
|
100 | 100 | tableName = table.name |
|
101 | 101 | if self.declarative: |
|
102 | 102 | out.append("class %(table)s(Base):" % {'table': tableName}) |
|
103 | 103 | out.append(" __tablename__ = '%(table)s'" % {'table': tableName}) |
|
104 | 104 | for col in table.columns: |
|
105 | 105 | out.append(" %s" % self.column_repr(col)) |
|
106 | 106 | else: |
|
107 | 107 | out.append("%(table)s = Table('%(table)s', meta," % \ |
|
108 | 108 | {'table': tableName}) |
|
109 | 109 | for col in table.columns: |
|
110 | 110 | out.append(" %s," % self.column_repr(col)) |
|
111 | 111 | out.append(")") |
|
112 | 112 | return out |
|
113 | 113 | |
|
114 |
def _get_tables(self, |
|
|
114 | def _get_tables(self,missingA=False,missingB=False,modified=False): | |
|
115 | 115 | to_process = [] |
|
116 |
for bool_, |
|
|
117 |
(missingA, |
|
|
118 |
(missingB, |
|
|
119 |
(modified, |
|
|
116 | for bool_,names,metadata in ( | |
|
117 | (missingA,self.diff.tables_missing_from_A,self.diff.metadataB), | |
|
118 | (missingB,self.diff.tables_missing_from_B,self.diff.metadataA), | |
|
119 | (modified,self.diff.tables_different,self.diff.metadataA), | |
|
120 | 120 | ): |
|
121 | 121 | if bool_: |
|
122 | 122 | for name in names: |
|
123 | 123 | yield metadata.tables.get(name) |
|
124 | 124 | |
|
125 | 125 | def toPython(self): |
|
126 | 126 | """Assume database is current and model is empty.""" |
|
127 | 127 | out = [] |
|
128 | 128 | if self.declarative: |
|
129 | 129 | out.append(DECLARATIVE_HEADER) |
|
130 | 130 | else: |
|
131 | 131 | out.append(HEADER) |
|
132 | 132 | out.append("") |
|
133 | 133 | for table in self._get_tables(missingA=True): |
|
134 | 134 | out.extend(self.getTableDefn(table)) |
|
135 | 135 | out.append("") |
|
136 | 136 | return '\n'.join(out) |
|
137 | 137 | |
|
138 | 138 | def toUpgradeDowngradePython(self, indent=' '): |
|
139 | 139 | ''' Assume model is most current and database is out-of-date. ''' |
|
140 | 140 | decls = ['from rhodecode.lib.dbmigrate.migrate.changeset import schema', |
|
141 | 141 | 'meta = MetaData()'] |
|
142 | 142 | for table in self._get_tables( |
|
143 |
missingA=True, |
|
|
143 | missingA=True,missingB=True,modified=True | |
|
144 | 144 | ): |
|
145 | 145 | decls.extend(self.getTableDefn(table)) |
|
146 | 146 | |
|
147 | 147 | upgradeCommands, downgradeCommands = [], [] |
|
148 | 148 | for tableName in self.diff.tables_missing_from_A: |
|
149 | 149 | upgradeCommands.append("%(table)s.drop()" % {'table': tableName}) |
|
150 | 150 | downgradeCommands.append("%(table)s.create()" % \ |
|
151 | 151 | {'table': tableName}) |
|
152 | 152 | for tableName in self.diff.tables_missing_from_B: |
|
153 | 153 | upgradeCommands.append("%(table)s.create()" % {'table': tableName}) |
|
154 | 154 | downgradeCommands.append("%(table)s.drop()" % {'table': tableName}) |
|
155 | 155 | |
|
156 | 156 | for tableName in self.diff.tables_different: |
|
157 | 157 | dbTable = self.diff.metadataB.tables[tableName] |
|
158 | 158 | missingInDatabase, missingInModel, diffDecl = \ |
|
159 | 159 | self.diff.colDiffs[tableName] |
|
160 | 160 | for col in missingInDatabase: |
|
161 | 161 | upgradeCommands.append('%s.columns[%r].create()' % ( |
|
162 | 162 | modelTable, col.name)) |
|
163 | 163 | downgradeCommands.append('%s.columns[%r].drop()' % ( |
|
164 | 164 | modelTable, col.name)) |
|
165 | 165 | for col in missingInModel: |
|
166 | 166 | upgradeCommands.append('%s.columns[%r].drop()' % ( |
|
167 | 167 | modelTable, col.name)) |
|
168 | 168 | downgradeCommands.append('%s.columns[%r].create()' % ( |
|
169 | 169 | modelTable, col.name)) |
|
170 | 170 | for modelCol, databaseCol, modelDecl, databaseDecl in diffDecl: |
|
171 | 171 | upgradeCommands.append( |
|
172 |
'assert False, "Can\'t alter columns: %s:%s=>%s"' |
|
|
173 | modelTable, modelCol.name, databaseCol.name) | |
|
172 | 'assert False, "Can\'t alter columns: %s:%s=>%s"' % ( | |
|
173 | modelTable, modelCol.name, databaseCol.name)) | |
|
174 | 174 | downgradeCommands.append( |
|
175 |
'assert False, "Can\'t alter columns: %s:%s=>%s"' |
|
|
176 | modelTable, modelCol.name, databaseCol.name) | |
|
175 | 'assert False, "Can\'t alter columns: %s:%s=>%s"' % ( | |
|
176 | modelTable, modelCol.name, databaseCol.name)) | |
|
177 | 177 | pre_command = ' meta.bind = migrate_engine' |
|
178 | 178 | |
|
179 | 179 | return ( |
|
180 | 180 | '\n'.join(decls), |
|
181 | 181 | '\n'.join([pre_command] + ['%s%s' % (indent, line) for line in upgradeCommands]), |
|
182 | 182 | '\n'.join([pre_command] + ['%s%s' % (indent, line) for line in downgradeCommands])) |
|
183 | 183 | |
|
184 |
def _db_can_handle_this_change(self, |
|
|
184 | def _db_can_handle_this_change(self,td): | |
|
185 | 185 | if (td.columns_missing_from_B |
|
186 | 186 | and not td.columns_missing_from_A |
|
187 | 187 | and not td.columns_different): |
|
188 | 188 | # Even sqlite can handle this. |
|
189 | 189 | return True |
|
190 | 190 | else: |
|
191 | 191 | return not self.engine.url.drivername.startswith('sqlite') |
|
192 | 192 | |
|
193 | 193 | def applyModel(self): |
|
194 | 194 | """Apply model to current database.""" |
|
195 | 195 | |
|
196 | 196 | meta = sqlalchemy.MetaData(self.engine) |
|
197 | 197 | |
|
198 | 198 | for table in self._get_tables(missingA=True): |
|
199 | 199 | table = table.tometadata(meta) |
|
200 | 200 | table.drop() |
|
201 | 201 | for table in self._get_tables(missingB=True): |
|
202 | 202 | table = table.tometadata(meta) |
|
203 | 203 | table.create() |
|
204 | 204 | for modelTable in self._get_tables(modified=True): |
|
205 | 205 | tableName = modelTable.name |
|
206 | 206 | modelTable = modelTable.tometadata(meta) |
|
207 | 207 | dbTable = self.diff.metadataB.tables[tableName] |
|
208 | 208 | |
|
209 | 209 | td = self.diff.tables_different[tableName] |
|
210 | ||
|
210 | ||
|
211 | 211 | if self._db_can_handle_this_change(td): |
|
212 | ||
|
212 | ||
|
213 | 213 | for col in td.columns_missing_from_B: |
|
214 | 214 | modelTable.columns[col].create() |
|
215 | 215 | for col in td.columns_missing_from_A: |
|
216 | 216 | dbTable.columns[col].drop() |
|
217 | 217 | # XXX handle column changes here. |
|
218 | 218 | else: |
|
219 | 219 | # Sqlite doesn't support drop column, so you have to |
|
220 | 220 | # do more: create temp table, copy data to it, drop |
|
221 | 221 | # old table, create new table, copy data back. |
|
222 | 222 | # |
|
223 | 223 | # I wonder if this is guaranteed to be unique? |
|
224 | 224 | tempName = '_temp_%s' % modelTable.name |
|
225 | 225 | |
|
226 | 226 | def getCopyStatement(): |
|
227 | 227 | preparer = self.engine.dialect.preparer |
|
228 | 228 | commonCols = [] |
|
229 | 229 | for modelCol in modelTable.columns: |
|
230 | 230 | if modelCol.name in dbTable.columns: |
|
231 | 231 | commonCols.append(modelCol.name) |
|
232 | 232 | commonColsStr = ', '.join(commonCols) |
|
233 | 233 | return 'INSERT INTO %s (%s) SELECT %s FROM %s' % \ |
|
234 | 234 | (tableName, commonColsStr, commonColsStr, tempName) |
|
235 | 235 | |
|
236 | 236 | # Move the data in one transaction, so that we don't |
|
237 | 237 | # leave the database in a nasty state. |
|
238 | 238 | connection = self.engine.connect() |
|
239 | 239 | trans = connection.begin() |
|
240 | 240 | try: |
|
241 | 241 | connection.execute( |
|
242 | 242 | 'CREATE TEMPORARY TABLE %s as SELECT * from %s' % \ |
|
243 | 243 | (tempName, modelTable.name)) |
|
244 | 244 | # make sure the drop takes place inside our |
|
245 | 245 | # transaction with the bind parameter |
|
246 | 246 | modelTable.drop(bind=connection) |
|
247 | 247 | modelTable.create(bind=connection) |
|
248 | 248 | connection.execute(getCopyStatement()) |
|
249 | 249 | connection.execute('DROP TABLE %s' % tempName) |
|
250 | 250 | trans.commit() |
|
251 | 251 | except: |
|
252 | 252 | trans.rollback() |
|
253 | 253 | raise |
@@ -1,159 +1,160 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | |
|
4 | 4 | import shutil |
|
5 | 5 | import warnings |
|
6 | 6 | import logging |
|
7 | import inspect | |
|
7 | 8 | from StringIO import StringIO |
|
8 | 9 | |
|
9 | 10 | from rhodecode.lib.dbmigrate import migrate |
|
10 | 11 | from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff |
|
11 | 12 | from rhodecode.lib.dbmigrate.migrate.versioning.config import operations |
|
12 | 13 | from rhodecode.lib.dbmigrate.migrate.versioning.template import Template |
|
13 | 14 | from rhodecode.lib.dbmigrate.migrate.versioning.script import base |
|
14 | 15 | from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine |
|
15 | 16 | from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError |
|
16 | 17 | |
|
17 | 18 | log = logging.getLogger(__name__) |
|
18 | 19 | __all__ = ['PythonScript'] |
|
19 | 20 | |
|
20 | 21 | |
|
class PythonScript(base.BaseScript):
    """Base for Python scripts"""

    @classmethod
    def create(cls, path, **opts):
        """Create an empty migration script at specified path

        :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
        cls.require_notfound(path)

        # Copy the (optionally themed) script template into place and wrap it.
        src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
        shutil.copy(src, path)

        return cls(path)

    @classmethod
    def make_update_script_for_model(cls, engine, oldmodel,
                                     model, repository, **opts):
        """Create a migration script based on difference between two SA models.

        :param repository: path to migrate repository
        :param oldmodel: dotted.module.name:SAClass or SAClass object
        :param model: dotted.module.name:SAClass or SAClass object
        :param engine: SQLAlchemy engine
        :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
        :type oldmodel: string or Class
        :type model: string or Class
        :type engine: Engine instance
        :returns: Upgrade / Downgrade script
        :rtype: string
        """

        if isinstance(repository, basestring):
            # oh dear, an import cycle!
            from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
            repository = Repository(repository)

        # Accept either dotted-path strings or already-loaded model objects.
        oldmodel = load_model(oldmodel)
        model = load_model(model)

        # Compute differences.
        diff = schemadiff.getDiffOfModelAgainstModel(
            oldmodel,
            model,
            excludeTables=[repository.version_table])
        # TODO: diff can be False (there is no difference?)
        decls, upgradeCommands, downgradeCommands = \
            genmodel.ModelGenerator(diff,engine).toUpgradeDowngradePython()

        # Store differences into file.
        src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
        f = open(src)
        contents = f.read()
        f.close()

        # generate source: inject generated declarations above upgrade(), then
        # replace the template's placeholder bodies with the generated commands.
        search = 'def upgrade(migrate_engine):'
        contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
        if upgradeCommands:
            contents = contents.replace('    pass', upgradeCommands, 1)
        if downgradeCommands:
            contents = contents.replace('    pass', downgradeCommands, 1)
        return contents

    @classmethod
    def verify_module(cls, path):
        """Ensure path is a valid script

        :param path: Script location
        :type path: string
        :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
        :returns: Python module
        """
        # Try to import and get the upgrade() func
        module = import_path(path)
        try:
            assert callable(module.upgrade)
        except Exception, e:
            raise InvalidScriptError(path + ': %s' % str(e))
        return module

    def preview_sql(self, url, step, **args):
        """Mocks SQLAlchemy Engine to store all executed calls in a string
        and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`

        :returns: SQL file
        """
        buf = StringIO()
        # 'mock' strategy makes the engine record statements via the executor
        # instead of talking to a real database.
        args['engine_arg_strategy'] = 'mock'
        args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)

        @with_engine
        def go(url, step, **kw):
            engine = kw.pop('engine')
            self.run(engine, step)
            return buf.getvalue()

        return go(url, step, **args)

    def run(self, engine, step):
        """Core method of Script file.
        Exectues :func:`update` or :func:`downgrade` functions

        :param engine: SQLAlchemy Engine
        :param step: Operation to run
        :type engine: string
        :type step: int
        """
        if step > 0:
            op = 'upgrade'
        elif step < 0:
            op = 'downgrade'
        else:
            raise ScriptError("%d is not a valid step" % step)

        funcname = base.operations[op]
        script_func = self._func(funcname)

        # check for old way of using engine
        if not inspect.getargspec(script_func)[0]:
            raise TypeError("upgrade/downgrade functions must accept engine"
                " parameter (since version 0.5.4)")

        script_func(engine)

    @property
    def module(self):
        """Calls :meth:`migrate.versioning.script.py.verify_module`
        and returns it.
        """
        # Cache the imported module on first access.
        if not hasattr(self, '_module'):
            self._module = self.verify_module(self.path)
        return self._module

    def _func(self, funcname):
        # Look up the named upgrade/downgrade function on the script module.
        if not hasattr(self.module, funcname):
            msg = "Function '%s' is not defined in this script"
            raise ScriptError(msg % funcname)
        return getattr(self.module, funcname)
@@ -1,48 +1,49 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | import logging |
|
4 | 4 | import shutil |
|
5 | 5 | |
|
6 | 6 | from rhodecode.lib.dbmigrate.migrate.versioning.script import base |
|
7 | 7 | from rhodecode.lib.dbmigrate.migrate.versioning.template import Template |
|
8 | 8 | |
|
9 | 9 | |
|
10 | 10 | log = logging.getLogger(__name__) |
|
11 | 11 | |
|
class SqlScript(base.BaseScript):
    """A file containing plain SQL statements."""

    @classmethod
    def create(cls, path, **opts):
        """Create an empty migration script at specified path

        :returns: :class:`SqlScript instance <migrate.versioning.script.sql.SqlScript>`"""
        cls.require_notfound(path)

        src = Template(opts.pop('templates_path', None)).get_sql_script(theme=opts.pop('templates_theme', None))
        shutil.copy(src, path)
        return cls(path)

    # TODO: why is step parameter even here?
    def run(self, engine, step=None, executemany=True):
        """Runs SQL script through raw dbapi execute call"""
        sql = self.source()
        # Manage the transaction ourselves rather than relying on SA's
        # autocommit (SA uses .startswith to decide if a commit is needed,
        # which breaks when the script begins with a comment).
        connection = engine.connect()
        try:
            txn = connection.begin()
            try:
                # HACK: SQLite's execute() rejects multiple statements, but
                # its raw dbapi connection provides executescript() instead.
                raw = connection.engine.raw_connection()
                if executemany and getattr(raw, 'executescript', None):
                    raw.executescript(sql)
                else:
                    connection.execute(sql)
                txn.commit()
            except:
                txn.rollback()
                raise
        finally:
            connection.close()
@@ -1,215 +1,214 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | |
|
4 | 4 | """The migrate command-line tool.""" |
|
5 | 5 | |
|
6 | 6 | import sys |
|
7 | 7 | import inspect |
|
8 | 8 | import logging |
|
9 | 9 | from optparse import OptionParser, BadOptionError |
|
10 | 10 | |
|
11 | 11 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
12 | 12 | from rhodecode.lib.dbmigrate.migrate.versioning import api |
|
13 | 13 | from rhodecode.lib.dbmigrate.migrate.versioning.config import * |
|
14 | 14 | from rhodecode.lib.dbmigrate.migrate.versioning.util import asbool |
|
15 | 15 | |
|
16 | 16 | |
|
# Short command aliases exposed on the api module (e.g. ``migrate s``).
alias = {
    's': api.script,
    'vc': api.version_control,
    'dbv': api.db_version,
    'v': api.version,
}

def alias_setup():
    """Attach every short alias to the api module as an attribute."""
    global alias
    for name, func in alias.iteritems():
        setattr(api, name, func)

alias_setup()
|
29 | 29 | |
|
30 | 30 | |
|
class PassiveOptionParser(OptionParser):
    """OptionParser variant that tolerates unknown ``--option[=value]``
    arguments by passing them through as positional arguments instead of
    erroring out."""

    def _process_args(self, largs, rargs, values):
        """little hack to support all --some_option=value parameters"""

        while rargs:
            token = rargs[0]
            if token == "--":
                # explicit end-of-options marker: consume it and stop
                del rargs[0]
                return
            if token.startswith("--"):
                # unknown long options are kept (made anonymous) rather
                # than rejected, so commands can accept ad-hoc overrides
                try:
                    self._match_long_opt(token.split('=', 1)[0])
                except BadOptionError:
                    largs.append(token)
                    del rargs[0]
                else:
                    self._process_long_opt(rargs, values)
            elif token.startswith("-") and len(token) > 1:
                self._process_short_opts(rargs, values)
            elif self.allow_interspersed_args:
                largs.append(token)
                del rargs[0]
57 | 57 | |
|
58 | 58 | def main(argv=None, **kwargs): |
|
59 | 59 | """Shell interface to :mod:`migrate.versioning.api`. |
|
60 | 60 | |
|
61 | 61 | kwargs are default options that can be overriden with passing |
|
62 | 62 | --some_option as command line option |
|
63 | 63 | |
|
64 | 64 | :param disable_logging: Let migrate configure logging |
|
65 | 65 | :type disable_logging: bool |
|
66 | 66 | """ |
|
67 | 67 | if argv is not None: |
|
68 | 68 | argv = argv |
|
69 | 69 | else: |
|
70 | 70 | argv = list(sys.argv[1:]) |
|
71 | 71 | commands = list(api.__all__) |
|
72 | 72 | commands.sort() |
|
73 | 73 | |
|
74 | 74 | usage = """%%prog COMMAND ... |
|
75 | 75 | |
|
76 | 76 | Available commands: |
|
77 | 77 | %s |
|
78 | 78 | |
|
79 | 79 | Enter "%%prog help COMMAND" for information on a particular command. |
|
80 | """ % '\n\t'.join(["%s - %s" % (command.ljust(28), | |
|
81 | api.command_desc.get(command)) for command in commands]) | |
|
80 | """ % '\n\t'.join(["%s - %s" % (command.ljust(28), api.command_desc.get(command)) for command in commands]) | |
|
82 | 81 | |
|
83 | 82 | parser = PassiveOptionParser(usage=usage) |
|
84 | 83 | parser.add_option("-d", "--debug", |
|
85 | 84 | action="store_true", |
|
86 | 85 | dest="debug", |
|
87 | 86 | default=False, |
|
88 | 87 | help="Shortcut to turn on DEBUG mode for logging") |
|
89 | 88 | parser.add_option("-q", "--disable_logging", |
|
90 | 89 | action="store_true", |
|
91 | 90 | dest="disable_logging", |
|
92 | 91 | default=False, |
|
93 | 92 | help="Use this option to disable logging configuration") |
|
94 | 93 | help_commands = ['help', '-h', '--help'] |
|
95 | 94 | HELP = False |
|
96 | 95 | |
|
97 | 96 | try: |
|
98 | 97 | command = argv.pop(0) |
|
99 | 98 | if command in help_commands: |
|
100 | 99 | HELP = True |
|
101 | 100 | command = argv.pop(0) |
|
102 | 101 | except IndexError: |
|
103 | 102 | parser.print_help() |
|
104 | 103 | return |
|
105 | 104 | |
|
106 | 105 | command_func = getattr(api, command, None) |
|
107 | 106 | if command_func is None or command.startswith('_'): |
|
108 | 107 | parser.error("Invalid command %s" % command) |
|
109 | 108 | |
|
110 | 109 | parser.set_usage(inspect.getdoc(command_func)) |
|
111 | 110 | f_args, f_varargs, f_kwargs, f_defaults = inspect.getargspec(command_func) |
|
112 | 111 | for arg in f_args: |
|
113 | 112 | parser.add_option( |
|
114 | 113 | "--%s" % arg, |
|
115 | 114 | dest=arg, |
|
116 | 115 | action='store', |
|
117 | 116 | type="string") |
|
118 | 117 | |
|
119 | 118 | # display help of the current command |
|
120 | 119 | if HELP: |
|
121 | 120 | parser.print_help() |
|
122 | 121 | return |
|
123 | 122 | |
|
124 | 123 | options, args = parser.parse_args(argv) |
|
125 | 124 | |
|
126 | 125 | # override kwargs with anonymous parameters |
|
127 | 126 | override_kwargs = dict() |
|
128 | 127 | for arg in list(args): |
|
129 | 128 | if arg.startswith('--'): |
|
130 | 129 | args.remove(arg) |
|
131 | 130 | if '=' in arg: |
|
132 | 131 | opt, value = arg[2:].split('=', 1) |
|
133 | 132 | else: |
|
134 | 133 | opt = arg[2:] |
|
135 | 134 | value = True |
|
136 | 135 | override_kwargs[opt] = value |
|
137 | 136 | |
|
138 | 137 | # override kwargs with options if user is overwriting |
|
139 | 138 | for key, value in options.__dict__.iteritems(): |
|
140 | 139 | if value is not None: |
|
141 | 140 | override_kwargs[key] = value |
|
142 | 141 | |
|
143 | 142 | # arguments that function accepts without passed kwargs |
|
144 | 143 | f_required = list(f_args) |
|
145 | 144 | candidates = dict(kwargs) |
|
146 | 145 | candidates.update(override_kwargs) |
|
147 | 146 | for key, value in candidates.iteritems(): |
|
148 | 147 | if key in f_args: |
|
149 | 148 | f_required.remove(key) |
|
150 | 149 | |
|
151 | 150 | # map function arguments to parsed arguments |
|
152 | 151 | for arg in args: |
|
153 | 152 | try: |
|
154 | 153 | kw = f_required.pop(0) |
|
155 | 154 | except IndexError: |
|
156 | 155 | parser.error("Too many arguments for command %s: %s" % (command, |
|
157 | 156 | arg)) |
|
158 | 157 | kwargs[kw] = arg |
|
159 | 158 | |
|
160 | 159 | # apply overrides |
|
161 | 160 | kwargs.update(override_kwargs) |
|
162 | 161 | |
|
163 | 162 | # configure options |
|
164 | 163 | for key, value in options.__dict__.iteritems(): |
|
165 | 164 | kwargs.setdefault(key, value) |
|
166 | 165 | |
|
167 | 166 | # configure logging |
|
168 | 167 | if not asbool(kwargs.pop('disable_logging', False)): |
|
169 | 168 | # filter to log =< INFO into stdout and rest to stderr |
|
170 | 169 | class SingleLevelFilter(logging.Filter): |
|
171 | 170 | def __init__(self, min=None, max=None): |
|
172 | 171 | self.min = min or 0 |
|
173 | 172 | self.max = max or 100 |
|
174 | 173 | |
|
175 | 174 | def filter(self, record): |
|
176 | 175 | return self.min <= record.levelno <= self.max |
|
177 | 176 | |
|
178 | 177 | logger = logging.getLogger() |
|
179 | 178 | h1 = logging.StreamHandler(sys.stdout) |
|
180 | 179 | f1 = SingleLevelFilter(max=logging.INFO) |
|
181 | 180 | h1.addFilter(f1) |
|
182 | 181 | h2 = logging.StreamHandler(sys.stderr) |
|
183 | 182 | f2 = SingleLevelFilter(min=logging.WARN) |
|
184 | 183 | h2.addFilter(f2) |
|
185 | 184 | logger.addHandler(h1) |
|
186 | 185 | logger.addHandler(h2) |
|
187 | 186 | |
|
188 | 187 | if options.debug: |
|
189 | 188 | logger.setLevel(logging.DEBUG) |
|
190 | 189 | else: |
|
191 | 190 | logger.setLevel(logging.INFO) |
|
192 | 191 | |
|
193 | 192 | log = logging.getLogger(__name__) |
|
194 | 193 | |
|
195 | 194 | # check if all args are given |
|
196 | 195 | try: |
|
197 | 196 | num_defaults = len(f_defaults) |
|
198 | 197 | except TypeError: |
|
199 | 198 | num_defaults = 0 |
|
200 | 199 | f_args_default = f_args[len(f_args) - num_defaults:] |
|
201 | 200 | required = list(set(f_required) - set(f_args_default)) |
|
202 | 201 | if required: |
|
203 | 202 | parser.error("Not enough arguments for command %s: %s not specified" \ |
|
204 | 203 | % (command, ', '.join(required))) |
|
205 | 204 | |
|
206 | 205 | # handle command |
|
207 | 206 | try: |
|
208 | 207 | ret = command_func(**kwargs) |
|
209 | 208 | if ret is not None: |
|
210 | 209 | log.info(ret) |
|
211 | 210 | except (exceptions.UsageError, exceptions.KnownError), e: |
|
212 | 211 | parser.error(e.args[0]) |
|
213 | 212 | |
|
214 | 213 | if __name__ == "__main__": |
|
215 | 214 | main() |
@@ -1,94 +1,94 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | |
|
4 | 4 | import os |
|
5 | 5 | import shutil |
|
6 | 6 | import sys |
|
7 | 7 | |
|
8 | 8 | from pkg_resources import resource_filename |
|
9 | 9 | |
|
10 | 10 | from rhodecode.lib.dbmigrate.migrate.versioning.config import * |
|
11 | 11 | from rhodecode.lib.dbmigrate.migrate.versioning import pathed |
|
12 | 12 | |
|
13 | 13 | |
|
class Collection(pathed.Pathed):
    """A collection of templates of a specific type"""
    # Filename mask used to turn a theme name into a template file name;
    # concrete subclasses override this.
    _mask = None

    def get_path(self, file):
        # Absolute path of *file* inside this collection's directory.
        return os.path.join(self.path, str(file))
|
20 | 20 | |
|
21 | 21 | |
|
class RepositoryCollection(Collection):
    # repository templates are plain directories -- no file suffix
    _mask = '%s'

class ScriptCollection(Collection):
    # change-script templates are Python template files
    _mask = '%s.py_tmpl'

class ManageCollection(Collection):
    # manage.py templates
    _mask = '%s.py_tmpl'

class SQLScriptCollection(Collection):
    # SQL script templates (note: these also use the .py_tmpl suffix)
    _mask = '%s.py_tmpl'
|
33 | 33 | |
|
class Template(pathed.Pathed):
    """Finds the paths/packages of various Migrate templates.

    :param path: Templates are loaded from rhodecode.lib.dbmigrate.migrate package
    if `path` is not provided.
    """
    pkg = 'rhodecode.lib.dbmigrate.migrate.versioning.templates'
    _manage = 'manage.py_tmpl'

    def __new__(cls, path=None):
        # Resolve the default template directory before Pathed keys the
        # instance by path.
        if path is None:
            path = cls._find_path(cls.pkg)
        return super(Template, cls).__new__(cls, path)

    def __init__(self, path=None):
        if path is None:
            path = Template._find_path(self.pkg)
        super(Template, self).__init__(path)
        # One collection per template flavour, each in its own subfolder.
        self.repository = RepositoryCollection(os.path.join(path, 'repository'))
        self.script = ScriptCollection(os.path.join(path, 'script'))
        self.manage = ManageCollection(os.path.join(path, 'manage'))
        self.sql_script = SQLScriptCollection(os.path.join(path, 'sql_script'))

    @classmethod
    def _find_path(cls, pkg):
        """Returns absolute path to dotted python package."""
        parts = pkg.rsplit('.', 1)
        if len(parts) == 1:
            return resource_filename(parts[0], '')
        return resource_filename(parts[0], parts[1])

    def _get_item(self, collection, theme=None):
        """Locates and returns collection.

        :param collection: name of collection to locate
        :param type_: type of subfolder in collection (defaults to "_default")
        :returns: (package, source)
        :rtype: str, str
        """
        group = getattr(self, collection)
        filename = getattr(group, '_mask') % (theme or 'default')
        return group.get_path(filename)

    def get_repository(self, *a, **kw):
        """Calls self._get_item('repository', *a, **kw)"""
        return self._get_item('repository', *a, **kw)

    def get_script(self, *a, **kw):
        """Calls self._get_item('script', *a, **kw)"""
        return self._get_item('script', *a, **kw)

    def get_sql_script(self, *a, **kw):
        """Calls self._get_item('sql_script', *a, **kw)"""
        return self._get_item('sql_script', *a, **kw)

    def get_manage(self, *a, **kw):
        """Calls self._get_item('manage', *a, **kw)"""
        return self._get_item('manage', *a, **kw)
General Comments 0
You need to be logged in to leave comments.
Login now