@@ -1,108 +1,108 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.controllers.error |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | RhodeCode error controller |
|
7 | 7 | |
|
8 | 8 | :created_on: Dec 8, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | import os |
|
26 | 26 | import cgi |
|
27 | 27 | import logging |
|
28 | 28 | import paste.fileapp |
|
29 | 29 | |
|
30 | 30 | from pylons import tmpl_context as c, request, config, url |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | from pylons.middleware import media_path |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib.base import BaseController, render |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class ErrorController(BaseController): |
|
40 | 40 | """Generates error documents as and when they are required. |
|
41 | 41 | |
|
42 | 42 | The ErrorDocuments middleware forwards to ErrorController when error |
|
43 | 43 | related status codes are returned from the application. |
|
44 | 44 | |
|
45 | 45 | This behavior can be altered by changing the parameters to the |
|
46 | 46 | ErrorDocuments middleware in your config/middleware.py file. |
|
47 | 47 | """ |
|
48 | 48 | |
|
49 | 49 | def __before__(self): |
|
50 | 50 | #disable all base actions since we don't need them here |
|
51 | 51 | pass |
|
52 | 52 | |
|
53 | 53 | def document(self): |
|
54 | 54 | resp = request.environ.get('pylons.original_response') |
|
55 | 55 | c.rhodecode_name = config.get('rhodecode_title') |
|
56 | 56 | |
|
57 | 57 | log.debug('### %s ###' % resp.status) |
|
58 | 58 | |
|
59 | 59 | e = request.environ |
|
60 | 60 | c.serv_p = r'%(protocol)s://%(host)s/' \ |
|
61 | 61 | % {'protocol': e.get('wsgi.url_scheme'), |
|
62 | 62 | 'host': e.get('HTTP_HOST'), } |
|
63 | 63 | |
|
64 | 64 | c.error_message = cgi.escape(request.GET.get('code', str(resp.status))) |
|
65 | 65 | c.error_explanation = self.get_error_explanation(resp.status_int) |
|
66 | 66 | |
|
67 | 67 | # on error, redirect to the given location after the given number of seconds |
|
68 | 68 | c.redirect_time = 0 |
|
69 | 69 | c.redirect_module = _('Home page') |
|
70 | 70 | c.url_redirect = "/" |
|
71 | 71 | |
|
72 | 72 | return render('/errors/error_document.html') |
|
73 | 73 | |
|
74 | 74 | def img(self, id): |
|
75 | 75 | """Serve Pylons' stock images""" |
|
76 | 76 | return self._serve_file(os.path.join(media_path, 'img', id)) |
|
77 | 77 | |
|
78 | 78 | def style(self, id): |
|
79 | 79 | """Serve Pylons' stock stylesheets""" |
|
80 | 80 | return self._serve_file(os.path.join(media_path, 'style', id)) |
|
81 | 81 | |
|
82 | 82 | def _serve_file(self, path): |
|
83 | 83 | """Call Paste's FileApp (a WSGI application) to serve the file |
|
84 | 84 | at the specified path |
|
85 | 85 | """ |
|
86 | 86 | fapp = paste.fileapp.FileApp(path) |
|
87 | 87 | return fapp(request.environ, self.start_response) |
|
88 | 88 | |
|
89 | 89 | def get_error_explanation(self, code): |
|
90 | ''' get the error explanations of int codes | |
|
91 | [400, 401, 403, 404, 500]''' | |
|
90 | """ get the error explanations of int codes | |
|
91 | [400, 401, 403, 404, 500]""" | |
|
92 | 92 | try: |
|
93 | 93 | code = int(code) |
|
94 | 94 | except Exception: |
|
95 | 95 | code = 500 |
|
96 | 96 | |
|
97 | 97 | if code == 400: |
|
98 | 98 | return _('The request could not be understood by the server' |
|
99 | 99 | ' due to malformed syntax.') |
|
100 | 100 | if code == 401: |
|
101 | 101 | return _('Unauthorized access to resource') |
|
102 | 102 | if code == 403: |
|
103 | 103 | return _("You don't have permission to view this page") |
|
104 | 104 | if code == 404: |
|
105 | 105 | return _('The resource could not be found') |
|
106 | 106 | if code == 500: |
|
107 | 107 | return _('The server encountered an unexpected condition' |
|
108 | 108 | ' which prevented it from fulfilling the request.') |
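
For reference, the chained lookup above is equivalent to a table-driven version. A minimal sketch (a hypothetical refactoring, not the controller's actual code; the `_()` translation wrapper and the Pylons context are omitted):

```python
# Status-code -> explanation table; the try/except mirrors the int()
# coercion in get_error_explanation, falling back to 500.
ERROR_EXPLANATIONS = {
    400: ('The request could not be understood by the server'
          ' due to malformed syntax.'),
    401: 'Unauthorized access to resource',
    403: "You don't have permission to view this page",
    404: 'The resource could not be found',
    500: ('The server encountered an unexpected condition'
          ' which prevented it from fulfilling the request.'),
}

def get_error_explanation(code):
    try:
        code = int(code)
    except (TypeError, ValueError):
        code = 500
    return ERROR_EXPLANATIONS.get(code, ERROR_EXPLANATIONS[500])
```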
@@ -1,284 +1,284 @@
|
1 | 1 | """ |
|
2 | 2 | Code to generate a Python model from a database or differences |
|
3 | 3 | between a model and database. |
|
4 | 4 | |
|
5 | 5 | Some of this is borrowed heavily from the AutoCode project at: |
|
6 | 6 | http://code.google.com/p/sqlautocode/ |
|
7 | 7 | """ |
|
8 | 8 | |
|
9 | 9 | import sys |
|
10 | 10 | import logging |
|
11 | 11 | |
|
12 | 12 | import sqlalchemy |
|
13 | 13 | |
|
14 | 14 | from rhodecode.lib.dbmigrate import migrate |
|
15 | 15 | from rhodecode.lib.dbmigrate.migrate import changeset |
|
16 | 16 | |
|
17 | 17 | |
|
18 | 18 | log = logging.getLogger(__name__) |
|
19 | 19 | HEADER = """ |
|
20 | 20 | ## File autogenerated by genmodel.py |
|
21 | 21 | |
|
22 | 22 | from sqlalchemy import * |
|
23 | 23 | meta = MetaData() |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | DECLARATIVE_HEADER = """ |
|
27 | 27 | ## File autogenerated by genmodel.py |
|
28 | 28 | |
|
29 | 29 | from sqlalchemy import * |
|
30 | 30 | from sqlalchemy.ext import declarative |
|
31 | 31 | |
|
32 | 32 | Base = declarative.declarative_base() |
|
33 | 33 | """ |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class ModelGenerator(object): |
|
37 | 37 | """Various transformations from an A, B diff. |
|
38 | 38 | |
|
39 | 39 | In the implementation, A tends to be called the model and B |
|
40 | 40 | the database (although this is not true of all diffs). |
|
41 | 41 | The diff is directionless, but transformations apply the diff |
|
42 | 42 | in a particular direction, described in the method name. |
|
43 | 43 | """ |
|
44 | 44 | |
|
45 | 45 | def __init__(self, diff, engine, declarative=False): |
|
46 | 46 | self.diff = diff |
|
47 | 47 | self.engine = engine |
|
48 | 48 | self.declarative = declarative |
|
49 | 49 | |
|
50 | 50 | def column_repr(self, col): |
|
51 | 51 | kwarg = [] |
|
52 | 52 | if col.key != col.name: |
|
53 | 53 | kwarg.append('key') |
|
54 | 54 | if col.primary_key: |
|
55 | 55 | col.primary_key = True # otherwise it dumps it as 1 |
|
56 | 56 | kwarg.append('primary_key') |
|
57 | 57 | if not col.nullable: |
|
58 | 58 | kwarg.append('nullable') |
|
59 | 59 | if col.onupdate: |
|
60 | 60 | kwarg.append('onupdate') |
|
61 | 61 | if col.default: |
|
62 | 62 | if col.primary_key: |
|
63 | 63 | # I found that PostgreSQL automatically creates a |
|
64 | 64 | # default value for the sequence, but let's not show |
|
65 | 65 | # that. |
|
66 | 66 | pass |
|
67 | 67 | else: |
|
68 | 68 | kwarg.append('default') |
|
69 | 69 | args = ['%s=%r' % (k, getattr(col, k)) for k in kwarg] |
|
70 | 70 | |
|
71 | 71 | # crs: not sure if this is a good idea, but it gets rid of extra |
|
72 | 72 | # u'' |
|
73 | 73 | name = col.name.encode('utf8') |
|
74 | 74 | |
|
75 | 75 | type_ = col.type |
|
76 | 76 | for cls in col.type.__class__.__mro__: |
|
77 | 77 | if cls.__module__ == 'sqlalchemy.types' and \ |
|
78 | 78 | not cls.__name__.isupper(): |
|
79 | 79 | if cls is not type_.__class__: |
|
80 | 80 | type_ = cls() |
|
81 | 81 | break |
|
82 | 82 | |
|
83 | 83 | type_repr = repr(type_) |
|
84 | 84 | if type_repr.endswith('()'): |
|
85 | 85 | type_repr = type_repr[:-2] |
|
86 | 86 | |
|
87 | 87 | constraints = [repr(cn) for cn in col.constraints] |
|
88 | 88 | |
|
89 | 89 | data = { |
|
90 | 90 | 'name': name, |
|
91 | 91 | 'commonStuff': ', '.join([type_repr] + constraints + args), |
|
92 | 92 | } |
|
93 | 93 | |
|
94 | 94 | if self.declarative: |
|
95 | 95 | return """%(name)s = Column(%(commonStuff)s)""" % data |
|
96 | 96 | else: |
|
97 | 97 | return """Column(%(name)r, %(commonStuff)s)""" % data |
|
98 | 98 | |
|
99 | 99 | def _getTableDefn(self, table, metaName='meta'): |
|
100 | 100 | out = [] |
|
101 | 101 | tableName = table.name |
|
102 | 102 | if self.declarative: |
|
103 | 103 | out.append("class %(table)s(Base):" % {'table': tableName}) |
|
104 | 104 | out.append(" __tablename__ = '%(table)s'\n" % |
|
105 | 105 | {'table': tableName}) |
|
106 | 106 | for col in table.columns: |
|
107 | 107 | out.append(" %s" % self.column_repr(col)) |
|
108 | 108 | out.append('\n') |
|
109 | 109 | else: |
|
110 | 110 | out.append("%(table)s = Table('%(table)s', %(meta)s," % |
|
111 | 111 | {'table': tableName, 'meta': metaName}) |
|
112 | 112 | for col in table.columns: |
|
113 | 113 | out.append(" %s," % self.column_repr(col)) |
|
114 | 114 | out.append(")\n") |
|
115 | 115 | return out |
|
116 | 116 | |
|
117 | 117 | def _get_tables(self,missingA=False,missingB=False,modified=False): |
|
118 | 118 | to_process = [] |
|
119 | 119 | for bool_,names,metadata in ( |
|
120 | 120 | (missingA,self.diff.tables_missing_from_A,self.diff.metadataB), |
|
121 | 121 | (missingB,self.diff.tables_missing_from_B,self.diff.metadataA), |
|
122 | 122 | (modified,self.diff.tables_different,self.diff.metadataA), |
|
123 | 123 | ): |
|
124 | 124 | if bool_: |
|
125 | 125 | for name in names: |
|
126 | 126 | yield metadata.tables.get(name) |
|
127 | 127 | |
|
128 | 128 | def genBDefinition(self): |
|
129 | 129 | """Generates the source code for a definition of B. |
|
130 | 130 | |
|
131 | 131 | Assumes a diff where A is empty. |
|
132 | 132 | |
|
133 | 133 | Was: toPython. Assume database (B) is current and model (A) is empty. |
|
134 | 134 | """ |
|
135 | 135 | |
|
136 | 136 | out = [] |
|
137 | 137 | if self.declarative: |
|
138 | 138 | out.append(DECLARATIVE_HEADER) |
|
139 | 139 | else: |
|
140 | 140 | out.append(HEADER) |
|
141 | 141 | out.append("") |
|
142 | 142 | for table in self._get_tables(missingA=True): |
|
143 | 143 | out.extend(self._getTableDefn(table)) |
|
144 | 144 | return '\n'.join(out) |
|
145 | 145 | |
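
A hedged usage sketch for `genBDefinition`: diffing an empty model against a live database puts every reflected table into `tables_missing_from_A`, so the generator emits source for the whole schema. The engine URL is illustrative, and the module paths assume the sqlalchemy-migrate layout vendored under `rhodecode.lib.dbmigrate`:

```python
import sqlalchemy
from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff

# An empty MetaData() stands in for an empty model (A); the database
# is B, so each of its tables is "missing from A" and gets dumped.
engine = sqlalchemy.create_engine('sqlite:///example.db')
diff = schemadiff.getDiffOfModelAgainstDatabase(sqlalchemy.MetaData(), engine)
print genmodel.ModelGenerator(diff, engine, declarative=True).genBDefinition()
```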
|
146 | 146 | def genB2AMigration(self, indent=' '): |
|
147 | '''Generate a migration from B to A. | |
|
147 | """Generate a migration from B to A. | |
|
148 | 148 | |
|
149 | 149 | Was: toUpgradeDowngradePython |
|
150 | 150 | Assume model (A) is most current and database (B) is out-of-date. |
|
151 | ''' | |
|
151 | """ | |
|
152 | 152 | |
|
153 | 153 | decls = ['from migrate.changeset import schema', |
|
154 | 154 | 'pre_meta = MetaData()', |
|
155 | 155 | 'post_meta = MetaData()', |
|
156 | 156 | ] |
|
157 | 157 | upgradeCommands = ['pre_meta.bind = migrate_engine', |
|
158 | 158 | 'post_meta.bind = migrate_engine'] |
|
159 | 159 | downgradeCommands = list(upgradeCommands) |
|
160 | 160 | |
|
161 | 161 | for tn in self.diff.tables_missing_from_A: |
|
162 | 162 | pre_table = self.diff.metadataB.tables[tn] |
|
163 | 163 | decls.extend(self._getTableDefn(pre_table, metaName='pre_meta')) |
|
164 | 164 | upgradeCommands.append( |
|
165 | 165 | "pre_meta.tables[%(table)r].drop()" % {'table': tn}) |
|
166 | 166 | downgradeCommands.append( |
|
167 | 167 | "pre_meta.tables[%(table)r].create()" % {'table': tn}) |
|
168 | 168 | |
|
169 | 169 | for tn in self.diff.tables_missing_from_B: |
|
170 | 170 | post_table = self.diff.metadataA.tables[tn] |
|
171 | 171 | decls.extend(self._getTableDefn(post_table, metaName='post_meta')) |
|
172 | 172 | upgradeCommands.append( |
|
173 | 173 | "post_meta.tables[%(table)r].create()" % {'table': tn}) |
|
174 | 174 | downgradeCommands.append( |
|
175 | 175 | "post_meta.tables[%(table)r].drop()" % {'table': tn}) |
|
176 | 176 | |
|
177 | 177 | for (tn, td) in self.diff.tables_different.iteritems(): |
|
178 | 178 | if td.columns_missing_from_A or td.columns_different: |
|
179 | 179 | pre_table = self.diff.metadataB.tables[tn] |
|
180 | 180 | decls.extend(self._getTableDefn( |
|
181 | 181 | pre_table, metaName='pre_meta')) |
|
182 | 182 | if td.columns_missing_from_B or td.columns_different: |
|
183 | 183 | post_table = self.diff.metadataA.tables[tn] |
|
184 | 184 | decls.extend(self._getTableDefn( |
|
185 | 185 | post_table, metaName='post_meta')) |
|
186 | 186 | |
|
187 | 187 | for col in td.columns_missing_from_A: |
|
188 | 188 | upgradeCommands.append( |
|
189 | 189 | 'pre_meta.tables[%r].columns[%r].drop()' % (tn, col)) |
|
190 | 190 | downgradeCommands.append( |
|
191 | 191 | 'pre_meta.tables[%r].columns[%r].create()' % (tn, col)) |
|
192 | 192 | for col in td.columns_missing_from_B: |
|
193 | 193 | upgradeCommands.append( |
|
194 | 194 | 'post_meta.tables[%r].columns[%r].create()' % (tn, col)) |
|
195 | 195 | downgradeCommands.append( |
|
196 | 196 | 'post_meta.tables[%r].columns[%r].drop()' % (tn, col)) |
|
197 | 197 | for modelCol, databaseCol, modelDecl, databaseDecl in td.columns_different: |
|
198 | 198 | upgradeCommands.append( |
|
199 | 199 | 'assert False, "Can\'t alter columns: %s:%s=>%s"' % ( |
|
200 | 200 | tn, modelCol.name, databaseCol.name)) |
|
201 | 201 | downgradeCommands.append( |
|
202 | 202 | 'assert False, "Can\'t alter columns: %s:%s=>%s"' % ( |
|
203 | 203 | tn, modelCol.name, databaseCol.name)) |
|
204 | 204 | |
|
205 | 205 | return ( |
|
206 | 206 | '\n'.join(decls), |
|
207 | 207 | '\n'.join('%s%s' % (indent, line) for line in upgradeCommands), |
|
208 | 208 | '\n'.join('%s%s' % (indent, line) for line in downgradeCommands)) |
|
209 | 209 | |
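
`genB2AMigration` returns three strings: the table declarations, the indented upgrade commands, and the indented downgrade commands, meant to be assembled into a sqlalchemy-migrate version script. A sketch of what such a script might look like for a hypothetical `user_logs` table present in the model (A) but not yet in the database (B):

```python
# Illustrative only: 'user_logs' and its column are made-up names.
from sqlalchemy import *
from migrate.changeset import schema
pre_meta = MetaData()
post_meta = MetaData()

user_logs = Table('user_logs', post_meta,
    Column('log_id', Integer, primary_key=True),
)

def upgrade(migrate_engine):
    pre_meta.bind = migrate_engine
    post_meta.bind = migrate_engine
    post_meta.tables['user_logs'].create()

def downgrade(migrate_engine):
    pre_meta.bind = migrate_engine
    post_meta.bind = migrate_engine
    post_meta.tables['user_logs'].drop()
```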
|
210 | 210 | def _db_can_handle_this_change(self,td): |
|
211 | 211 | """Check if the database can handle going from B to A.""" |
|
212 | 212 | |
|
213 | 213 | if (td.columns_missing_from_B |
|
214 | 214 | and not td.columns_missing_from_A |
|
215 | 215 | and not td.columns_different): |
|
216 | 216 | # Even sqlite can handle column additions. |
|
217 | 217 | return True |
|
218 | 218 | else: |
|
219 | 219 | return not self.engine.url.drivername.startswith('sqlite') |
|
220 | 220 | |
|
221 | 221 | def runB2A(self): |
|
222 | 222 | """Goes from B to A. |
|
223 | 223 | |
|
224 | 224 | Was: applyModel. Apply model (A) to current database (B). |
|
225 | 225 | """ |
|
226 | 226 | |
|
227 | 227 | meta = sqlalchemy.MetaData(self.engine) |
|
228 | 228 | |
|
229 | 229 | for table in self._get_tables(missingA=True): |
|
230 | 230 | table = table.tometadata(meta) |
|
231 | 231 | table.drop() |
|
232 | 232 | for table in self._get_tables(missingB=True): |
|
233 | 233 | table = table.tometadata(meta) |
|
234 | 234 | table.create() |
|
235 | 235 | for modelTable in self._get_tables(modified=True): |
|
236 | 236 | tableName = modelTable.name |
|
237 | 237 | modelTable = modelTable.tometadata(meta) |
|
238 | 238 | dbTable = self.diff.metadataB.tables[tableName] |
|
239 | 239 | |
|
240 | 240 | td = self.diff.tables_different[tableName] |
|
241 | 241 | |
|
242 | 242 | if self._db_can_handle_this_change(td): |
|
243 | 243 | |
|
244 | 244 | for col in td.columns_missing_from_B: |
|
245 | 245 | modelTable.columns[col].create() |
|
246 | 246 | for col in td.columns_missing_from_A: |
|
247 | 247 | dbTable.columns[col].drop() |
|
248 | 248 | # XXX handle column changes here. |
|
249 | 249 | else: |
|
250 | 250 | # Sqlite doesn't support drop column, so you have to |
|
251 | 251 | # do more: create temp table, copy data to it, drop |
|
252 | 252 | # old table, create new table, copy data back. |
|
253 | 253 | # |
|
254 | 254 | # I wonder if this is guaranteed to be unique? |
|
255 | 255 | tempName = '_temp_%s' % modelTable.name |
|
256 | 256 | |
|
257 | 257 | def getCopyStatement(): |
|
258 | 258 | preparer = self.engine.dialect.preparer |
|
259 | 259 | commonCols = [] |
|
260 | 260 | for modelCol in modelTable.columns: |
|
261 | 261 | if modelCol.name in dbTable.columns: |
|
262 | 262 | commonCols.append(modelCol.name) |
|
263 | 263 | commonColsStr = ', '.join(commonCols) |
|
264 | 264 | return 'INSERT INTO %s (%s) SELECT %s FROM %s' % \ |
|
265 | 265 | (tableName, commonColsStr, commonColsStr, tempName) |
|
266 | 266 | |
|
267 | 267 | # Move the data in one transaction, so that we don't |
|
268 | 268 | # leave the database in a nasty state. |
|
269 | 269 | connection = self.engine.connect() |
|
270 | 270 | trans = connection.begin() |
|
271 | 271 | try: |
|
272 | 272 | connection.execute( |
|
273 | 273 | 'CREATE TEMPORARY TABLE %s as SELECT * from %s' % \ |
|
274 | 274 | (tempName, modelTable.name)) |
|
275 | 275 | # make sure the drop takes place inside our |
|
276 | 276 | # transaction with the bind parameter |
|
277 | 277 | modelTable.drop(bind=connection) |
|
278 | 278 | modelTable.create(bind=connection) |
|
279 | 279 | connection.execute(getCopyStatement()) |
|
280 | 280 | connection.execute('DROP TABLE %s' % tempName) |
|
281 | 281 | trans.commit() |
|
282 | 282 | except: |
|
283 | 283 | trans.rollback() |
|
284 | 284 | raise |
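
Applying a model with `runB2A` is then a two-step affair: compute a diff, replay it against the engine. A minimal sketch reusing the imports from the earlier genmodel example; `model_metadata` and `engine` are hypothetical names for the application's `MetaData` and a bound engine. On SQLite, dropping or altering columns takes the temp-table copy path shown above:

```python
# If the diff is non-empty, mutate the database (B) until it matches
# the model (A): drop extra tables, create missing ones, adjust columns.
diff = schemadiff.getDiffOfModelAgainstDatabase(model_metadata, engine)
if diff:
    genmodel.ModelGenerator(diff, engine).runB2A()
```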
@@ -1,295 +1,295 @@
|
1 | 1 | """ |
|
2 | 2 | Schema differencing support. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | import logging |
|
6 | 6 | import sqlalchemy |
|
7 | 7 | |
|
8 | 8 | from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_06 |
|
9 | 9 | from sqlalchemy.types import Float |
|
10 | 10 | |
|
11 | 11 | log = logging.getLogger(__name__) |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None): |
|
15 | 15 | """ |
|
16 | 16 | Return differences of model against database. |
|
17 | 17 | |
|
18 | 18 | :return: object which will evaluate to :keyword:`True` if there \ |
|
19 | 19 | are differences else :keyword:`False`. |
|
20 | 20 | """ |
|
21 | 21 | db_metadata = sqlalchemy.MetaData(engine) |
|
22 | 22 | db_metadata.reflect() |
|
23 | 23 | |
|
24 | 24 | # sqlite will include a dynamically generated 'sqlite_sequence' table if |
|
25 | 25 | # there are autoincrement sequences in the database; this should not be |
|
26 | 26 | # compared. |
|
27 | 27 | if engine.dialect.name == 'sqlite': |
|
28 | 28 | if 'sqlite_sequence' in db_metadata.tables: |
|
29 | 29 | db_metadata.remove(db_metadata.tables['sqlite_sequence']) |
|
30 | 30 | |
|
31 | 31 | return SchemaDiff(metadata, db_metadata, |
|
32 | 32 | labelA='model', |
|
33 | 33 | labelB='database', |
|
34 | 34 | excludeTables=excludeTables) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None): |
|
38 | 38 | """ |
|
39 | 39 | Return differences of model against another model. |
|
40 | 40 | |
|
41 | 41 | :return: object which will evaluate to :keyword:`True` if there \ |
|
42 | 42 | are differences else :keyword:`False`. |
|
43 | 43 | """ |
|
44 | 44 | return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | class ColDiff(object): |
|
48 | 48 | """ |
|
49 | 49 | Container for differences in one :class:`~sqlalchemy.schema.Column` |
|
50 | 50 | between two :class:`~sqlalchemy.schema.Table` instances, ``A`` |
|
51 | 51 | and ``B``. |
|
52 | 52 | |
|
53 | 53 | .. attribute:: col_A |
|
54 | 54 | |
|
55 | 55 | The :class:`~sqlalchemy.schema.Column` object for A. |
|
56 | 56 | |
|
57 | 57 | .. attribute:: col_B |
|
58 | 58 | |
|
59 | 59 | The :class:`~sqlalchemy.schema.Column` object for B. |
|
60 | 60 | |
|
61 | 61 | .. attribute:: type_A |
|
62 | 62 | |
|
63 | 63 | The most generic type of the :class:`~sqlalchemy.schema.Column` |
|
64 | 64 | object in A. |
|
65 | 65 | |
|
66 | 66 | .. attribute:: type_B |
|
67 | 67 | |
|
68 | 68 | The most generic type of the :class:`~sqlalchemy.schema.Column` |
|
69 | 69 | object in B. |
|
70 | 70 | |
|
71 | 71 | """ |
|
72 | 72 | |
|
73 | 73 | diff = False |
|
74 | 74 | |
|
75 | 75 | def __init__(self,col_A,col_B): |
|
76 | 76 | self.col_A = col_A |
|
77 | 77 | self.col_B = col_B |
|
78 | 78 | |
|
79 | 79 | self.type_A = col_A.type |
|
80 | 80 | self.type_B = col_B.type |
|
81 | 81 | |
|
82 | 82 | self.affinity_A = self.type_A._type_affinity |
|
83 | 83 | self.affinity_B = self.type_B._type_affinity |
|
84 | 84 | |
|
85 | 85 | if self.affinity_A is not self.affinity_B: |
|
86 | 86 | self.diff = True |
|
87 | 87 | return |
|
88 | 88 | |
|
89 | 89 | if isinstance(self.type_A,Float) or isinstance(self.type_B,Float): |
|
90 | 90 | if not (isinstance(self.type_A,Float) and isinstance(self.type_B,Float)): |
|
91 | 91 | self.diff=True |
|
92 | 92 | return |
|
93 | 93 | |
|
94 | 94 | for attr in ('precision','scale','length'): |
|
95 | 95 | A = getattr(self.type_A,attr,None) |
|
96 | 96 | B = getattr(self.type_B,attr,None) |
|
97 | 97 | if not (A is None or B is None) and A!=B: |
|
98 | 98 | self.diff=True |
|
99 | 99 | return |
|
100 | 100 | |
|
101 | 101 | def __nonzero__(self): |
|
102 | 102 | return self.diff |
|
103 | 103 | |
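
A small sketch of `ColDiff` in isolation; the columns are built ad hoc, and truthiness goes through `__nonzero__` above:

```python
from sqlalchemy import Column, Integer, String

# Same affinity and no length/precision/scale mismatch -> no diff.
same = ColDiff(Column('id', Integer), Column('id', Integer))
# Same affinity but length 50 vs 100 -> flagged as different.
longer = ColDiff(Column('name', String(50)), Column('name', String(100)))

assert not same
assert bool(longer)
```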
|
104 | 104 | class TableDiff(object): |
|
105 | 105 | """ |
|
106 | 106 | Container for differences in one :class:`~sqlalchemy.schema.Table` |
|
107 | 107 | between two :class:`~sqlalchemy.schema.MetaData` instances, ``A`` |
|
108 | 108 | and ``B``. |
|
109 | 109 | |
|
110 | 110 | .. attribute:: columns_missing_from_A |
|
111 | 111 | |
|
112 | 112 | A sequence of column names that were found in B but weren't in |
|
113 | 113 | A. |
|
114 | 114 | |
|
115 | 115 | .. attribute:: columns_missing_from_B |
|
116 | 116 | |
|
117 | 117 | A sequence of column names that were found in A but weren't in |
|
118 | 118 | B. |
|
119 | 119 | |
|
120 | 120 | .. attribute:: columns_different |
|
121 | 121 | |
|
122 | 122 | A dictionary containing information about columns that were |
|
123 | 123 | found to be different. |
|
124 | 124 | It maps column names to :class:`ColDiff` objects describing the |
|
125 | 125 | differences found. |
|
126 | 126 | """ |
|
127 | 127 | __slots__ = ( |
|
128 | 128 | 'columns_missing_from_A', |
|
129 | 129 | 'columns_missing_from_B', |
|
130 | 130 | 'columns_different', |
|
131 | 131 | ) |
|
132 | 132 | |
|
133 | 133 | def __nonzero__(self): |
|
134 | 134 | return bool( |
|
135 | 135 | self.columns_missing_from_A or |
|
136 | 136 | self.columns_missing_from_B or |
|
137 | 137 | self.columns_different |
|
138 | 138 | ) |
|
139 | 139 | |
|
140 | 140 | class SchemaDiff(object): |
|
141 | 141 | """ |
|
142 | 142 | Compute the difference between two :class:`~sqlalchemy.schema.MetaData` |
|
143 | 143 | objects. |
|
144 | 144 | |
|
145 | 145 | The string representation of a :class:`SchemaDiff` will summarise |
|
146 | 146 | the changes found between the two |
|
147 | 147 | :class:`~sqlalchemy.schema.MetaData` objects. |
|
148 | 148 | |
|
149 | 149 | The length of a :class:`SchemaDiff` will give the number of |
|
150 | 150 | changes found, enabling it to be used much like a boolean in |
|
151 | 151 | expressions. |
|
152 | 152 | |
|
153 | 153 | :param metadataA: |
|
154 | 154 | First :class:`~sqlalchemy.schema.MetaData` to compare. |
|
155 | 155 | |
|
156 | 156 | :param metadataB: |
|
157 | 157 | Second :class:`~sqlalchemy.schema.MetaData` to compare. |
|
158 | 158 | |
|
159 | 159 | :param labelA: |
|
160 | 160 | The label to use in messages about the first |
|
161 | 161 | :class:`~sqlalchemy.schema.MetaData`. |
|
162 | 162 | |
|
163 | 163 | :param labelB: |
|
164 | 164 | The label to use in messages about the second |
|
165 | 165 | :class:`~sqlalchemy.schema.MetaData`. |
|
166 | 166 | |
|
167 | 167 | :param excludeTables: |
|
168 | 168 | A sequence of table names to exclude. |
|
169 | 169 | |
|
170 | 170 | .. attribute:: tables_missing_from_A |
|
171 | 171 | |
|
172 | 172 | A sequence of table names that were found in B but weren't in |
|
173 | 173 | A. |
|
174 | 174 | |
|
175 | 175 | .. attribute:: tables_missing_from_B |
|
176 | 176 | |
|
177 | 177 | A sequence of table names that were found in A but weren't in |
|
178 | 178 | B. |
|
179 | 179 | |
|
180 | 180 | .. attribute:: tables_different |
|
181 | 181 | |
|
182 | 182 | A dictionary containing information about tables that were found |
|
183 | 183 | to be different. |
|
184 | 184 | It maps table names to :class:`TableDiff` objects describing the |
|
185 | 185 | differences found. |
|
186 | 186 | """ |
|
187 | 187 | |
|
188 | 188 | def __init__(self, |
|
189 | 189 | metadataA, metadataB, |
|
190 | 190 | labelA='metadataA', |
|
191 | 191 | labelB='metadataB', |
|
192 | 192 | excludeTables=None): |
|
193 | 193 | |
|
194 | 194 | self.metadataA, self.metadataB = metadataA, metadataB |
|
195 | 195 | self.labelA, self.labelB = labelA, labelB |
|
196 | 196 | self.label_width = max(len(labelA),len(labelB)) |
|
197 | 197 | excludeTables = set(excludeTables or []) |
|
198 | 198 | |
|
199 | 199 | A_table_names = set(metadataA.tables.keys()) |
|
200 | 200 | B_table_names = set(metadataB.tables.keys()) |
|
201 | 201 | |
|
202 | 202 | self.tables_missing_from_A = sorted( |
|
203 | 203 | B_table_names - A_table_names - excludeTables |
|
204 | 204 | ) |
|
205 | 205 | self.tables_missing_from_B = sorted( |
|
206 | 206 | A_table_names - B_table_names - excludeTables |
|
207 | 207 | ) |
|
208 | 208 | |
|
209 | 209 | self.tables_different = {} |
|
210 | 210 | for table_name in A_table_names.intersection(B_table_names): |
|
211 | 211 | |
|
212 | 212 | td = TableDiff() |
|
213 | 213 | |
|
214 | 214 | A_table = metadataA.tables[table_name] |
|
215 | 215 | B_table = metadataB.tables[table_name] |
|
216 | 216 | |
|
217 | 217 | A_column_names = set(A_table.columns.keys()) |
|
218 | 218 | B_column_names = set(B_table.columns.keys()) |
|
219 | 219 | |
|
220 | 220 | td.columns_missing_from_A = sorted( |
|
221 | 221 | B_column_names - A_column_names |
|
222 | 222 | ) |
|
223 | 223 | |
|
224 | 224 | td.columns_missing_from_B = sorted( |
|
225 | 225 | A_column_names - B_column_names |
|
226 | 226 | ) |
|
227 | 227 | |
|
228 | 228 | td.columns_different = {} |
|
229 | 229 | |
|
230 | 230 | for col_name in A_column_names.intersection(B_column_names): |
|
231 | 231 | |
|
232 | 232 | cd = ColDiff( |
|
233 | 233 | A_table.columns.get(col_name), |
|
234 | 234 | B_table.columns.get(col_name) |
|
235 | 235 | ) |
|
236 | 236 | |
|
237 | 237 | if cd: |
|
238 | 238 | td.columns_different[col_name]=cd |
|
239 | 239 | |
|
240 | 240 | # XXX - index and constraint differences should |
|
241 | 241 | # be checked for here |
|
242 | 242 | |
|
243 | 243 | if td: |
|
244 | 244 | self.tables_different[table_name]=td |
|
245 | 245 | |
|
246 | 246 | def __str__(self): |
|
247 | ''' Summarize differences. ''' | |
|
247 | """ Summarize differences. """ | |
|
248 | 248 | out = [] |
|
249 | 249 | column_template =' %%%is: %%r' % self.label_width |
|
250 | 250 | |
|
251 | 251 | for names,label in ( |
|
252 | 252 | (self.tables_missing_from_A,self.labelA), |
|
253 | 253 | (self.tables_missing_from_B,self.labelB), |
|
254 | 254 | ): |
|
255 | 255 | if names: |
|
256 | 256 | out.append( |
|
257 | 257 | ' tables missing from %s: %s' % ( |
|
258 | 258 | label,', '.join(sorted(names)) |
|
259 | 259 | ) |
|
260 | 260 | ) |
|
261 | 261 | |
|
262 | 262 | for name,td in sorted(self.tables_different.items()): |
|
263 | 263 | out.append( |
|
264 | 264 | ' table with differences: %s' % name |
|
265 | 265 | ) |
|
266 | 266 | for names,label in ( |
|
267 | 267 | (td.columns_missing_from_A,self.labelA), |
|
268 | 268 | (td.columns_missing_from_B,self.labelB), |
|
269 | 269 | ): |
|
270 | 270 | if names: |
|
271 | 271 | out.append( |
|
272 | 272 | ' %s missing these columns: %s' % ( |
|
273 | 273 | label,', '.join(sorted(names)) |
|
274 | 274 | ) |
|
275 | 275 | ) |
|
276 | 276 | for name,cd in td.columns_different.items(): |
|
277 | 277 | out.append(' column with differences: %s' % name) |
|
278 | 278 | out.append(column_template % (self.labelA,cd.col_A)) |
|
279 | 279 | out.append(column_template % (self.labelB,cd.col_B)) |
|
280 | 280 | |
|
281 | 281 | if out: |
|
282 | 282 | out.insert(0, 'Schema diffs:') |
|
283 | 283 | return '\n'.join(out) |
|
284 | 284 | else: |
|
285 | 285 | return 'No schema diffs' |
|
286 | 286 | |
|
287 | 287 | def __len__(self): |
|
288 | 288 | """ |
|
289 | 289 | Used in bool evaluation, return of 0 means no diffs. |
|
290 | 290 | """ |
|
291 | 291 | return ( |
|
292 | 292 | len(self.tables_missing_from_A) + |
|
293 | 293 | len(self.tables_missing_from_B) + |
|
294 | 294 | len(self.tables_different) |
|
295 | 295 | ) |
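
A hedged end-to-end sketch: compare two in-memory models with `getDiffOfModelAgainstModel`. Printing the result exercises `__str__` above, and the truthiness test goes through `__len__`:

```python
from sqlalchemy import MetaData, Table, Column, Integer, String

a, b = MetaData(), MetaData()
Table('users', a, Column('user_id', Integer, primary_key=True))
Table('users', b, Column('user_id', Integer, primary_key=True),
                  Column('email', String(255)))

diff = getDiffOfModelAgainstModel(a, b)
if diff:        # non-zero __len__: one table with differences
    print diff  # reports that metadataA is missing the 'email' column
```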
@@ -1,415 +1,415 @@
|
1 | ''' | |
|
1 | """ | |
|
2 | 2 | Module provides a class for wrapping communication over subprocess.Popen |
|
3 | 3 | input, output, and error streams into a meaningful, non-blocking, concurrent |
|
4 | 4 | stream processor exposing the output data as an iterator fitting to be a |
|
5 | 5 | return value passed by a WSGI application to a WSGI server per PEP 3333. |
|
6 | 6 | |
|
7 | 7 | Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com> |
|
8 | 8 | |
|
9 | 9 | This file is part of git_http_backend.py Project. |
|
10 | 10 | |
|
11 | 11 | git_http_backend.py Project is free software: you can redistribute it and/or |
|
12 | 12 | modify it under the terms of the GNU Lesser General Public License as |
|
13 | 13 | published by the Free Software Foundation, either version 2.1 of the License, |
|
14 | 14 | or (at your option) any later version. |
|
15 | 15 | |
|
16 | 16 | git_http_backend.py Project is distributed in the hope that it will be useful, |
|
17 | 17 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
18 | 18 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
19 | 19 | GNU Lesser General Public License for more details. |
|
20 | 20 | |
|
21 | 21 | You should have received a copy of the GNU Lesser General Public License |
|
22 | 22 | along with git_http_backend.py Project. |
|
23 | 23 | If not, see <http://www.gnu.org/licenses/>. |
|
24 | ''' | |
|
24 | """ | |
|
25 | 25 | import os |
|
26 | 26 | import subprocess |
|
27 | 27 | from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | class StreamFeeder(Thread): |
|
31 | 31 | """ |
|
32 | 32 | Normal writing into a pipe-like object blocks once the buffer is filled. |
|
33 | 33 | This thread feeds data from a file-like object into a pipe |
|
34 | 34 | without blocking the main thread. |
|
35 | 35 | We close the input pipe once the end of the source stream is reached. |
|
36 | 36 | """ |
|
37 | 37 | def __init__(self, source): |
|
38 | 38 | super(StreamFeeder, self).__init__() |
|
39 | 39 | self.daemon = True |
|
40 | 40 | filelike = False |
|
41 | 41 | self.bytes = _bytes() |
|
42 | 42 | if type(source) in (type(''), _bytes, _bytearray): # string-like |
|
43 | 43 | self.bytes = _bytes(source) |
|
44 | 44 | else: # can be either file pointer or file-like |
|
45 | 45 | if type(source) in (int, long): # file pointer it is |
|
46 | 46 | ## converting file descriptor (int) stdin into file-like |
|
47 | 47 | try: |
|
48 | 48 | source = os.fdopen(source, 'rb', 16384) |
|
49 | 49 | except Exception: |
|
50 | 50 | pass |
|
51 | 51 | # let's see if source is file-like by now |
|
52 | 52 | try: |
|
53 | 53 | filelike = source.read |
|
54 | 54 | except Exception: |
|
55 | 55 | pass |
|
56 | 56 | if not filelike and not self.bytes: |
|
57 | 57 | raise TypeError("StreamFeeder's source object must be a readable " |
|
58 | 58 | "file-like, a file descriptor, or a string-like.") |
|
59 | 59 | self.source = source |
|
60 | 60 | self.readiface, self.writeiface = os.pipe() |
|
61 | 61 | |
|
62 | 62 | def run(self): |
|
63 | 63 | t = self.writeiface |
|
64 | 64 | if self.bytes: |
|
65 | 65 | os.write(t, self.bytes) |
|
66 | 66 | else: |
|
67 | 67 | s = self.source |
|
68 | 68 | b = s.read(4096) |
|
69 | 69 | while b: |
|
70 | 70 | os.write(t, b) |
|
71 | 71 | b = s.read(4096) |
|
72 | 72 | os.close(t) |
|
73 | 73 | |
|
74 | 74 | @property |
|
75 | 75 | def output(self): |
|
76 | 76 | return self.readiface |
|
77 | 77 | |
|
78 | 78 | |
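
A hedged usage sketch for `StreamFeeder`: feed an in-memory string to a child process's stdin without blocking the caller. The `cat` command is illustrative only; `feeder.output` is the read end of the pipe, which `subprocess.Popen` accepts as a stdin file descriptor:

```python
import subprocess

feeder = StreamFeeder('hello from the feeder\n')
feeder.start()
# 'cat' echoes its stdin, so the printed output equals the input.
p = subprocess.Popen(['cat'], stdin=feeder.output,
                     stdout=subprocess.PIPE)
print p.communicate()[0]
```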
|
79 | 79 | class InputStreamChunker(Thread): |
|
80 | 80 | def __init__(self, source, target, buffer_size, chunk_size): |
|
81 | 81 | |
|
82 | 82 | super(InputStreamChunker, self).__init__() |
|
83 | 83 | |
|
84 | 84 | self.daemon = True # die die die. |
|
85 | 85 | |
|
86 | 86 | self.source = source |
|
87 | 87 | self.target = target |
|
88 | 88 | self.chunk_count_max = int(buffer_size / chunk_size) + 1 |
|
89 | 89 | self.chunk_size = chunk_size |
|
90 | 90 | |
|
91 | 91 | self.data_added = Event() |
|
92 | 92 | self.data_added.clear() |
|
93 | 93 | |
|
94 | 94 | self.keep_reading = Event() |
|
95 | 95 | self.keep_reading.set() |
|
96 | 96 | |
|
97 | 97 | self.EOF = Event() |
|
98 | 98 | self.EOF.clear() |
|
99 | 99 | |
|
100 | 100 | self.go = Event() |
|
101 | 101 | self.go.set() |
|
102 | 102 | |
|
103 | 103 | def stop(self): |
|
104 | 104 | self.go.clear() |
|
105 | 105 | self.EOF.set() |
|
106 | 106 | try: |
|
107 | 107 | # this is not proper, but is done to force the reader thread let |
|
108 | 108 | # go of the input because, if successful, .close() will send EOF |
|
109 | 109 | # down the pipe. |
|
110 | 110 | self.source.close() |
|
111 | 111 | except: |
|
112 | 112 | pass |
|
113 | 113 | |
|
114 | 114 | def run(self): |
|
115 | 115 | s = self.source |
|
116 | 116 | t = self.target |
|
117 | 117 | cs = self.chunk_size |
|
118 | 118 | ccm = self.chunk_count_max |
|
119 | 119 | kr = self.keep_reading |
|
120 | 120 | da = self.data_added |
|
121 | 121 | go = self.go |
|
122 | 122 | |
|
123 | 123 | try: |
|
124 | 124 | b = s.read(cs) |
|
125 | 125 | except ValueError: |
|
126 | 126 | b = '' |
|
127 | 127 | |
|
128 | 128 | while b and go.is_set(): |
|
129 | 129 | if len(t) > ccm: |
|
130 | 130 | kr.clear() |
|
131 | 131 | kr.wait(2) |
|
132 | 132 | # # this only works on 2.7.x and up |
|
133 | 133 | # if not kr.wait(10): |
|
134 | 134 | # raise Exception("Timed out while waiting for input to be read.") |
|
135 | 135 | # instead we'll use this |
|
136 | 136 | if len(t) > ccm + 3: |
|
137 | 137 | raise IOError("Timed out while waiting for input from subprocess.") |
|
138 | 138 | t.append(b) |
|
139 | 139 | da.set() |
|
140 | 140 | b = s.read(cs) |
|
141 | 141 | self.EOF.set() |
|
142 | 142 | da.set() # for cases when done but there was no input. |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | class BufferedGenerator(): |
|
146 | ''' | |
|
146 | """ | |
|
147 | 147 | The class behaves as a non-blocking, buffered pipe reader. |
|
148 | 148 | It reads chunks of data (through a thread) |
|
149 | 149 | from a blocking pipe and appends them to an array (deque) of chunks. |
|
150 | 150 | Reading is halted in the thread when the maximum number of chunks is buffered. |
|
151 | 151 | The .next() method may operate in blocking or non-blocking fashion, yielding |
|
152 | 152 | '' if no data is ready |
|
153 | 153 | to be sent, or not returning until there is some data to send. |
|
154 | 154 | When we get EOF from the underlying source pipe, we set a marker so that |
|
155 | 155 | StopIteration is raised after the last chunk of data is yielded. |
|
156 | ''' | |
|
156 | """ | |
|
157 | 157 | |
|
158 | 158 | def __init__(self, source, buffer_size=65536, chunk_size=4096, |
|
159 | 159 | starting_values=[], bottomless=False): |
|
160 | 160 | |
|
161 | 161 | if bottomless: |
|
162 | 162 | maxlen = int(buffer_size / chunk_size) |
|
163 | 163 | else: |
|
164 | 164 | maxlen = None |
|
165 | 165 | |
|
166 | 166 | self.data = deque(starting_values, maxlen) |
|
167 | 167 | |
|
168 | 168 | self.worker = InputStreamChunker(source, self.data, buffer_size, |
|
169 | 169 | chunk_size) |
|
170 | 170 | if starting_values: |
|
171 | 171 | self.worker.data_added.set() |
|
172 | 172 | self.worker.start() |
|
173 | 173 | |
|
174 | 174 | #################### |
|
175 | 175 | # Generator's methods |
|
176 | 176 | #################### |
|
177 | 177 | |
|
178 | 178 | def __iter__(self): |
|
179 | 179 | return self |
|
180 | 180 | |
|
181 | 181 | def next(self): |
|
182 | 182 | while not len(self.data) and not self.worker.EOF.is_set(): |
|
183 | 183 | self.worker.data_added.clear() |
|
184 | 184 | self.worker.data_added.wait(0.2) |
|
185 | 185 | if len(self.data): |
|
186 | 186 | self.worker.keep_reading.set() |
|
187 | 187 | return _bytes(self.data.popleft()) |
|
188 | 188 | elif self.worker.EOF.is_set(): |
|
189 | 189 | raise StopIteration |
|
190 | 190 | |
|
191 | 191 | def throw(self, type, value=None, traceback=None): |
|
192 | 192 | if not self.worker.EOF.is_set(): |
|
193 | 193 | raise type(value) |
|
194 | 194 | |
|
195 | 195 | def start(self): |
|
196 | 196 | self.worker.start() |
|
197 | 197 | |
|
198 | 198 | def stop(self): |
|
199 | 199 | self.worker.stop() |
|
200 | 200 | |
|
201 | 201 | def close(self): |
|
202 | 202 | try: |
|
203 | 203 | self.worker.stop() |
|
204 | 204 | self.throw(GeneratorExit) |
|
205 | 205 | except (GeneratorExit, StopIteration): |
|
206 | 206 | pass |
|
207 | 207 | |
|
208 | 208 | def __del__(self): |
|
209 | 209 | self.close() |
|
210 | 210 | |
|
211 | 211 | #################### |
|
212 | 212 | # Threaded reader's infrastructure. |
|
213 | 213 | #################### |
|
214 | 214 | @property |
|
215 | 215 | def input(self): |
|
216 | 216 | return self.worker.w |
|
217 | 217 | |
|
218 | 218 | @property |
|
219 | 219 | def data_added_event(self): |
|
220 | 220 | return self.worker.data_added |
|
221 | 221 | |
|
222 | 222 | @property |
|
223 | 223 | def data_added(self): |
|
224 | 224 | return self.worker.data_added.is_set() |
|
225 | 225 | |
|
226 | 226 | @property |
|
227 | 227 | def reading_paused(self): |
|
228 | 228 | return not self.worker.keep_reading.is_set() |
|
229 | 229 | |
|
230 | 230 | @property |
|
231 | 231 | def done_reading_event(self): |
|
232 | ''' | |
|
232 | """ | |
|
233 | 233 | A set done_reading_event does not mean that the iterator's buffer is empty. |
|
234 | 234 | The iterator may have finished reading from the underlying source, but the |
|
235 | 235 | chunks it read may still be available for serving through the .next() method. |
|
236 | 236 | |
|
237 | 237 | @return An Event class instance. |
|
238 | ''' | |
|
238 | """ | |
|
239 | 239 | return self.worker.EOF |
|
240 | 240 | |
|
241 | 241 | @property |
|
242 | 242 | def done_reading(self): |
|
243 | ''' | |
|
243 | """ | |
|
244 | 244 | done_reading being True does not mean that the iterator's buffer is empty. |
|
245 | 245 | The iterator may have finished reading from the underlying source, but the |
|
246 | 246 | chunks it read may still be available for serving through the .next() method. |
|
247 | 247 | |
|
248 | 248 | @return A bool value. |
|
249 | ''' | |
|
249 | """ | |
|
250 | 250 | return self.worker.EOF.is_set() |
|
251 | 251 | |
|
252 | 252 | @property |
|
253 | 253 | def length(self): |
|
254 | ''' | |
|
254 | """ | |
|
255 | 255 | returns int. |
|
256 | 256 | |
|
257 | 257 | This is the length of the queue of chunks, not the length of |
|
258 | 258 | the combined contents in those chunks. |
|
259 | 259 | |
|
260 | 260 | __len__() cannot be meaningfully implemented because this |
|
261 | 261 | reader is just flying through bottomless content and |
|
262 | 262 | can only know the length of what it has already seen. |
|
263 | 263 | |
|
264 | 264 | If __len__() on WSGI server per PEP 3333 returns a value, |
|
265 | 265 | the response's length will be set to that. In order not to |
|
266 | 266 | confuse WSGI PEP3333 servers, we will not implement __len__ |
|
267 | 267 | at all. |
|
268 | ''' | |
|
268 | """ | |
|
269 | 269 | return len(self.data) |
|
270 | 270 | |
|
271 | 271 | def prepend(self, x): |
|
272 | 272 | self.data.appendleft(x) |
|
273 | 273 | |
|
274 | 274 | def append(self, x): |
|
275 | 275 | self.data.append(x) |
|
276 | 276 | |
|
277 | 277 | def extend(self, o): |
|
278 | 278 | self.data.extend(o) |
|
279 | 279 | |
|
280 | 280 | def __getitem__(self, i): |
|
281 | 281 | return self.data[i] |
|
282 | 282 | |
|
283 | 283 | |
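
A hedged sketch of `BufferedGenerator` on its own: wrap a subprocess's stdout and stream the chunks. The reader thread is started in `__init__`, and iteration ends with `StopIteration` once EOF is set and the deque is drained (`ls -la` is illustrative only):

```python
import subprocess
import sys

p = subprocess.Popen('ls -la', shell=True, stdout=subprocess.PIPE)
for chunk in BufferedGenerator(p.stdout):
    sys.stdout.write(chunk)  # chunks arrive as the child produces them
```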
|
284 | 284 | class SubprocessIOChunker(object): |
|
285 | ''' | |
|
285 | """ | |
|
286 | 286 | Processor class wrapping handling of subprocess IO. |
|
287 | 287 | |
|
288 | 288 | In a way, this is a "communicate()" replacement with a twist. |
|
289 | 289 | |
|
290 | 290 | - We are multithreaded. Writing in and reading out, err are all sep threads. |
|
291 | 291 | - We support concurrent (in and out) stream processing. |
|
292 | 292 | - The output is not a stream. It's a queue of read string (bytes, not unicode) |
|
293 | 293 | chunks. The object behaves as an iterable; you can iterate it with "for chunk in obj:". |
|
294 | 294 | - We are non-blocking in more respects than communicate() |
|
295 | 295 | (reading from subprocess out pauses when internal buffer is full, but |
|
296 | 296 | does not block the parent calling code. On the flip side, reading from |
|
297 | 297 | slow-yielding subprocess may block the iteration until data shows up. This |
|
298 | 298 | does not block the parallel inpipe reading occurring in a parallel thread.) |
|
299 | 299 | |
|
300 | 300 | The purpose of the object is to allow us to wrap subprocess interactions into |
|
301 | 301 | an iterable that can be passed to a WSGI server as the application's return |
|
302 | 302 | value. Because of stream-processing-ability, WSGI does not have to read ALL |
|
303 | 303 | of the subprocess's output and buffer it, before handing it to WSGI server for |
|
304 | 304 | HTTP response. Instead, the class initializer reads just a bit of the stream |
|
305 | 305 | to figure out if an error occurred or is likely to occur and if not, just hands the |
|
306 | 306 | further iteration over subprocess output to the server for completion of HTTP |
|
307 | 307 | response. |
|
308 | 308 | |
|
309 | 309 | The real or perceived subprocess error is trapped and raised as one of |
|
310 | 310 | the EnvironmentError family of exceptions. |
|
311 | 311 | |
|
312 | 312 | Example usage: |
|
313 | 313 | # try: |
|
314 | 314 | # answer = SubprocessIOChunker( |
|
315 | 315 | # cmd, |
|
316 | 316 | # input, |
|
317 | 317 | # buffer_size = 65536, |
|
318 | 318 | # chunk_size = 4096 |
|
319 | 319 | # ) |
|
320 | 320 | # except (EnvironmentError) as e: |
|
321 | 321 | # print str(e) |
|
322 | 322 | # raise e |
|
323 | 323 | # |
|
324 | 324 | # return answer |
|
325 | 325 | |
|
326 | 326 | |
|
327 | ''' | |
|
327 | """ | |
|
328 | 328 | def __init__(self, cmd, inputstream=None, buffer_size=65536, |
|
329 | 329 | chunk_size=4096, starting_values=[], **kwargs): |
|
330 | ''' | |
|
330 | """ | |
|
331 | 331 | Initializes SubprocessIOChunker |
|
332 | 332 | |
|
333 | 333 | :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings |
|
334 | 334 | :param inputstream: (Default: None) A file-like, string, or file pointer. |
|
335 | 335 | :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes. |
|
336 | 336 | :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller. |
|
337 | 337 | :param starting_values: (Default: []) An array of strings to put in front of output que. |
|
338 | ''' | |
|
338 | """ | |
|
339 | 339 | |
|
340 | 340 | if inputstream: |
|
341 | 341 | input_streamer = StreamFeeder(inputstream) |
|
342 | 342 | input_streamer.start() |
|
343 | 343 | inputstream = input_streamer.output |
|
344 | 344 | |
|
345 | 345 | _shell = kwargs.get('shell', True) |
|
346 | 346 | if isinstance(cmd, (list, tuple)): |
|
347 | 347 | cmd = ' '.join(cmd) |
|
348 | 348 | |
|
349 | 349 | kwargs['shell'] = _shell |
|
350 | 350 | _p = subprocess.Popen(cmd, |
|
351 | 351 | bufsize=-1, |
|
352 | 352 | stdin=inputstream, |
|
353 | 353 | stdout=subprocess.PIPE, |
|
354 | 354 | stderr=subprocess.PIPE, |
|
355 | 355 | **kwargs |
|
356 | 356 | ) |
|
357 | 357 | |
|
358 | 358 | bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size, starting_values) |
|
359 | 359 | bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True) |
|
360 | 360 | |
|
361 | 361 | while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length: |
|
362 | 362 | # doing this until we reach either end of file, or end of buffer. |
|
363 | 363 | bg_out.data_added_event.wait(1) |
|
364 | 364 | bg_out.data_added_event.clear() |
|
365 | 365 | |
|
366 | 366 | # at this point it's still ambiguous if we are done reading or just full buffer. |
|
367 | 367 | # Either way, if error (returned by ended process, or implied based on |
|
368 | 368 | # presence of stuff in stderr output) we error out. |
|
369 | 369 | # Else, we are happy. |
|
370 | 370 | _returncode = _p.poll() |
|
371 | 371 | if _returncode or (_returncode == None and bg_err.length): |
|
372 | 372 | try: |
|
373 | 373 | _p.terminate() |
|
374 | 374 | except: |
|
375 | 375 | pass |
|
376 | 376 | bg_out.stop() |
|
377 | 377 | bg_err.stop() |
|
378 | 378 | err = '%s' % ''.join(bg_err) |
|
379 | 379 | if err: |
|
380 | 380 | raise EnvironmentError("Subprocess exited due to an error:\n" + err) |
|
381 | 381 | raise EnvironmentError("Subprocess exited with non 0 ret code:%s" % _returncode) |
|
382 | 382 | |
|
383 | 383 | self.process = _p |
|
384 | 384 | self.output = bg_out |
|
385 | 385 | self.error = bg_err |
|
386 | 386 | |
|
387 | 387 | def __iter__(self): |
|
388 | 388 | return self |
|
389 | 389 | |
|
390 | 390 | def next(self): |
|
391 | 391 | if self.process.poll(): |
|
392 | 392 | err = '%s' % ''.join(self.error) |
|
393 | 393 | raise EnvironmentError("Subprocess exited due to an error:\n" + err) |
|
394 | 394 | return self.output.next() |
|
395 | 395 | |
|
396 | 396 | def throw(self, type, value=None, traceback=None): |
|
397 | 397 | if self.output.length or not self.output.done_reading: |
|
398 | 398 | raise type(value) |
|
399 | 399 | |
|
400 | 400 | def close(self): |
|
401 | 401 | try: |
|
402 | 402 | self.process.terminate() |
|
403 | 403 | except: |
|
404 | 404 | pass |
|
405 | 405 | try: |
|
406 | 406 | self.output.close() |
|
407 | 407 | except: |
|
408 | 408 | pass |
|
409 | 409 | try: |
|
410 | 410 | self.error.close() |
|
411 | 411 | except: |
|
412 | 412 | pass |
|
413 | 413 | |
|
414 | 414 | def __del__(self): |
|
415 | 415 | self.close() |
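
Expanding the commented example from the class docstring into a runnable sketch (`git --version` is an illustrative command only):

```python
import sys

try:
    answer = SubprocessIOChunker(
        'git --version',
        inputstream=None,
        buffer_size=65536,
        chunk_size=4096,
    )
except EnvironmentError as e:
    print str(e)
    raise
for chunk in answer:  # iterate exactly as a WSGI server would
    sys.stdout.write(chunk)
```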
@@ -1,707 +1,707 @@
|
1 | 1 | from __future__ import with_statement |
|
2 | 2 | |
|
3 | 3 | import os |
|
4 | 4 | import mock |
|
5 | 5 | import datetime |
|
6 | 6 | from rhodecode.lib.vcs.backends.git import GitRepository, GitChangeset |
|
7 | 7 | from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError |
|
8 | 8 | from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState |
|
9 | 9 | from rhodecode.lib.vcs.utils.compat import unittest |
|
10 | 10 | from rhodecode.tests.vcs.base import BackendTestMixin |
|
11 | 11 | from rhodecode.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | class GitRepositoryTest(unittest.TestCase): |
|
15 | 15 | |
|
16 | 16 | def __check_for_existing_repo(self): |
|
17 | 17 | if os.path.exists(TEST_GIT_REPO_CLONE): |
|
18 | 18 | self.fail('Cannot test git clone repo as location %s already ' |
|
19 | 19 | 'exists. You should manually remove it first.' |
|
20 | 20 | % TEST_GIT_REPO_CLONE) |
|
21 | 21 | |
|
22 | 22 | def setUp(self): |
|
23 | 23 | self.repo = GitRepository(TEST_GIT_REPO) |
|
24 | 24 | |
|
25 | 25 | def test_wrong_repo_path(self): |
|
26 | 26 | wrong_repo_path = '/tmp/errorrepo' |
|
27 | 27 | self.assertRaises(RepositoryError, GitRepository, wrong_repo_path) |
|
28 | 28 | |
|
29 | 29 | def test_repo_clone(self): |
|
30 | 30 | self.__check_for_existing_repo() |
|
31 | 31 | repo = GitRepository(TEST_GIT_REPO) |
|
32 | 32 | repo_clone = GitRepository(TEST_GIT_REPO_CLONE, |
|
33 | 33 | src_url=TEST_GIT_REPO, create=True, update_after_clone=True) |
|
34 | 34 | self.assertEqual(len(repo.revisions), len(repo_clone.revisions)) |
|
35 | 35 | # Checking hashes of changesets should be enough |
|
36 | 36 | for changeset in repo.get_changesets(): |
|
37 | 37 | raw_id = changeset.raw_id |
|
38 | 38 | self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id) |
|
39 | 39 | |
|
40 | 40 | def test_repo_clone_without_create(self): |
|
41 | 41 | self.assertRaises(RepositoryError, GitRepository, |
|
42 | 42 | TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO) |
|
43 | 43 | |
|
44 | 44 | def test_repo_clone_with_update(self): |
|
45 | 45 | repo = GitRepository(TEST_GIT_REPO) |
|
46 | 46 | clone_path = TEST_GIT_REPO_CLONE + '_with_update' |
|
47 | 47 | repo_clone = GitRepository(clone_path, |
|
48 | 48 | create=True, src_url=TEST_GIT_REPO, update_after_clone=True) |
|
49 | 49 | self.assertEqual(len(repo.revisions), len(repo_clone.revisions)) |
|
50 | 50 | |
|
51 | 51 | #check if current workdir was updated |
|
52 | 52 | fpath = os.path.join(clone_path, 'MANIFEST.in') |
|
53 | 53 | self.assertEqual(True, os.path.isfile(fpath), |
|
54 | 54 | 'Repo was cloned and updated but file %s could not be found' |
|
55 | 55 | % fpath) |
|
56 | 56 | |
|
57 | 57 | def test_repo_clone_without_update(self): |
|
58 | 58 | repo = GitRepository(TEST_GIT_REPO) |
|
59 | 59 | clone_path = TEST_GIT_REPO_CLONE + '_without_update' |
|
60 | 60 | repo_clone = GitRepository(clone_path, |
|
61 | 61 | create=True, src_url=TEST_GIT_REPO, update_after_clone=False) |
|
62 | 62 | self.assertEqual(len(repo.revisions), len(repo_clone.revisions)) |
|
63 | 63 | #check if current workdir was *NOT* updated |
|
64 | 64 | fpath = os.path.join(clone_path, 'MANIFEST.in') |
|
65 | 65 | # Make sure it's not bare repo |
|
66 | 66 | self.assertFalse(repo_clone._repo.bare) |
|
67 | 67 | self.assertEqual(False, os.path.isfile(fpath), |
|
68 | 68 | 'Repo was cloned and updated but file %s was found' |
|
69 | 69 | % fpath) |
|
70 | 70 | |
|
71 | 71 | def test_repo_clone_into_bare_repo(self): |
|
72 | 72 | repo = GitRepository(TEST_GIT_REPO) |
|
73 | 73 | clone_path = TEST_GIT_REPO_CLONE + '_bare.git' |
|
74 | 74 | repo_clone = GitRepository(clone_path, create=True, |
|
75 | 75 | src_url=repo.path, bare=True) |
|
76 | 76 | self.assertTrue(repo_clone._repo.bare) |
|
77 | 77 | |
|
78 | 78 | def test_create_repo_is_not_bare_by_default(self): |
|
79 | 79 | repo = GitRepository(get_new_dir('not-bare-by-default'), create=True) |
|
80 | 80 | self.assertFalse(repo._repo.bare) |
|
81 | 81 | |
|
82 | 82 | def test_create_bare_repo(self): |
|
83 | 83 | repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True) |
|
84 | 84 | self.assertTrue(repo._repo.bare) |
|
85 | 85 | |
|
86 | 86 | def test_revisions(self): |
|
87 | 87 | # there are 112 revisions (by now) |
|
88 | 88 | # so we can assume they would be available from now on |
|
89 | 89 | subset = set([ |
|
90 | 90 | 'c1214f7e79e02fc37156ff215cd71275450cffc3', |
|
91 | 91 | '38b5fe81f109cb111f549bfe9bb6b267e10bc557', |
|
92 | 92 | 'fa6600f6848800641328adbf7811fd2372c02ab2', |
|
93 | 93 | '102607b09cdd60e2793929c4f90478be29f85a17', |
|
94 | 94 | '49d3fd156b6f7db46313fac355dca1a0b94a0017', |
|
95 | 95 | '2d1028c054665b962fa3d307adfc923ddd528038', |
|
96 | 96 | 'd7e0d30fbcae12c90680eb095a4f5f02505ce501', |
|
97 | 97 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
98 | 98 | 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', |
|
99 | 99 | '8430a588b43b5d6da365400117c89400326e7992', |
|
100 | 100 | 'd955cd312c17b02143c04fa1099a352b04368118', |
|
101 | 101 | 'f67b87e5c629c2ee0ba58f85197e423ff28d735b', |
|
102 | 102 | 'add63e382e4aabc9e1afdc4bdc24506c269b7618', |
|
103 | 103 | 'f298fe1189f1b69779a4423f40b48edf92a703fc', |
|
104 | 104 | 'bd9b619eb41994cac43d67cf4ccc8399c1125808', |
|
105 | 105 | '6e125e7c890379446e98980d8ed60fba87d0f6d1', |
|
106 | 106 | 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd', |
|
107 | 107 | '0b05e4ed56c802098dfc813cbe779b2f49e92500', |
|
108 | 108 | '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
109 | 109 | '45223f8f114c64bf4d6f853e3c35a369a6305520', |
|
110 | 110 | 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', |
|
111 | 111 | 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68', |
|
112 | 112 | '27d48942240f5b91dfda77accd2caac94708cc7d', |
|
113 | 113 | '622f0eb0bafd619d2560c26f80f09e3b0b0d78af', |
|
114 | 114 | 'e686b958768ee96af8029fe19c6050b1a8dd3b2b']) |
|
115 | 115 | self.assertTrue(subset.issubset(set(self.repo.revisions))) |
|
116 | 116 | |
|
117 | 117 | |
|
118 | 118 | |
|
119 | 119 | def test_slicing(self): |
|
120 | 120 | #4 1 5 10 95 |
|
121 | 121 | for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), |
|
122 | 122 | (10, 20, 10), (5, 100, 95)]: |
|
123 | 123 | revs = list(self.repo[sfrom:sto]) |
|
124 | 124 | self.assertEqual(len(revs), size) |
|
125 | 125 | self.assertEqual(revs[0], self.repo.get_changeset(sfrom)) |
|
126 | 126 | self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1)) |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | def test_branches(self): |
|
130 | 130 | # TODO: Need more tests here |
|
131 | 131 | # Removed (those are 'remotes' branches for cloned repo) |
|
132 | 132 | #self.assertTrue('master' in self.repo.branches) |
|
133 | 133 | #self.assertTrue('gittree' in self.repo.branches) |
|
134 | 134 | #self.assertTrue('web-branch' in self.repo.branches) |
|
135 | 135 | for name, id in self.repo.branches.items(): |
|
136 | 136 | self.assertTrue(isinstance( |
|
137 | 137 | self.repo.get_changeset(id), GitChangeset)) |
|
138 | 138 | |
|
139 | 139 | def test_tags(self): |
|
140 | 140 | # TODO: Need more tests here |
|
141 | 141 | self.assertTrue('v0.1.1' in self.repo.tags) |
|
142 | 142 | self.assertTrue('v0.1.2' in self.repo.tags) |
|
143 | 143 | for name, id in self.repo.tags.items(): |
|
144 | 144 | self.assertTrue(isinstance( |
|
145 | 145 | self.repo.get_changeset(id), GitChangeset)) |
|
146 | 146 | |
|
147 | 147 | def _test_single_changeset_cache(self, revision): |
|
148 | 148 | chset = self.repo.get_changeset(revision) |
|
149 | 149 | self.assertTrue(revision in self.repo.changesets) |
|
150 | 150 | self.assertTrue(chset is self.repo.changesets[revision]) |
|
151 | 151 | |
|
152 | 152 | def test_initial_changeset(self): |
|
153 | 153 | id = self.repo.revisions[0] |
|
154 | 154 | init_chset = self.repo.get_changeset(id) |
|
155 | 155 | self.assertEqual(init_chset.message, 'initial import\n') |
|
156 | 156 | self.assertEqual(init_chset.author, |
|
157 | 157 | 'Marcin Kuzminski <marcin@python-blog.com>') |
|
158 | 158 | for path in ('vcs/__init__.py', |
|
159 | 159 | 'vcs/backends/BaseRepository.py', |
|
160 | 160 | 'vcs/backends/__init__.py'): |
|
161 | 161 | self.assertTrue(isinstance(init_chset.get_node(path), FileNode)) |
|
162 | 162 | for path in ('', 'vcs', 'vcs/backends'): |
|
163 | 163 | self.assertTrue(isinstance(init_chset.get_node(path), DirNode)) |
|
164 | 164 | |
|
165 | 165 | self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar') |
|
166 | 166 | |
|
167 | 167 | node = init_chset.get_node('vcs/') |
|
168 | 168 | self.assertTrue(hasattr(node, 'kind')) |
|
169 | 169 | self.assertEqual(node.kind, NodeKind.DIR) |
|
170 | 170 | |
|
171 | 171 | node = init_chset.get_node('vcs') |
|
172 | 172 | self.assertTrue(hasattr(node, 'kind')) |
|
173 | 173 | self.assertEqual(node.kind, NodeKind.DIR) |
|
174 | 174 | |
|
175 | 175 | node = init_chset.get_node('vcs/__init__.py') |
|
176 | 176 | self.assertTrue(hasattr(node, 'kind')) |
|
177 | 177 | self.assertEqual(node.kind, NodeKind.FILE) |
|
178 | 178 | |
|
179 | 179 | def test_not_existing_changeset(self): |
|
180 | 180 | self.assertRaises(RepositoryError, self.repo.get_changeset, |
|
181 | 181 | 'f' * 40) |
|
182 | 182 | |
|
183 | 183 | def test_changeset10(self): |
|
184 | 184 | |
|
185 | 185 | chset10 = self.repo.get_changeset(self.repo.revisions[9]) |
|
186 | 186 | README = """=== |
|
187 | 187 | VCS |
|
188 | 188 | === |
|
189 | 189 | |
|
190 | 190 | Various Version Control System management abstraction layer for Python. |
|
191 | 191 | |
|
192 | 192 | Introduction |
|
193 | 193 | ------------ |
|
194 | 194 | |
|
195 | 195 | TODO: To be written... |
|
196 | 196 | |
|
197 | 197 | """ |
|
198 | 198 | node = chset10.get_node('README.rst') |
|
199 | 199 | self.assertEqual(node.kind, NodeKind.FILE) |
|
200 | 200 | self.assertEqual(node.content, README) |
|
201 | 201 | |
|
202 | 202 | |
|
203 | 203 | class GitChangesetTest(unittest.TestCase): |
|
204 | 204 | |
|
205 | 205 | def setUp(self): |
|
206 | 206 | self.repo = GitRepository(TEST_GIT_REPO) |
|
207 | 207 | |
|
208 | 208 | def test_default_changeset(self): |
|
209 | 209 | tip = self.repo.get_changeset() |
|
210 | 210 | self.assertEqual(tip, self.repo.get_changeset(None)) |
|
211 | 211 | self.assertEqual(tip, self.repo.get_changeset('tip')) |
|
212 | 212 | |
|
213 | 213 | def test_root_node(self): |
|
214 | 214 | tip = self.repo.get_changeset() |
|
215 | 215 | self.assertTrue(tip.root is tip.get_node('')) |
|
216 | 216 | |
|
217 | 217 | def test_lazy_fetch(self): |
|
218 | 218 | """ |
|
219 | 219 | Test that a changeset's nodes expand and are cached as we walk through

220 | 220 | the revision. This test is somewhat hard to write, as the order of

221 | 221 | operations is key here. It was written by running command after command in a shell.
|
222 | 222 | """ |
|
223 | 223 | hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' |
|
224 | 224 | self.assertTrue(hex in self.repo.revisions) |
|
225 | 225 | chset = self.repo.get_changeset(hex) |
|
226 | 226 | self.assertTrue(len(chset.nodes) == 0) |
|
227 | 227 | root = chset.root |
|
228 | 228 | self.assertTrue(len(chset.nodes) == 1) |
|
229 | 229 | self.assertTrue(len(root.nodes) == 8) |
|
230 | 230 | # accessing root.nodes updates chset.nodes |
|
231 | 231 | self.assertTrue(len(chset.nodes) == 9) |
|
232 | 232 | |
|
233 | 233 | docs = root.get_node('docs') |
|
234 | 234 | # we haven't yet accessed anything new as docs dir was already cached |
|
235 | 235 | self.assertTrue(len(chset.nodes) == 9) |
|
236 | 236 | self.assertTrue(len(docs.nodes) == 8) |
|
237 | 237 | # accessing docs.nodes updates chset.nodes |
|
238 | 238 | self.assertTrue(len(chset.nodes) == 17) |
|
239 | 239 | |
|
240 | 240 | self.assertTrue(docs is chset.get_node('docs')) |
|
241 | 241 | self.assertTrue(docs is root.nodes[0]) |
|
242 | 242 | self.assertTrue(docs is root.dirs[0]) |
|
243 | 243 | self.assertTrue(docs is chset.get_node('docs')) |
|
244 | 244 | |
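The caching contract that test_lazy_fetch exercises can be summarised in a few lines. The sketch below is illustrative only - LazyChangeset and _fetch_node are made-up names, not the real vcs API:

    class LazyChangeset(object):
        """Illustrative sketch of lazy node caching, not the real vcs class."""

        def __init__(self):
            self.nodes = {}  # path -> node, filled on first access

        def get_node(self, path):
            path = path.rstrip('/')  # 'vcs/' and 'vcs' resolve to the same node
            if path not in self.nodes:
                # Hit the object store only once per path; later calls return
                # the very same instance, which is why the tests can compare
                # nodes with ``is`` rather than ``==``.
                self.nodes[path] = self._fetch_node(path)
            return self.nodes[path]

        def _fetch_node(self, path):
            raise NotImplementedError  # backend-specific lookup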
|
245 | 245 | def test_nodes_with_changeset(self): |
|
246 | 246 | hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' |
|
247 | 247 | chset = self.repo.get_changeset(hex) |
|
248 | 248 | root = chset.root |
|
249 | 249 | docs = root.get_node('docs') |
|
250 | 250 | self.assertTrue(docs is chset.get_node('docs')) |
|
251 | 251 | api = docs.get_node('api') |
|
252 | 252 | self.assertTrue(api is chset.get_node('docs/api')) |
|
253 | 253 | index = api.get_node('index.rst') |
|
254 | 254 | self.assertTrue(index is chset.get_node('docs/api/index.rst')) |
|
255 | 255 | self.assertTrue(index is chset.get_node('docs')\ |
|
256 | 256 | .get_node('api')\ |
|
257 | 257 | .get_node('index.rst')) |
|
258 | 258 | |
|
259 | 259 | def test_branch_and_tags(self): |
|
260 | 260 | """
|
261 | 261 | rev0 = self.repo.revisions[0] |
|
262 | 262 | chset0 = self.repo.get_changeset(rev0) |
|
263 | 263 | self.assertEqual(chset0.branch, 'master') |
|
264 | 264 | self.assertEqual(chset0.tags, []) |
|
265 | 265 | |
|
266 | 266 | rev10 = self.repo.revisions[10] |
|
267 | 267 | chset10 = self.repo.get_changeset(rev10) |
|
268 | 268 | self.assertEqual(chset10.branch, 'master') |
|
269 | 269 | self.assertEqual(chset10.tags, []) |
|
270 | 270 | |
|
271 | 271 | rev44 = self.repo.revisions[44] |
|
272 | 272 | chset44 = self.repo.get_changeset(rev44) |
|
273 | 273 | self.assertEqual(chset44.branch, 'web-branch') |
|
274 | 274 | |
|
275 | 275 | tip = self.repo.get_changeset('tip') |
|
276 | 276 | self.assertTrue('tip' in tip.tags) |
|
277 | 277 | """
|
278 | 278 | # These tests would fail - branches are about to be changed in the

279 | 279 | # main API in order to support the git backend
|
280 | 280 | pass |
|
281 | 281 | |
|
282 | 282 | def _test_slices(self, limit, offset): |
|
283 | 283 | count = self.repo.count() |
|
284 | 284 | changesets = self.repo.get_changesets(limit=limit, offset=offset) |
|
285 | 285 | idx = 0 |
|
286 | 286 | for changeset in changesets: |
|
287 | 287 | rev = offset + idx |
|
288 | 288 | idx += 1 |
|
289 | 289 | rev_id = self.repo.revisions[rev] |
|
290 | 290 | if idx > limit: |
|
291 | 291 | self.fail("Exceeded limit already (getting revision %s, " |
|
292 | 292 | "there are %s total revisions, offset=%s, limit=%s)" |
|
293 | 293 | % (rev_id, count, offset, limit)) |
|
294 | 294 | self.assertEqual(changeset, self.repo.get_changeset(rev_id)) |
|
295 | 295 | result = list(self.repo.get_changesets(limit=limit, offset=offset)) |
|
296 | 296 | start = offset |
|
297 | 297 | end = offset + limit if limit else None
|
298 | 298 | sliced = list(self.repo[start:end]) |
|
299 | 299 | self.assertEqual(result, sliced,

300 | 300 | msg="Comparison failed for limit=%s, offset=%s "

301 | 301 | "(get_changesets returned: %s and sliced: %s)"

302 | 302 | % (limit, offset, result, sliced))
|
303 | 303 | |
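For clarity, the equivalence asserted above between get_changesets(limit=..., offset=...) and extended slicing can be written out directly. This is a sketch of the arithmetic only, under the same falsy-limit convention the test uses:

    def slice_bounds(limit, offset):
        # get_changesets(limit=l, offset=o) should match repo[o:o + l];
        # a falsy limit means "no upper bound", i.e. repo[o:]
        start = offset
        end = offset + limit if limit else None
        return start, end

    assert slice_bounds(10, 5) == (5, 15)
    assert slice_bounds(None, 5) == (5, None)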
|
304 | 304 | def _test_file_size(self, revision, path, size): |
|
305 | 305 | node = self.repo.get_changeset(revision).get_node(path) |
|
306 | 306 | self.assertTrue(node.is_file()) |
|
307 | 307 | self.assertEqual(node.size, size) |
|
308 | 308 | |
|
309 | 309 | def test_file_size(self): |
|
310 | 310 | to_check = ( |
|
311 | 311 | ('c1214f7e79e02fc37156ff215cd71275450cffc3', |
|
312 | 312 | 'vcs/backends/BaseRepository.py', 502), |
|
313 | 313 | ('d7e0d30fbcae12c90680eb095a4f5f02505ce501', |
|
314 | 314 | 'vcs/backends/hg.py', 854), |
|
315 | 315 | ('6e125e7c890379446e98980d8ed60fba87d0f6d1', |
|
316 | 316 | 'setup.py', 1068), |
|
317 | 317 | |
|
318 | 318 | ('d955cd312c17b02143c04fa1099a352b04368118', |
|
319 | 319 | 'vcs/backends/base.py', 2921), |
|
320 | 320 | ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', |
|
321 | 321 | 'vcs/backends/base.py', 3936), |
|
322 | 322 | ('f50f42baeed5af6518ef4b0cb2f1423f3851a941', |
|
323 | 323 | 'vcs/backends/base.py', 6189), |
|
324 | 324 | ) |
|
325 | 325 | for revision, path, size in to_check: |
|
326 | 326 | self._test_file_size(revision, path, size) |
|
327 | 327 | |
|
328 | 328 | def test_file_history(self): |
|
329 | 329 | # we can only check whether those revisions are present in the history,

330 | 330 | # as we cannot update this test every time the file is changed
|
331 | 331 | files = { |
|
332 | 332 | 'setup.py': [ |
|
333 | 333 | '54386793436c938cff89326944d4c2702340037d', |
|
334 | 334 | '51d254f0ecf5df2ce50c0b115741f4cf13985dab', |
|
335 | 335 | '998ed409c795fec2012b1c0ca054d99888b22090', |
|
336 | 336 | '5e0eb4c47f56564395f76333f319d26c79e2fb09', |
|
337 | 337 | '0115510b70c7229dbc5dc49036b32e7d91d23acd', |
|
338 | 338 | '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e', |
|
339 | 339 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
340 | 340 | '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
341 | 341 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
342 | 342 | ], |
|
343 | 343 | 'vcs/nodes.py': [ |
|
344 | 344 | '33fa3223355104431402a888fa77a4e9956feb3e', |
|
345 | 345 | 'fa014c12c26d10ba682fadb78f2a11c24c8118e1', |
|
346 | 346 | 'e686b958768ee96af8029fe19c6050b1a8dd3b2b', |
|
347 | 347 | 'ab5721ca0a081f26bf43d9051e615af2cc99952f', |
|
348 | 348 | 'c877b68d18e792a66b7f4c529ea02c8f80801542', |
|
349 | 349 | '4313566d2e417cb382948f8d9d7c765330356054', |
|
350 | 350 | '6c2303a793671e807d1cfc70134c9ca0767d98c2', |
|
351 | 351 | '54386793436c938cff89326944d4c2702340037d', |
|
352 | 352 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
353 | 353 | '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b', |
|
354 | 354 | '2d03ca750a44440fb5ea8b751176d1f36f8e8f46', |
|
355 | 355 | '2a08b128c206db48c2f0b8f70df060e6db0ae4f8', |
|
356 | 356 | '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b', |
|
357 | 357 | 'ac71e9503c2ca95542839af0ce7b64011b72ea7c', |
|
358 | 358 | '12669288fd13adba2a9b7dd5b870cc23ffab92d2', |
|
359 | 359 | '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382', |
|
360 | 360 | '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5', |
|
361 | 361 | '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', |
|
362 | 362 | 'f50f42baeed5af6518ef4b0cb2f1423f3851a941', |
|
363 | 363 | 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25', |
|
364 | 364 | 'f15c21f97864b4f071cddfbf2750ec2e23859414', |
|
365 | 365 | 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade', |
|
366 | 366 | 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b', |
|
367 | 367 | '84dec09632a4458f79f50ddbbd155506c460b4f9', |
|
368 | 368 | '0115510b70c7229dbc5dc49036b32e7d91d23acd', |
|
369 | 369 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
370 | 370 | '3bf1c5868e570e39569d094f922d33ced2fa3b2b', |
|
371 | 371 | 'b8d04012574729d2c29886e53b1a43ef16dd00a1', |
|
372 | 372 | '6970b057cffe4aab0a792aa634c89f4bebf01441', |
|
373 | 373 | 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', |
|
374 | 374 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
375 | 375 | ], |
|
376 | 376 | 'vcs/backends/git.py': [ |
|
377 | 377 | '4cf116ad5a457530381135e2f4c453e68a1b0105', |
|
378 | 378 | '9a751d84d8e9408e736329767387f41b36935153', |
|
379 | 379 | 'cb681fb539c3faaedbcdf5ca71ca413425c18f01', |
|
380 | 380 | '428f81bb652bcba8d631bce926e8834ff49bdcc6', |
|
381 | 381 | '180ab15aebf26f98f714d8c68715e0f05fa6e1c7', |
|
382 | 382 | '2b8e07312a2e89e92b90426ab97f349f4bce2a3a', |
|
383 | 383 | '50e08c506174d8645a4bb517dd122ac946a0f3bf', |
|
384 | 384 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
385 | 385 | ], |
|
386 | 386 | } |
|
387 | 387 | for path, revs in files.items(): |
|
388 | 388 | node = self.repo.get_changeset(revs[0]).get_node(path) |
|
389 | 389 | node_revs = [chset.raw_id for chset in node.history] |
|
390 | 390 | self.assertTrue(set(revs).issubset(set(node_revs)), |
|
391 | 391 | "We assumed that %s is a subset of the revisions in which file %s "

392 | 392 | "has been changed, but the history of that node returned: %s"

393 | 393 | % (revs, path, node_revs))
|
394 | 394 | |
|
395 | 395 | def test_file_annotate(self): |
|
396 | 396 | files = { |
|
397 | 397 | 'vcs/backends/__init__.py': { |
|
398 | 398 | 'c1214f7e79e02fc37156ff215cd71275450cffc3': { |
|
399 | 399 | 'lines_no': 1, |
|
400 | 400 | 'changesets': [ |
|
401 | 401 | 'c1214f7e79e02fc37156ff215cd71275450cffc3', |
|
402 | 402 | ], |
|
403 | 403 | }, |
|
404 | 404 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': { |
|
405 | 405 | 'lines_no': 21, |
|
406 | 406 | 'changesets': [ |
|
407 | 407 | '49d3fd156b6f7db46313fac355dca1a0b94a0017', |
|
408 | 408 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
409 | 409 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
410 | 410 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
411 | 411 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
412 | 412 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
413 | 413 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
414 | 414 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
415 | 415 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
416 | 416 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
417 | 417 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
418 | 418 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
419 | 419 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
420 | 420 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
421 | 421 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
422 | 422 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
423 | 423 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
424 | 424 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
425 | 425 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
426 | 426 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
427 | 427 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
428 | 428 | ], |
|
429 | 429 | }, |
|
430 | 430 | 'e29b67bd158580fc90fc5e9111240b90e6e86064': { |
|
431 | 431 | 'lines_no': 32, |
|
432 | 432 | 'changesets': [ |
|
433 | 433 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
434 | 434 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
435 | 435 | '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', |
|
436 | 436 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
437 | 437 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
438 | 438 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
439 | 439 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
440 | 440 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
441 | 441 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
442 | 442 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
443 | 443 | '78c3f0c23b7ee935ec276acb8b8212444c33c396', |
|
444 | 444 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
445 | 445 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
446 | 446 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
447 | 447 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
448 | 448 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
449 | 449 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
450 | 450 | '78c3f0c23b7ee935ec276acb8b8212444c33c396', |
|
451 | 451 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
452 | 452 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
453 | 453 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
454 | 454 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
455 | 455 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
456 | 456 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
457 | 457 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
458 | 458 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
459 | 459 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
460 | 460 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
461 | 461 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
462 | 462 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
463 | 463 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
464 | 464 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
465 | 465 | ], |
|
466 | 466 | }, |
|
467 | 467 | }, |
|
468 | 468 | } |
|
469 | 469 | |
|
470 | 470 | for fname, revision_dict in files.items(): |
|
471 | 471 | for rev, data in revision_dict.items(): |
|
472 | 472 | cs = self.repo.get_changeset(rev) |
|
473 | 473 | |
|
474 | 474 | l1_1 = [x[1] for x in cs.get_file_annotate(fname)] |
|
475 | 475 | l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)] |
|
476 | 476 | self.assertEqual(l1_1, l1_2) |
|
477 | 477 | l1 = l1_1 |
|
478 | 478 | l2 = files[fname][rev]['changesets'] |
|
479 | 479 | self.assertEqual(l1, l2, "The lists of revisions for %s@rev %s "

480 | 480 | "from annotation list should match each other, "

481 | 481 | "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
|
482 | 482 | |
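As the indexing in test_file_annotate implies, each annotation entry carries a revision id at index 1 and a callable at index 2 that lazily loads the matching changeset (index 0 is presumably the line number). A minimal consumer, with the tuple shape inferred from the test rather than from documented API, and assuming repo is a GitRepository instance as in setUp:

    cs = repo.get_changeset('16fba1ae9334d79b66d7afed2c2dfbfa2ae53647')
    for entry in cs.get_file_annotate('vcs/backends/__init__.py'):
        raw_id = entry[1]       # revision the annotated line comes from
        changeset = entry[2]()  # loaded lazily, only when actually called
        assert changeset.raw_id == raw_id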
|
483 | 483 | def test_files_state(self): |
|
484 | 484 | """ |
|
485 | 485 | Tests state of FileNodes. |
|
486 | 486 | """ |
|
487 | 487 | node = self.repo\ |
|
488 | 488 | .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\ |
|
489 | 489 | .get_node('vcs/utils/diffs.py') |
|
490 | 490 | self.assertEqual(node.state, NodeState.ADDED)
|
491 | 491 | self.assertTrue(node.added) |
|
492 | 492 | self.assertFalse(node.changed) |
|
493 | 493 | self.assertFalse(node.not_changed) |
|
494 | 494 | self.assertFalse(node.removed) |
|
495 | 495 | |
|
496 | 496 | node = self.repo\ |
|
497 | 497 | .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\ |
|
498 | 498 | .get_node('.hgignore') |
|
499 | 499 | self.assertEqual(node.state, NodeState.CHANGED)
|
500 | 500 | self.assertFalse(node.added) |
|
501 | 501 | self.assertTrue(node.changed) |
|
502 | 502 | self.assertFalse(node.not_changed) |
|
503 | 503 | self.assertFalse(node.removed) |
|
504 | 504 | |
|
505 | 505 | node = self.repo\ |
|
506 | 506 | .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\ |
|
507 | 507 | .get_node('setup.py') |
|
508 | 508 | self.assertEqual(node.state, NodeState.NOT_CHANGED)
|
509 | 509 | self.assertFalse(node.added) |
|
510 | 510 | self.assertFalse(node.changed) |
|
511 | 511 | self.assertTrue(node.not_changed) |
|
512 | 512 | self.assertFalse(node.removed) |
|
513 | 513 | |
|
514 | 514 | # If a node has the REMOVED state, trying to fetch it raises a

515 | 515 | # NodeDoesNotExistError exception
|
516 | 516 | chset = self.repo.get_changeset( |
|
517 | 517 | 'fa6600f6848800641328adbf7811fd2372c02ab2') |
|
518 | 518 | path = 'vcs/backends/BaseRepository.py' |
|
519 | 519 | self.assertRaises(NodeDoesNotExistError, chset.get_node, path) |
|
520 | 520 | # but the path is listed in the changeset's ``removed`` attribute
|
521 | 521 | self.assertTrue(path in [rf.path for rf in chset.removed]) |
|
522 | 522 | |
|
523 | 523 | chset = self.repo.get_changeset( |
|
524 | 524 | '54386793436c938cff89326944d4c2702340037d') |
|
525 | 525 | changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py', |
|
526 | 526 | 'vcs/nodes.py'] |
|
527 | 527 | self.assertEqual(set(changed), set([f.path for f in chset.changed])) |
|
528 | 528 | |
|
529 | 529 | def test_commit_message_is_unicode(self): |
|
530 | 530 | for cs in self.repo: |
|
531 | 531 | self.assertEqual(type(cs.message), unicode) |
|
532 | 532 | |
|
533 | 533 | def test_changeset_author_is_unicode(self): |
|
534 | 534 | for cs in self.repo: |
|
535 | 535 | self.assertEqual(type(cs.author), unicode) |
|
536 | 536 | |
|
537 | 537 | def test_repo_files_content_is_unicode(self): |
|
538 | 538 | changeset = self.repo.get_changeset() |
|
539 | 539 | for node in changeset.get_node('/'): |
|
540 | 540 | if node.is_file(): |
|
541 | 541 | self.assertEqual(type(node.content), unicode) |
|
542 | 542 | |
|
543 | 543 | def test_wrong_path(self): |
|
544 | 544 | # There is 'setup.py' in the root dir but not there: |
|
545 | 545 | path = 'foo/bar/setup.py' |
|
546 | 546 | tip = self.repo.get_changeset() |
|
547 | 547 | self.assertRaises(VCSError, tip.get_node, path) |
|
548 | 548 | |
|
549 | 549 | def test_author_email(self): |
|
550 | 550 | self.assertEqual('marcin@python-blog.com', |
|
551 | 551 | self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\ |
|
552 | 552 | .author_email) |
|
553 | 553 | self.assertEqual('lukasz.balcerzak@python-center.pl', |
|
554 | 554 | self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\ |
|
555 | 555 | .author_email) |
|
556 | 556 | self.assertEqual('none@none', |
|
557 | 557 | self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\ |
|
558 | 558 | .author_email) |
|
559 | 559 | |
|
560 | 560 | def test_author_username(self): |
|
561 | 561 | self.assertEqual('Marcin Kuzminski', |
|
562 | 562 | self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\ |
|
563 | 563 | .author_name) |
|
564 | 564 | self.assertEqual('Lukasz Balcerzak', |
|
565 | 565 | self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\ |
|
566 | 566 | .author_name) |
|
567 | 567 | self.assertEqual('marcink', |
|
568 | 568 | self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\ |
|
569 | 569 | .author_name) |
|
570 | 570 | |
|
571 | 571 | |
|
572 | 572 | class GitSpecificTest(unittest.TestCase): |
|
573 | 573 | |
|
574 | 574 | def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self): |
|
575 | 575 | repo = mock.MagicMock() |
|
576 | 576 | changeset = GitChangeset(repo, 'foobar') |
|
577 | 577 | changeset._diff_name_status = 'foobar' |
|
578 | 578 | with self.assertRaises(VCSError): |
|
579 | 579 | changeset.added |
|
580 | 580 | |
|
581 | 581 | def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self): |
|
582 | 582 | repo = mock.MagicMock() |
|
583 | 583 | changeset = GitChangeset(repo, 'foobar') |
|
584 | 584 | changeset._diff_name_status = 'foobar' |
|
585 | 585 | with self.assertRaises(VCSError): |
|
586 | 586 | changeset.changed
|
587 | 587 | |
|
588 | 588 | def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self): |
|
589 | 589 | repo = mock.MagicMock() |
|
590 | 590 | changeset = GitChangeset(repo, 'foobar') |
|
591 | 591 | changeset._diff_name_status = 'foobar' |
|
592 | 592 | with self.assertRaises(VCSError): |
|
593 | 593 | changeset.removed
|
594 | 594 | |
|
595 | 595 | |
|
596 | 596 | class GitSpecificWithRepoTest(BackendTestMixin, unittest.TestCase): |
|
597 | 597 | backend_alias = 'git' |
|
598 | 598 | |
|
599 | 599 | @classmethod |
|
600 | 600 | def _get_commits(cls): |
|
601 | 601 | return [ |
|
602 | 602 | { |
|
603 | 603 | 'message': 'Initial', |
|
604 | 604 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
605 | 605 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
606 | 606 | 'added': [ |
|
607 | 607 | FileNode('foobar/static/js/admin/base.js', content='base'), |
|
608 | 608 | FileNode('foobar/static/admin', content='admin', |
|
609 | 609 | mode=0120000), # this is a link |
|
610 | 610 | FileNode('foo', content='foo'), |
|
611 | 611 | ], |
|
612 | 612 | }, |
|
613 | 613 | { |
|
614 | 614 | 'message': 'Second', |
|
615 | 615 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
616 | 616 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
617 | 617 | 'added': [ |
|
618 | 618 | FileNode('foo2', content='foo2'), |
|
619 | 619 | ], |
|
620 | 620 | }, |
|
621 | 621 | ] |
|
622 | 622 | |
|
623 | 623 | def test_paths_slow_traversing(self): |
|
624 | 624 | cs = self.repo.get_changeset() |
|
625 | 625 | self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js') |
|
626 | 626 | .get_node('admin').get_node('base.js').content, 'base') |
|
627 | 627 | |
|
628 | 628 | def test_paths_fast_traversing(self): |
|
629 | 629 | cs = self.repo.get_changeset() |
|
630 | 630 | self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content, |
|
631 | 631 | 'base') |
|
632 | 632 | |
|
633 | 633 | def test_workdir_get_branch(self): |
|
634 | 634 | self.repo.run_git_command('checkout -b production') |
|
635 | 635 | # Regression test: one of the following would fail if we didn't check

636 | 636 | # the .git/HEAD file
|
637 | 637 | self.repo.run_git_command('checkout production') |
|
638 | 638 | self.assertEqual(self.repo.workdir.get_branch(), 'production') |
|
639 | 639 | self.repo.run_git_command('checkout master') |
|
640 | 640 | self.assertEqual(self.repo.workdir.get_branch(), 'master') |
|
641 | 641 | |
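The .git/HEAD check mentioned in the regression comment boils down to reading a single file: on a normal checkout HEAD holds a symbolic ref such as 'ref: refs/heads/production'. A rough standalone sketch - read_head_branch is a made-up helper, not part of vcs:

    import os

    def read_head_branch(repo_path):
        with open(os.path.join(repo_path, '.git', 'HEAD')) as f:
            head = f.read().strip()
        if head.startswith('ref: refs/heads/'):
            return head[len('ref: refs/heads/'):]  # e.g. 'production'
        return None  # detached HEAD: the file holds a bare sha instead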
|
642 | 642 | def test_get_diff_runs_git_command_with_hashes(self): |
|
643 | 643 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
644 | 644 | self.repo.get_diff(0, 1) |
|
645 | 645 | self.repo.run_git_command.assert_called_once_with( |
|
646 | 646 | 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s' % |
|
647 | 647 | (3, self.repo._get_revision(0), self.repo._get_revision(1))) |
|
648 | 648 | |
|
649 | 649 | def test_get_diff_runs_git_command_with_str_hashes(self): |
|
650 | 650 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
651 | 651 | self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1) |
|
652 | 652 | self.repo.run_git_command.assert_called_once_with( |
|
653 | 653 | 'show -U%s --full-index --binary -p -M --abbrev=40 %s' % |
|
654 | 654 | (3, self.repo._get_revision(1))) |
|
655 | 655 | |
|
656 | 656 | def test_get_diff_runs_git_command_with_path_if_its_given(self): |
|
657 | 657 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
658 | 658 | self.repo.get_diff(0, 1, 'foo') |
|
659 | 659 | self.repo.run_git_command.assert_called_once_with( |
|
660 | 660 | 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s -- "foo"' |
|
661 | 661 | % (3, self.repo._get_revision(0), self.repo._get_revision(1))) |
|
662 | 662 | |
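For reference, the options asserted in the three get_diff tests above are standard git diff/show flags:

    # -U3            three lines of context around each hunk
    # --full-index   show full blob ids on the index line
    # --binary       emit binary diffs so the patch stays applyable
    # -p             generate patch output
    # -M             detect renames
    # --abbrev=40    full 40-character sha-1 hashes, no abbreviation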
|
663 | 663 | |
|
664 | 664 | class GitRegressionTest(BackendTestMixin, unittest.TestCase): |
|
665 | 665 | backend_alias = 'git' |
|
666 | 666 | |
|
667 | 667 | @classmethod |
|
668 | 668 | def _get_commits(cls): |
|
669 | 669 | return [ |
|
670 | 670 | { |
|
671 | 671 | 'message': 'Initial', |
|
672 | 672 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
673 | 673 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
674 | 674 | 'added': [ |
|
675 | 675 | FileNode('bot/__init__.py', content='base'), |
|
676 | 676 | FileNode('bot/templates/404.html', content='base'), |
|
677 | 677 | FileNode('bot/templates/500.html', content='base'), |
|
678 | 678 | ], |
|
679 | 679 | }, |
|
680 | 680 | { |
|
681 | 681 | 'message': 'Second', |
|
682 | 682 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
683 | 683 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
684 | 684 | 'added': [ |
|
685 | 685 | FileNode('bot/build/migrations/1.py', content='foo2'), |
|
686 | 686 | FileNode('bot/build/migrations/2.py', content='foo2'), |
|
687 | 687 | FileNode('bot/build/static/templates/f.html', content='foo2'), |
|
688 | 688 | FileNode('bot/build/static/templates/f1.html', content='foo2'), |
|
689 | 689 | FileNode('bot/build/templates/err.html', content='foo2'), |
|
690 | 690 | FileNode('bot/build/templates/err2.html', content='foo2'), |
|
691 | 691 | ], |
|
692 | 692 | }, |
|
693 | 693 | ] |
|
694 | 694 | |
|
695 | 695 | def test_similar_paths(self): |
|
696 | 696 | cs = self.repo.get_changeset() |
|
697 | 697 | paths = lambda *n: [x.path for x in n]
|
698 | 698 | self.assertEqual(paths(*cs.get_nodes('bot')), ['bot/build', 'bot/templates', 'bot/__init__.py']) |
|
699 | 699 | self.assertEqual(paths(*cs.get_nodes('bot/build')), ['bot/build/migrations', 'bot/build/static', 'bot/build/templates']) |
|
700 | 700 | self.assertEqual(paths(*cs.get_nodes('bot/build/static')), ['bot/build/static/templates']) |
|
701 | 701 | # this get_nodes call below is the one that caused trouble!
|
702 | 702 | self.assertEqual(paths(*cs.get_nodes('bot/build/static/templates')), ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html']) |
|
703 | 703 | self.assertEqual(paths(*cs.get_nodes('bot/build/templates')), ['bot/build/templates/err.html', 'bot/build/templates/err2.html']) |
|
704 | 704 | self.assertEqual(paths(*cs.get_nodes('bot/templates/')), ['bot/templates/404.html', 'bot/templates/500.html']) |
|
705 | 705 | |
|
706 | 706 | if __name__ == '__main__': |
|
707 | 707 | unittest.main() |