##// END OF EJS Templates
use consistent double quote docstring formatting
marcink -
r3886:a1696507 beta
parent child Browse files
Show More
@@ -1,108 +1,108 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.controllers.error
3 rhodecode.controllers.error
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 RhodeCode error controller
6 RhodeCode error controller
7
7
8 :created_on: Dec 8, 2010
8 :created_on: Dec 8, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 import os
25 import os
26 import cgi
26 import cgi
27 import logging
27 import logging
28 import paste.fileapp
28 import paste.fileapp
29
29
30 from pylons import tmpl_context as c, request, config, url
30 from pylons import tmpl_context as c, request, config, url
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from pylons.middleware import media_path
32 from pylons.middleware import media_path
33
33
34 from rhodecode.lib.base import BaseController, render
34 from rhodecode.lib.base import BaseController, render
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
class ErrorController(BaseController):
    """Generates error documents as and when they are required.

    The ErrorDocuments middleware forwards to ErrorController when error
    related status codes are returned from the application.

    This behavior can be altered by changing the parameters to the
    ErrorDocuments middleware in your config/middleware.py file.
    """

    def __before__(self):
        # disable all base actions since we don't need them here
        pass

    def document(self):
        """Render the error document for the originally failed response.

        Reads the original response stashed by the error middleware and
        exposes message/explanation/redirect info on ``c`` for the
        template.
        """
        resp = request.environ.get('pylons.original_response')
        c.rhodecode_name = config.get('rhodecode_title')

        log.debug('### %s ###' % resp.status)

        e = request.environ
        c.serv_p = r'%(protocol)s://%(host)s/' \
            % {'protocol': e.get('wsgi.url_scheme'),
               'host': e.get('HTTP_HOST'), }

        # 'code' comes straight from the query string (untrusted);
        # quote=True also escapes double quotes so the value is safe in
        # HTML attribute context, not just element content
        c.error_message = cgi.escape(
            request.GET.get('code', str(resp.status)), True)
        c.error_explanation = self.get_error_explanation(resp.status_int)

        # redirect to when error with given seconds
        c.redirect_time = 0
        c.redirect_module = _('Home page')
        c.url_redirect = "/"

        return render('/errors/error_document.html')

    def img(self, id):
        """Serve Pylons' stock images"""
        return self._serve_file(os.path.join(media_path, 'img', id))

    def style(self, id):
        """Serve Pylons' stock stylesheets"""
        return self._serve_file(os.path.join(media_path, 'style', id))

    def _serve_file(self, path):
        """Call Paste's FileApp (a WSGI application) to serve the file
        at the specified path
        """
        fapp = paste.fileapp.FileApp(path)
        return fapp(request.environ, self.start_response)

    def get_error_explanation(self, code):
        """Return a translated explanation for an int HTTP status code.

        Knows [400, 401, 403, 404, 500]; any unparsable or unknown code
        falls back to the 500 explanation (previously the method
        silently returned None for e.g. 502/503).
        """
        try:
            code = int(code)
        except Exception:
            code = 500

        explanations = {
            400: _('The request could not be understood by the server'
                   ' due to malformed syntax.'),
            401: _('Unauthorized access to resource'),
            403: _("You don't have permission to view this page"),
            404: _('The resource could not be found'),
            500: _('The server encountered an unexpected condition'
                   ' which prevented it from fulfilling the request.'),
        }
        return explanations.get(code, explanations[500])
@@ -1,284 +1,284 b''
1 """
1 """
2 Code to generate a Python model from a database or differences
2 Code to generate a Python model from a database or differences
3 between a model and database.
3 between a model and database.
4
4
5 Some of this is borrowed heavily from the AutoCode project at:
5 Some of this is borrowed heavily from the AutoCode project at:
6 http://code.google.com/p/sqlautocode/
6 http://code.google.com/p/sqlautocode/
7 """
7 """
8
8
9 import sys
9 import sys
10 import logging
10 import logging
11
11
12 import sqlalchemy
12 import sqlalchemy
13
13
14 from rhodecode.lib.dbmigrate import migrate
14 from rhodecode.lib.dbmigrate import migrate
15 from rhodecode.lib.dbmigrate.migrate import changeset
15 from rhodecode.lib.dbmigrate.migrate import changeset
16
16
17
17
18 log = logging.getLogger(__name__)
18 log = logging.getLogger(__name__)
# Boilerplate prepended to generated model files that use the classic
# Table()-based style; the generated code binds tables to ``meta``.
HEADER = """
## File autogenerated by genmodel.py

from sqlalchemy import *
meta = MetaData()
"""

# Boilerplate for generated model files that use the declarative
# extension; generated classes inherit from ``Base``.
DECLARATIVE_HEADER = """
## File autogenerated by genmodel.py

from sqlalchemy import *
from sqlalchemy.ext import declarative

Base = declarative.declarative_base()
"""
35
class ModelGenerator(object):
    """Various transformations from an A, B diff.

    In the implementation, A tends to be called the model and B
    the database (although this is not true of all diffs).
    The diff is directionless, but transformations apply the diff
    in a particular direction, described in the method name.
    """

    def __init__(self, diff, engine, declarative=False):
        # diff: schema-diff object exposing tables_missing_from_A/B,
        #   tables_different and metadataA/metadataB
        # engine: SQLAlchemy engine used when applying changes (runB2A)
        # declarative: when True, emit declarative-style source code
        self.diff = diff
        self.engine = engine
        self.declarative = declarative

    def column_repr(self, col):
        """Return source code that re-declares *col*: either a
        ``name = Column(...)`` class attribute (declarative mode) or a
        ``Column('name', ...)`` expression (classic mode).
        """
        kwarg = []
        if col.key != col.name:
            kwarg.append('key')
        if col.primary_key:
            col.primary_key = True  # otherwise it dumps it as 1
            kwarg.append('primary_key')
        if not col.nullable:
            kwarg.append('nullable')
        if col.onupdate:
            kwarg.append('onupdate')
        if col.default:
            if col.primary_key:
                # I found that PostgreSQL automatically creates a
                # default value for the sequence, but let's not show
                # that.
                pass
            else:
                kwarg.append('default')
        args = ['%s=%r' % (k, getattr(col, k)) for k in kwarg]

        # crs: not sure if this is good idea, but it gets rid of extra
        # u''
        name = col.name.encode('utf8')

        # Walk the MRO to find the most generic CamelCase type in
        # sqlalchemy.types (skipping UPPERCASE dialect-specific names),
        # so the generated code is portable across backends.
        type_ = col.type
        for cls in col.type.__class__.__mro__:
            if cls.__module__ == 'sqlalchemy.types' and \
                not cls.__name__.isupper():
                if cls is not type_.__class__:
                    type_ = cls()
                break

        type_repr = repr(type_)
        if type_repr.endswith('()'):
            # drop the empty call so e.g. "Integer()" renders as "Integer"
            type_repr = type_repr[:-2]

        constraints = [repr(cn) for cn in col.constraints]

        data = {
            'name': name,
            'commonStuff': ', '.join([type_repr] + constraints + args),
        }

        if self.declarative:
            return """%(name)s = Column(%(commonStuff)s)""" % data
        else:
            return """Column(%(name)r, %(commonStuff)s)""" % data

    def _getTableDefn(self, table, metaName='meta'):
        # Return a list of source lines that re-declare *table*,
        # referencing the metadata object named *metaName*.
        out = []
        tableName = table.name
        if self.declarative:
            out.append("class %(table)s(Base):" % {'table': tableName})
            out.append("  __tablename__ = '%(table)s'\n" %
                       {'table': tableName})
            for col in table.columns:
                out.append("  %s" % self.column_repr(col))
            out.append('\n')
        else:
            out.append("%(table)s = Table('%(table)s', %(meta)s," %
                       {'table': tableName, 'meta': metaName})
            for col in table.columns:
                out.append("  %s," % self.column_repr(col))
            out.append(")\n")
        return out

    def _get_tables(self,missingA=False,missingB=False,modified=False):
        # Generator over the table objects selected by the flags.
        # Tables missing from A are looked up in B's metadata; tables
        # missing from B and modified tables come from A's metadata.
        to_process = []  # NOTE(review): unused — dead local
        for bool_,names,metadata in (
            (missingA,self.diff.tables_missing_from_A,self.diff.metadataB),
            (missingB,self.diff.tables_missing_from_B,self.diff.metadataA),
            (modified,self.diff.tables_different,self.diff.metadataA),
        ):
            if bool_:
                for name in names:
                    yield metadata.tables.get(name)

    def genBDefinition(self):
        """Generates the source code for a definition of B.

        Assumes a diff where A is empty.

        Was: toPython. Assume database (B) is current and model (A) is empty.
        """

        out = []
        if self.declarative:
            out.append(DECLARATIVE_HEADER)
        else:
            out.append(HEADER)
        out.append("")
        for table in self._get_tables(missingA=True):
            out.extend(self._getTableDefn(table))
        return '\n'.join(out)

    def genB2AMigration(self, indent='    '):
        """Generate a migration from B to A.

        Was: toUpgradeDowngradePython
        Assume model (A) is most current and database (B) is out-of-date.

        Returns a 3-tuple of source-code strings:
        (declarations, upgrade commands, downgrade commands), with the
        command lines prefixed by *indent*.
        """

        decls = ['from migrate.changeset import schema',
                 'pre_meta = MetaData()',
                 'post_meta = MetaData()',
                 ]
        upgradeCommands = ['pre_meta.bind = migrate_engine',
                           'post_meta.bind = migrate_engine']
        downgradeCommands = list(upgradeCommands)

        # tables only in B: drop on upgrade, recreate on downgrade;
        # their definitions come from B's (pre) metadata
        for tn in self.diff.tables_missing_from_A:
            pre_table = self.diff.metadataB.tables[tn]
            decls.extend(self._getTableDefn(pre_table, metaName='pre_meta'))
            upgradeCommands.append(
                "pre_meta.tables[%(table)r].drop()" % {'table': tn})
            downgradeCommands.append(
                "pre_meta.tables[%(table)r].create()" % {'table': tn})

        # tables only in A: create on upgrade, drop on downgrade
        for tn in self.diff.tables_missing_from_B:
            post_table = self.diff.metadataA.tables[tn]
            decls.extend(self._getTableDefn(post_table, metaName='post_meta'))
            upgradeCommands.append(
                "post_meta.tables[%(table)r].create()" % {'table': tn})
            downgradeCommands.append(
                "post_meta.tables[%(table)r].drop()" % {'table': tn})

        for (tn, td) in self.diff.tables_different.iteritems():
            # declare each side's table only when it is actually
            # referenced by a column-level command below
            if td.columns_missing_from_A or td.columns_different:
                pre_table = self.diff.metadataB.tables[tn]
                decls.extend(self._getTableDefn(
                    pre_table, metaName='pre_meta'))
            if td.columns_missing_from_B or td.columns_different:
                post_table = self.diff.metadataA.tables[tn]
                decls.extend(self._getTableDefn(
                    post_table, metaName='post_meta'))

            for col in td.columns_missing_from_A:
                upgradeCommands.append(
                    'pre_meta.tables[%r].columns[%r].drop()' % (tn, col))
                downgradeCommands.append(
                    'pre_meta.tables[%r].columns[%r].create()' % (tn, col))
            for col in td.columns_missing_from_B:
                upgradeCommands.append(
                    'post_meta.tables[%r].columns[%r].create()' % (tn, col))
                downgradeCommands.append(
                    'post_meta.tables[%r].columns[%r].drop()' % (tn, col))
            # in-place column alteration is not supported: emit a
            # migration that fails loudly instead of guessing
            for modelCol, databaseCol, modelDecl, databaseDecl in td.columns_different:
                upgradeCommands.append(
                    'assert False, "Can\'t alter columns: %s:%s=>%s"' % (
                        tn, modelCol.name, databaseCol.name))
                downgradeCommands.append(
                    'assert False, "Can\'t alter columns: %s:%s=>%s"' % (
                        tn, modelCol.name, databaseCol.name))

        return (
            '\n'.join(decls),
            '\n'.join('%s%s' % (indent, line) for line in upgradeCommands),
            '\n'.join('%s%s' % (indent, line) for line in downgradeCommands))

    def _db_can_handle_this_change(self,td):
        """Check if the database can handle going from B to A."""

        if (td.columns_missing_from_B
            and not td.columns_missing_from_A
            and not td.columns_different):
            # Even sqlite can handle column additions.
            return True
        else:
            return not self.engine.url.drivername.startswith('sqlite')

    def runB2A(self):
        """Goes from B to A.

        Was: applyModel. Apply model (A) to current database (B).
        """

        meta = sqlalchemy.MetaData(self.engine)

        for table in self._get_tables(missingA=True):
            table = table.tometadata(meta)
            table.drop()
        for table in self._get_tables(missingB=True):
            table = table.tometadata(meta)
            table.create()
        for modelTable in self._get_tables(modified=True):
            tableName = modelTable.name
            modelTable = modelTable.tometadata(meta)
            dbTable = self.diff.metadataB.tables[tableName]

            td = self.diff.tables_different[tableName]

            if self._db_can_handle_this_change(td):
                # backend supports in-place column add/drop
                for col in td.columns_missing_from_B:
                    modelTable.columns[col].create()
                for col in td.columns_missing_from_A:
                    dbTable.columns[col].drop()
                # XXX handle column changes here.
            else:
                # Sqlite doesn't support drop column, so you have to
                # do more: create temp table, copy data to it, drop
                # old table, create new table, copy data back.
                #
                # I wonder if this is guaranteed to be unique?
                tempName = '_temp_%s' % modelTable.name

                def getCopyStatement():
                    # copy only columns present in both the old and the
                    # new table definitions
                    preparer = self.engine.dialect.preparer  # NOTE(review): unused — dead local
                    commonCols = []
                    for modelCol in modelTable.columns:
                        if modelCol.name in dbTable.columns:
                            commonCols.append(modelCol.name)
                    commonColsStr = ', '.join(commonCols)
                    return 'INSERT INTO %s (%s) SELECT %s FROM %s' % \
                        (tableName, commonColsStr, commonColsStr, tempName)

                # Move the data in one transaction, so that we don't
                # leave the database in a nasty state.
                connection = self.engine.connect()
                trans = connection.begin()
                try:
                    connection.execute(
                        'CREATE TEMPORARY TABLE %s as SELECT * from %s' % \
                            (tempName, modelTable.name))
                    # make sure the drop takes place inside our
                    # transaction with the bind parameter
                    modelTable.drop(bind=connection)
                    modelTable.create(bind=connection)
                    connection.execute(getCopyStatement())
                    connection.execute('DROP TABLE %s' % tempName)
                    trans.commit()
                except:
                    trans.rollback()
                    raise
@@ -1,295 +1,295 b''
1 """
1 """
2 Schema differencing support.
2 Schema differencing support.
3 """
3 """
4
4
5 import logging
5 import logging
6 import sqlalchemy
6 import sqlalchemy
7
7
8 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_06
8 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_06
9 from sqlalchemy.types import Float
9 from sqlalchemy.types import Float
10
10
11 log = logging.getLogger(__name__)
11 log = logging.getLogger(__name__)
12
12
13
13
def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None):
    """
    Compute how the given model differs from the live database schema.

    :return: a :class:`SchemaDiff` object that evaluates to
        :keyword:`True` when differences exist and :keyword:`False`
        otherwise.
    """
    reflected = sqlalchemy.MetaData(engine)
    reflected.reflect()

    # sqlite maintains an internal, dynamically generated
    # 'sqlite_sequence' table whenever autoincrement sequences exist in
    # the database; it is bookkeeping rather than schema, so it must
    # not take part in the comparison.
    if engine.dialect.name == 'sqlite':
        seq_table = reflected.tables.get('sqlite_sequence')
        if seq_table is not None:
            reflected.remove(seq_table)

    return SchemaDiff(metadata, reflected,
                      labelA='model',
                      labelB='database',
                      excludeTables=excludeTables)
35
35
36
36
def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None):
    """
    Compute the differences between two in-memory models.

    :return: a :class:`SchemaDiff` object that evaluates to
        :keyword:`True` when differences exist and :keyword:`False`
        otherwise.
    """
    return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables)
45
45
46
46
class ColDiff(object):
    """
    Records the differences found in one
    :class:`~sqlalchemy.schema.Column` shared by two
    :class:`~sqlalchemy.schema.Table` instances, ``A`` and ``B``.

    .. attribute:: col_A

      The :class:`~sqlalchemy.schema.Column` object for A.

    .. attribute:: col_B

      The :class:`~sqlalchemy.schema.Column` object for B.

    .. attribute:: type_A

      The most generic type of the :class:`~sqlalchemy.schema.Column`
      object in A.

    .. attribute:: type_B

      The most generic type of the :class:`~sqlalchemy.schema.Column`
      object in A.

    """

    # class-level default: no difference detected
    diff = False

    def __init__(self, col_A, col_B):
        self.col_A = col_A
        self.col_B = col_B

        self.type_A = col_A.type
        self.type_B = col_B.type

        self.affinity_A = self.type_A._type_affinity
        self.affinity_B = self.type_B._type_affinity

        # Differing type affinities are always a difference.
        if self.affinity_A is not self.affinity_B:
            self.diff = True
            return

        # A Float on exactly one side counts as a difference even when
        # the affinities agree.
        a_float = isinstance(self.type_A, Float)
        b_float = isinstance(self.type_B, Float)
        if (a_float or b_float) and not (a_float and b_float):
            self.diff = True
            return

        # Compare size-related attributes, but only when both sides
        # actually define a value for them.
        for attr in ('precision', 'scale', 'length'):
            value_A = getattr(self.type_A, attr, None)
            value_B = getattr(self.type_B, attr, None)
            if value_A is not None and value_B is not None \
                    and value_A != value_B:
                self.diff = True
                return

    def __nonzero__(self):
        # Python 2 truthiness hook: truthy when a difference was found.
        return self.diff
103
103
class TableDiff(object):
    """
    Records the differences found in one
    :class:`~sqlalchemy.schema.Table` shared by two
    :class:`~sqlalchemy.schema.MetaData` instances, ``A`` and ``B``.

    .. attribute:: columns_missing_from_A

      A sequence of column names that were found in B but weren't in
      A.

    .. attribute:: columns_missing_from_B

      A sequence of column names that were found in A but weren't in
      B.

    .. attribute:: columns_different

      A dictionary mapping column names to :class:`ColDiff` objects
      that describe how columns present on both sides differ.
    """
    # instances are plain difference records with a fixed attribute set
    __slots__ = (
        'columns_missing_from_A',
        'columns_missing_from_B',
        'columns_different',
        )

    def __nonzero__(self):
        # Python 2 truthiness hook: truthy when any category of
        # difference is non-empty.
        has_difference = (bool(self.columns_missing_from_A)
                          or bool(self.columns_missing_from_B)
                          or bool(self.columns_different))
        return has_difference
139
139
class SchemaDiff(object):
    """
    Compute the difference between two
    :class:`~sqlalchemy.schema.MetaData` objects.

    ``str()`` of a :class:`SchemaDiff` summarises the changes found;
    ``len()`` gives the number of changes, so instances can be used much
    like booleans in expressions.

    :param metadataA:
        First :class:`~sqlalchemy.schema.MetaData` to compare.

    :param metadataB:
        Second :class:`~sqlalchemy.schema.MetaData` to compare.

    :param labelA:
        Label used in messages about the first
        :class:`~sqlalchemy.schema.MetaData`.

    :param labelB:
        Label used in messages about the second
        :class:`~sqlalchemy.schema.MetaData`.

    :param excludeTables:
        A sequence of table names to exclude.

    .. attribute:: tables_missing_from_A

        Table names found in B but not in A.

    .. attribute:: tables_missing_from_B

        Table names found in A but not in B.

    .. attribute:: tables_different

        Maps table names to the :class:`TableDiff` describing how that
        table differs between the two sides.
    """

    def __init__(self,
                 metadataA, metadataB,
                 labelA='metadataA',
                 labelB='metadataB',
                 excludeTables=None):

        self.metadataA, self.metadataB = metadataA, metadataB
        self.labelA, self.labelB = labelA, labelB
        # Width used to right-align the labels in __str__ output.
        self.label_width = max(len(labelA), len(labelB))
        excluded = set(excludeTables or [])

        names_A = set(metadataA.tables.keys())
        names_B = set(metadataB.tables.keys())

        self.tables_missing_from_A = sorted(names_B - names_A - excluded)
        self.tables_missing_from_B = sorted(names_A - names_B - excluded)

        # Compare every table present on both sides, column by column.
        self.tables_different = {}
        for name in names_A.intersection(names_B):
            td = TableDiff()

            table_A = metadataA.tables[name]
            table_B = metadataB.tables[name]

            cols_A = set(table_A.columns.keys())
            cols_B = set(table_B.columns.keys())

            td.columns_missing_from_A = sorted(cols_B - cols_A)
            td.columns_missing_from_B = sorted(cols_A - cols_B)

            td.columns_different = {}
            for col in cols_A.intersection(cols_B):
                cd = ColDiff(
                    table_A.columns.get(col),
                    table_B.columns.get(col),
                )
                if cd:
                    td.columns_different[col] = cd

            # XXX - index and constraint differences should
            # be checked for here

            if td:
                self.tables_different[name] = td

    def __str__(self):
        """Summarize differences."""
        lines = []
        column_template = ' %%%is: %%r' % self.label_width

        for missing, label in (
                (self.tables_missing_from_A, self.labelA),
                (self.tables_missing_from_B, self.labelB),
        ):
            if missing:
                lines.append(
                    ' tables missing from %s: %s' % (
                        label, ', '.join(sorted(missing))
                    )
                )

        for name, td in sorted(self.tables_different.items()):
            lines.append(
                ' table with differences: %s' % name
            )
            for missing, label in (
                    (td.columns_missing_from_A, self.labelA),
                    (td.columns_missing_from_B, self.labelB),
            ):
                if missing:
                    lines.append(
                        ' %s missing these columns: %s' % (
                            label, ', '.join(sorted(missing))
                        )
                    )
            for col, cd in td.columns_different.items():
                lines.append(' column with differences: %s' % col)
                lines.append(column_template % (self.labelA, cd.col_A))
                lines.append(column_template % (self.labelB, cd.col_B))

        if lines:
            lines.insert(0, 'Schema diffs:')
            return '\n'.join(lines)
        return 'No schema diffs'

    def __len__(self):
        """
        Used in bool evaluation, return of 0 means no diffs.
        """
        return (
            len(self.tables_missing_from_A)
            + len(self.tables_missing_from_B)
            + len(self.tables_different)
        )
@@ -1,415 +1,415 b''
1 '''
1 """
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningful, non-blocking, concurrent
3 input, output, error streams into a meaningful, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa@hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 '''
24 """
25 import os
25 import os
26 import subprocess
26 import subprocess
27 from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
27 from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
28
28
29
29
class StreamFeeder(Thread):
    """
    Writing into a pipe-like object blocks once its buffer fills up.
    This daemon thread pumps data from a file-like (or string-like, or
    file-descriptor) source into a pipe without blocking the caller,
    and closes the write end once the source is exhausted.
    """
    def __init__(self, source):
        super(StreamFeeder, self).__init__()
        self.daemon = True
        is_filelike = False
        self.bytes = _bytes()
        if type(source) in (type(''), _bytes, _bytearray):  # string-like
            # Whole payload is already in memory; remember it as bytes.
            self.bytes = _bytes(source)
        else:  # either a file descriptor or a file-like object
            if type(source) in (int, long):  # file descriptor it is
                ## turn the raw descriptor (int) into a buffered file-like
                try:
                    source = os.fdopen(source, 'rb', 16384)
                except Exception:
                    pass
            # by now source should be file-like; probe for .read
            try:
                is_filelike = source.read
            except Exception:
                pass
            if not is_filelike and not self.bytes:
                raise TypeError("StreamFeeder's source object must be a readable "
                                "file-like, a file descriptor, or a string-like.")
        self.source = source
        # readiface is handed to the consumer; writeiface is fed by run().
        self.readiface, self.writeiface = os.pipe()

    def run(self):
        sink = self.writeiface
        if self.bytes:
            # In-memory payload: one shot write.
            os.write(sink, self.bytes)
        else:
            src = self.source
            chunk = src.read(4096)
            while chunk:
                os.write(sink, chunk)
                chunk = src.read(4096)
        # Closing the write end signals EOF to the pipe's reader.
        os.close(sink)

    @property
    def output(self):
        # Read end of the pipe, for the consuming subprocess/caller.
        return self.readiface
77
77
78
78
class InputStreamChunker(Thread):
    """
    Background thread that reads fixed-size chunks from a blocking source
    stream into a shared target container, pausing when the buffered chunk
    count exceeds its cap and raising the EOF event when the source ends.
    """
    def __init__(self, source, target, buffer_size, chunk_size):

        super(InputStreamChunker, self).__init__()

        self.daemon = True  # die die die.

        self.source = source
        self.target = target
        # Cap on buffered chunks before reading pauses.
        self.chunk_count_max = int(buffer_size / chunk_size) + 1
        self.chunk_size = chunk_size

        # Signals the consumer that new data arrived (or that we're done).
        self.data_added = Event()
        self.data_added.clear()

        # Cleared by us when the buffer is full; set by the consumer to resume.
        self.keep_reading = Event()
        self.keep_reading.set()

        # Set once the source is exhausted or we were stopped.
        self.EOF = Event()
        self.EOF.clear()

        # Cleared by stop() to make the read loop exit.
        self.go = Event()
        self.go.set()

    def stop(self):
        self.go.clear()
        self.EOF.set()
        try:
            # this is not proper, but is done to force the reader thread let
            # go of the input because, if successful, .close() will send EOF
            # down the pipe.
            self.source.close()
        except:
            pass

    def run(self):
        src = self.source
        sink = self.target
        size = self.chunk_size
        max_chunks = self.chunk_count_max
        resume = self.keep_reading
        added = self.data_added
        running = self.go

        try:
            chunk = src.read(size)
        except ValueError:
            chunk = ''

        while chunk and running.is_set():
            if len(sink) > max_chunks:
                resume.clear()
                resume.wait(2)
                # # this only works on 2.7.x and up
                # if not resume.wait(10):
                #     raise Exception("Timed out while waiting for input to be read.")
                # instead we'll use this
                if len(sink) > max_chunks + 3:
                    raise IOError("Timed out while waiting for input from subprocess.")
            sink.append(chunk)
            added.set()
            chunk = src.read(size)
        self.EOF.set()
        added.set()  # for cases when done but there was no input.
143
143
144
144
class BufferedGenerator():
    """
    Behaves as a non-blocking, buffered pipe reader.

    A worker thread reads chunks from a blocking pipe and appends them to a
    deque; reading pauses in the worker once the maximum number of chunks is
    buffered. ``.next()`` yields buffered chunks (blocking in short slices
    until data or EOF arrives) and raises StopIteration after the last chunk
    once the underlying source signals EOF.
    """

    def __init__(self, source, buffer_size=65536, chunk_size=4096,
                 starting_values=[], bottomless=False):

        # bottomless: bound the deque so the oldest chunks are discarded
        # (used e.g. for an ever-growing stderr stream).
        if bottomless:
            maxlen = int(buffer_size / chunk_size)
        else:
            maxlen = None

        self.data = deque(starting_values, maxlen)

        self.worker = InputStreamChunker(source, self.data, buffer_size,
                                         chunk_size)
        if starting_values:
            # Pre-seeded data counts as "data available" for consumers.
            self.worker.data_added.set()
        self.worker.start()

    ####################
    # Generator's methods
    ####################

    def __iter__(self):
        return self

    def next(self):
        # Poll in 0.2s slices until a chunk is buffered or EOF is flagged.
        while not len(self.data) and not self.worker.EOF.is_set():
            self.worker.data_added.clear()
            self.worker.data_added.wait(0.2)
        if len(self.data):
            # Consuming a chunk frees buffer space; let the worker resume.
            self.worker.keep_reading.set()
            return _bytes(self.data.popleft())
        elif self.worker.EOF.is_set():
            raise StopIteration

    def throw(self, type, value=None, traceback=None):
        if not self.worker.EOF.is_set():
            raise type(value)

    def start(self):
        self.worker.start()

    def stop(self):
        self.worker.stop()

    def close(self):
        try:
            self.worker.stop()
            self.throw(GeneratorExit)
        except (GeneratorExit, StopIteration):
            pass

    def __del__(self):
        self.close()

    ####################
    # Threaded reader's infrastructure.
    ####################
    @property
    def input(self):
        # NOTE(review): InputStreamChunker defines no attribute 'w' in this
        # file, so this looks like it would raise AttributeError — confirm.
        return self.worker.w

    @property
    def data_added_event(self):
        return self.worker.data_added

    @property
    def data_added(self):
        return self.worker.data_added.is_set()

    @property
    def reading_paused(self):
        # True while the worker is waiting for the consumer to catch up.
        return not self.worker.keep_reading.is_set()

    @property
    def done_reading_event(self):
        """
        Event set once the worker finished reading the underlying source.
        Buffered chunks may still remain to be served through .next().

        @return An Event class instance.
        """
        return self.worker.EOF

    @property
    def done_reading(self):
        """
        True once the worker finished reading the underlying source.
        Buffered chunks may still remain to be served through .next().

        @return A Bool value.
        """
        return self.worker.EOF.is_set()

    @property
    def length(self):
        """
        Number of chunks currently queued — NOT the combined byte length
        of their contents.

        __len__() cannot be meaningfully implemented: per WSGI PEP 3333 a
        __len__() result would be taken as the response length, and this
        reader, flying through a bottomless stream, only knows the length
        of what it has already seen.
        """
        return len(self.data)

    def prepend(self, x):
        self.data.appendleft(x)

    def append(self, x):
        self.data.append(x)

    def extend(self, o):
        self.data.extend(o)

    def __getitem__(self, i):
        return self.data[i]
282
282
283
283
class SubprocessIOChunker(object):
    """
    Processor class wrapping handling of subprocess IO.

    In a way, this is a "communicate()" replacement with a twist:

    - Multithreaded: writing in and reading out/err run in separate threads.
    - Concurrent (in and out) stream processing is supported.
    - The output is not a stream but a queue of read byte-string chunks;
      the object behaves as an iterable ("for chunk in obj:").
    - Non-blocking in more respects than communicate(): reading from the
      subprocess's stdout pauses when the internal buffer is full, without
      blocking the calling code (though iterating a slow-yielding subprocess
      may block until data shows up; the parallel stdin-feeding thread is
      unaffected).

    The point is to wrap subprocess interaction into an iterable that can be
    returned by a WSGI application per PEP 3333. The initializer pre-reads
    just enough of the stream to decide whether an error occurred (or is
    likely to); if not, iteration over the remaining output is left to the
    WSGI server while it completes the HTTP response.

    Real or perceived subprocess errors are raised as EnvironmentError.

    Example usage:
    # try:
    #     answer = SubprocessIOChunker(
    #         cmd,
    #         input,
    #         buffer_size = 65536,
    #         chunk_size = 4096
    #         )
    # except (EnvironmentError) as e:
    #     print str(e)
    #     raise e
    #
    # return answer
    """
    def __init__(self, cmd, inputstream=None, buffer_size=65536,
                 chunk_size=4096, starting_values=[], **kwargs):
        """
        Initializes SubprocessIOChunker

        :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
        :param inputstream: (Default: None) A file-like, string, or file pointer.
        :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
        :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
        :param starting_values: (Default: []) An array of strings to put in front of output que.
        """

        if inputstream:
            # Feed stdin from a background thread so we never block on it.
            feeder = StreamFeeder(inputstream)
            feeder.start()
            inputstream = feeder.output

        # Historical default is shell=True; list commands are flattened.
        kwargs['shell'] = kwargs.get('shell', True)
        if isinstance(cmd, (list, tuple)):
            cmd = ' '.join(cmd)

        proc = subprocess.Popen(cmd,
                                bufsize=-1,
                                stdin=inputstream,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                **kwargs
                                )

        bg_out = BufferedGenerator(proc.stdout, buffer_size, chunk_size,
                                   starting_values)
        # stderr is bottomless: keep only the most recent bytes.
        bg_err = BufferedGenerator(proc.stderr, 16000, 1, bottomless=True)

        # Pre-read until EOF, a full buffer, or stderr output, so early
        # failures surface here instead of mid-response.
        while not bg_out.done_reading and not bg_out.reading_paused \
                and not bg_err.length:
            bg_out.data_added_event.wait(1)
            bg_out.data_added_event.clear()

        # At this point it is still ambiguous whether we hit EOF or just a
        # full buffer. Either way: a non-zero return code, or stderr output
        # from a still-running process, means failure.
        returncode = proc.poll()
        if returncode or (returncode == None and bg_err.length):
            try:
                proc.terminate()
            except:
                pass
            bg_out.stop()
            bg_err.stop()
            err = '%s' % ''.join(bg_err)
            if err:
                raise EnvironmentError("Subprocess exited due to an error:\n" + err)
            raise EnvironmentError("Subprocess exited with non 0 ret code:%s" % returncode)

        self.process = proc
        self.output = bg_out
        self.error = bg_err

    def __iter__(self):
        return self

    def next(self):
        # Surface late subprocess failure as an exception mid-iteration.
        if self.process.poll():
            err = '%s' % ''.join(self.error)
            raise EnvironmentError("Subprocess exited due to an error:\n" + err)
        return self.output.next()

    def throw(self, type, value=None, traceback=None):
        if self.output.length or not self.output.done_reading:
            raise type(value)

    def close(self):
        # Best-effort teardown of the process and both stream readers.
        for closer in (self.process.terminate,
                       self.output.close,
                       self.error.close):
            try:
                closer()
            except:
                pass

    def __del__(self):
        self.close()
@@ -1,707 +1,707 b''
1 from __future__ import with_statement
1 from __future__ import with_statement
2
2
3 import os
3 import os
4 import mock
4 import mock
5 import datetime
5 import datetime
6 from rhodecode.lib.vcs.backends.git import GitRepository, GitChangeset
6 from rhodecode.lib.vcs.backends.git import GitRepository, GitChangeset
7 from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
7 from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
8 from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
8 from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
9 from rhodecode.lib.vcs.utils.compat import unittest
9 from rhodecode.lib.vcs.utils.compat import unittest
10 from rhodecode.tests.vcs.base import BackendTestMixin
10 from rhodecode.tests.vcs.base import BackendTestMixin
11 from rhodecode.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
11 from rhodecode.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
12
12
13
13
14 class GitRepositoryTest(unittest.TestCase):
14 class GitRepositoryTest(unittest.TestCase):
15
15
16 def __check_for_existing_repo(self):
16 def __check_for_existing_repo(self):
17 if os.path.exists(TEST_GIT_REPO_CLONE):
17 if os.path.exists(TEST_GIT_REPO_CLONE):
18 self.fail('Cannot test git clone repo as location %s already '
18 self.fail('Cannot test git clone repo as location %s already '
19 'exists. You should manually remove it first.'
19 'exists. You should manually remove it first.'
20 % TEST_GIT_REPO_CLONE)
20 % TEST_GIT_REPO_CLONE)
21
21
22 def setUp(self):
22 def setUp(self):
23 self.repo = GitRepository(TEST_GIT_REPO)
23 self.repo = GitRepository(TEST_GIT_REPO)
24
24
25 def test_wrong_repo_path(self):
25 def test_wrong_repo_path(self):
26 wrong_repo_path = '/tmp/errorrepo'
26 wrong_repo_path = '/tmp/errorrepo'
27 self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
27 self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
28
28
29 def test_repo_clone(self):
29 def test_repo_clone(self):
30 self.__check_for_existing_repo()
30 self.__check_for_existing_repo()
31 repo = GitRepository(TEST_GIT_REPO)
31 repo = GitRepository(TEST_GIT_REPO)
32 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
32 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
33 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
33 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
34 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
34 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
35 # Checking hashes of changesets should be enough
35 # Checking hashes of changesets should be enough
36 for changeset in repo.get_changesets():
36 for changeset in repo.get_changesets():
37 raw_id = changeset.raw_id
37 raw_id = changeset.raw_id
38 self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
38 self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
39
39
40 def test_repo_clone_without_create(self):
40 def test_repo_clone_without_create(self):
41 self.assertRaises(RepositoryError, GitRepository,
41 self.assertRaises(RepositoryError, GitRepository,
42 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
42 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
43
43
44 def test_repo_clone_with_update(self):
44 def test_repo_clone_with_update(self):
45 repo = GitRepository(TEST_GIT_REPO)
45 repo = GitRepository(TEST_GIT_REPO)
46 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
46 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
47 repo_clone = GitRepository(clone_path,
47 repo_clone = GitRepository(clone_path,
48 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
48 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
49 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
49 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
50
50
51 #check if current workdir was updated
51 #check if current workdir was updated
52 fpath = os.path.join(clone_path, 'MANIFEST.in')
52 fpath = os.path.join(clone_path, 'MANIFEST.in')
53 self.assertEqual(True, os.path.isfile(fpath),
53 self.assertEqual(True, os.path.isfile(fpath),
54 'Repo was cloned and updated but file %s could not be found'
54 'Repo was cloned and updated but file %s could not be found'
55 % fpath)
55 % fpath)
56
56
57 def test_repo_clone_without_update(self):
57 def test_repo_clone_without_update(self):
58 repo = GitRepository(TEST_GIT_REPO)
58 repo = GitRepository(TEST_GIT_REPO)
59 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
59 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
60 repo_clone = GitRepository(clone_path,
60 repo_clone = GitRepository(clone_path,
61 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
61 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
62 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
62 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
63 #check if current workdir was *NOT* updated
63 #check if current workdir was *NOT* updated
64 fpath = os.path.join(clone_path, 'MANIFEST.in')
64 fpath = os.path.join(clone_path, 'MANIFEST.in')
65 # Make sure it's not bare repo
65 # Make sure it's not bare repo
66 self.assertFalse(repo_clone._repo.bare)
66 self.assertFalse(repo_clone._repo.bare)
67 self.assertEqual(False, os.path.isfile(fpath),
67 self.assertEqual(False, os.path.isfile(fpath),
68 'Repo was cloned and updated but file %s was found'
68 'Repo was cloned and updated but file %s was found'
69 % fpath)
69 % fpath)
70
70
71 def test_repo_clone_into_bare_repo(self):
71 def test_repo_clone_into_bare_repo(self):
72 repo = GitRepository(TEST_GIT_REPO)
72 repo = GitRepository(TEST_GIT_REPO)
73 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
73 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
74 repo_clone = GitRepository(clone_path, create=True,
74 repo_clone = GitRepository(clone_path, create=True,
75 src_url=repo.path, bare=True)
75 src_url=repo.path, bare=True)
76 self.assertTrue(repo_clone._repo.bare)
76 self.assertTrue(repo_clone._repo.bare)
77
77
78 def test_create_repo_is_not_bare_by_default(self):
78 def test_create_repo_is_not_bare_by_default(self):
79 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
79 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
80 self.assertFalse(repo._repo.bare)
80 self.assertFalse(repo._repo.bare)
81
81
82 def test_create_bare_repo(self):
82 def test_create_bare_repo(self):
83 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
83 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
84 self.assertTrue(repo._repo.bare)
84 self.assertTrue(repo._repo.bare)
85
85
86 def test_revisions(self):
86 def test_revisions(self):
87 # there are 112 revisions (by now)
87 # there are 112 revisions (by now)
88 # so we can assume they would be available from now on
88 # so we can assume they would be available from now on
89 subset = set([
89 subset = set([
90 'c1214f7e79e02fc37156ff215cd71275450cffc3',
90 'c1214f7e79e02fc37156ff215cd71275450cffc3',
91 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
91 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
92 'fa6600f6848800641328adbf7811fd2372c02ab2',
92 'fa6600f6848800641328adbf7811fd2372c02ab2',
93 '102607b09cdd60e2793929c4f90478be29f85a17',
93 '102607b09cdd60e2793929c4f90478be29f85a17',
94 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
94 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
95 '2d1028c054665b962fa3d307adfc923ddd528038',
95 '2d1028c054665b962fa3d307adfc923ddd528038',
96 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
96 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
97 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
97 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
98 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
98 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
99 '8430a588b43b5d6da365400117c89400326e7992',
99 '8430a588b43b5d6da365400117c89400326e7992',
100 'd955cd312c17b02143c04fa1099a352b04368118',
100 'd955cd312c17b02143c04fa1099a352b04368118',
101 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
101 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
102 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
102 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
103 'f298fe1189f1b69779a4423f40b48edf92a703fc',
103 'f298fe1189f1b69779a4423f40b48edf92a703fc',
104 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
104 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
105 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
105 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
106 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
106 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
107 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
107 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
108 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
108 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
109 '45223f8f114c64bf4d6f853e3c35a369a6305520',
109 '45223f8f114c64bf4d6f853e3c35a369a6305520',
110 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
110 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
111 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
111 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
112 '27d48942240f5b91dfda77accd2caac94708cc7d',
112 '27d48942240f5b91dfda77accd2caac94708cc7d',
113 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
113 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
114 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
114 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
115 self.assertTrue(subset.issubset(set(self.repo.revisions)))
115 self.assertTrue(subset.issubset(set(self.repo.revisions)))
116
116
117
117
118
118
119 def test_slicing(self):
119 def test_slicing(self):
120 #4 1 5 10 95
120 #4 1 5 10 95
121 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
121 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
122 (10, 20, 10), (5, 100, 95)]:
122 (10, 20, 10), (5, 100, 95)]:
123 revs = list(self.repo[sfrom:sto])
123 revs = list(self.repo[sfrom:sto])
124 self.assertEqual(len(revs), size)
124 self.assertEqual(len(revs), size)
125 self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
125 self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
126 self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
126 self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
127
127
128
128
129 def test_branches(self):
129 def test_branches(self):
130 # TODO: Need more tests here
130 # TODO: Need more tests here
131 # Removed (those are 'remotes' branches for cloned repo)
131 # Removed (those are 'remotes' branches for cloned repo)
132 #self.assertTrue('master' in self.repo.branches)
132 #self.assertTrue('master' in self.repo.branches)
133 #self.assertTrue('gittree' in self.repo.branches)
133 #self.assertTrue('gittree' in self.repo.branches)
134 #self.assertTrue('web-branch' in self.repo.branches)
134 #self.assertTrue('web-branch' in self.repo.branches)
135 for name, id in self.repo.branches.items():
135 for name, id in self.repo.branches.items():
136 self.assertTrue(isinstance(
136 self.assertTrue(isinstance(
137 self.repo.get_changeset(id), GitChangeset))
137 self.repo.get_changeset(id), GitChangeset))
138
138
139 def test_tags(self):
139 def test_tags(self):
140 # TODO: Need more tests here
140 # TODO: Need more tests here
141 self.assertTrue('v0.1.1' in self.repo.tags)
141 self.assertTrue('v0.1.1' in self.repo.tags)
142 self.assertTrue('v0.1.2' in self.repo.tags)
142 self.assertTrue('v0.1.2' in self.repo.tags)
143 for name, id in self.repo.tags.items():
143 for name, id in self.repo.tags.items():
144 self.assertTrue(isinstance(
144 self.assertTrue(isinstance(
145 self.repo.get_changeset(id), GitChangeset))
145 self.repo.get_changeset(id), GitChangeset))
146
146
147 def _test_single_changeset_cache(self, revision):
147 def _test_single_changeset_cache(self, revision):
148 chset = self.repo.get_changeset(revision)
148 chset = self.repo.get_changeset(revision)
149 self.assertTrue(revision in self.repo.changesets)
149 self.assertTrue(revision in self.repo.changesets)
150 self.assertTrue(chset is self.repo.changesets[revision])
150 self.assertTrue(chset is self.repo.changesets[revision])
151
151
152 def test_initial_changeset(self):
152 def test_initial_changeset(self):
153 id = self.repo.revisions[0]
153 id = self.repo.revisions[0]
154 init_chset = self.repo.get_changeset(id)
154 init_chset = self.repo.get_changeset(id)
155 self.assertEqual(init_chset.message, 'initial import\n')
155 self.assertEqual(init_chset.message, 'initial import\n')
156 self.assertEqual(init_chset.author,
156 self.assertEqual(init_chset.author,
157 'Marcin Kuzminski <marcin@python-blog.com>')
157 'Marcin Kuzminski <marcin@python-blog.com>')
158 for path in ('vcs/__init__.py',
158 for path in ('vcs/__init__.py',
159 'vcs/backends/BaseRepository.py',
159 'vcs/backends/BaseRepository.py',
160 'vcs/backends/__init__.py'):
160 'vcs/backends/__init__.py'):
161 self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
161 self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
162 for path in ('', 'vcs', 'vcs/backends'):
162 for path in ('', 'vcs', 'vcs/backends'):
163 self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
163 self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
164
164
165 self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
165 self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
166
166
167 node = init_chset.get_node('vcs/')
167 node = init_chset.get_node('vcs/')
168 self.assertTrue(hasattr(node, 'kind'))
168 self.assertTrue(hasattr(node, 'kind'))
169 self.assertEqual(node.kind, NodeKind.DIR)
169 self.assertEqual(node.kind, NodeKind.DIR)
170
170
171 node = init_chset.get_node('vcs')
171 node = init_chset.get_node('vcs')
172 self.assertTrue(hasattr(node, 'kind'))
172 self.assertTrue(hasattr(node, 'kind'))
173 self.assertEqual(node.kind, NodeKind.DIR)
173 self.assertEqual(node.kind, NodeKind.DIR)
174
174
175 node = init_chset.get_node('vcs/__init__.py')
175 node = init_chset.get_node('vcs/__init__.py')
176 self.assertTrue(hasattr(node, 'kind'))
176 self.assertTrue(hasattr(node, 'kind'))
177 self.assertEqual(node.kind, NodeKind.FILE)
177 self.assertEqual(node.kind, NodeKind.FILE)
178
178
179 def test_not_existing_changeset(self):
179 def test_not_existing_changeset(self):
180 self.assertRaises(RepositoryError, self.repo.get_changeset,
180 self.assertRaises(RepositoryError, self.repo.get_changeset,
181 'f' * 40)
181 'f' * 40)
182
182
183 def test_changeset10(self):
183 def test_changeset10(self):
184
184
185 chset10 = self.repo.get_changeset(self.repo.revisions[9])
185 chset10 = self.repo.get_changeset(self.repo.revisions[9])
186 README = """===
186 README = """===
187 VCS
187 VCS
188 ===
188 ===
189
189
190 Various Version Control System management abstraction layer for Python.
190 Various Version Control System management abstraction layer for Python.
191
191
192 Introduction
192 Introduction
193 ------------
193 ------------
194
194
195 TODO: To be written...
195 TODO: To be written...
196
196
197 """
197 """
198 node = chset10.get_node('README.rst')
198 node = chset10.get_node('README.rst')
199 self.assertEqual(node.kind, NodeKind.FILE)
199 self.assertEqual(node.kind, NodeKind.FILE)
200 self.assertEqual(node.content, README)
200 self.assertEqual(node.content, README)
201
201
202
202
203 class GitChangesetTest(unittest.TestCase):
203 class GitChangesetTest(unittest.TestCase):
204
204
205 def setUp(self):
205 def setUp(self):
206 self.repo = GitRepository(TEST_GIT_REPO)
206 self.repo = GitRepository(TEST_GIT_REPO)
207
207
208 def test_default_changeset(self):
208 def test_default_changeset(self):
209 tip = self.repo.get_changeset()
209 tip = self.repo.get_changeset()
210 self.assertEqual(tip, self.repo.get_changeset(None))
210 self.assertEqual(tip, self.repo.get_changeset(None))
211 self.assertEqual(tip, self.repo.get_changeset('tip'))
211 self.assertEqual(tip, self.repo.get_changeset('tip'))
212
212
213 def test_root_node(self):
213 def test_root_node(self):
214 tip = self.repo.get_changeset()
214 tip = self.repo.get_changeset()
215 self.assertTrue(tip.root is tip.get_node(''))
215 self.assertTrue(tip.root is tip.get_node(''))
216
216
217 def test_lazy_fetch(self):
217 def test_lazy_fetch(self):
218 """
218 """
219 Test if changeset's nodes expands and are cached as we walk through
219 Test if changeset's nodes expands and are cached as we walk through
220 the revision. This test is somewhat hard to write as order of tests
220 the revision. This test is somewhat hard to write as order of tests
221 is a key here. Written by running command after command in a shell.
221 is a key here. Written by running command after command in a shell.
222 """
222 """
223 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
223 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
224 self.assertTrue(hex in self.repo.revisions)
224 self.assertTrue(hex in self.repo.revisions)
225 chset = self.repo.get_changeset(hex)
225 chset = self.repo.get_changeset(hex)
226 self.assertTrue(len(chset.nodes) == 0)
226 self.assertTrue(len(chset.nodes) == 0)
227 root = chset.root
227 root = chset.root
228 self.assertTrue(len(chset.nodes) == 1)
228 self.assertTrue(len(chset.nodes) == 1)
229 self.assertTrue(len(root.nodes) == 8)
229 self.assertTrue(len(root.nodes) == 8)
230 # accessing root.nodes updates chset.nodes
230 # accessing root.nodes updates chset.nodes
231 self.assertTrue(len(chset.nodes) == 9)
231 self.assertTrue(len(chset.nodes) == 9)
232
232
233 docs = root.get_node('docs')
233 docs = root.get_node('docs')
234 # we haven't yet accessed anything new as docs dir was already cached
234 # we haven't yet accessed anything new as docs dir was already cached
235 self.assertTrue(len(chset.nodes) == 9)
235 self.assertTrue(len(chset.nodes) == 9)
236 self.assertTrue(len(docs.nodes) == 8)
236 self.assertTrue(len(docs.nodes) == 8)
237 # accessing docs.nodes updates chset.nodes
237 # accessing docs.nodes updates chset.nodes
238 self.assertTrue(len(chset.nodes) == 17)
238 self.assertTrue(len(chset.nodes) == 17)
239
239
240 self.assertTrue(docs is chset.get_node('docs'))
240 self.assertTrue(docs is chset.get_node('docs'))
241 self.assertTrue(docs is root.nodes[0])
241 self.assertTrue(docs is root.nodes[0])
242 self.assertTrue(docs is root.dirs[0])
242 self.assertTrue(docs is root.dirs[0])
243 self.assertTrue(docs is chset.get_node('docs'))
243 self.assertTrue(docs is chset.get_node('docs'))
244
244
245 def test_nodes_with_changeset(self):
245 def test_nodes_with_changeset(self):
246 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
246 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
247 chset = self.repo.get_changeset(hex)
247 chset = self.repo.get_changeset(hex)
248 root = chset.root
248 root = chset.root
249 docs = root.get_node('docs')
249 docs = root.get_node('docs')
250 self.assertTrue(docs is chset.get_node('docs'))
250 self.assertTrue(docs is chset.get_node('docs'))
251 api = docs.get_node('api')
251 api = docs.get_node('api')
252 self.assertTrue(api is chset.get_node('docs/api'))
252 self.assertTrue(api is chset.get_node('docs/api'))
253 index = api.get_node('index.rst')
253 index = api.get_node('index.rst')
254 self.assertTrue(index is chset.get_node('docs/api/index.rst'))
254 self.assertTrue(index is chset.get_node('docs/api/index.rst'))
255 self.assertTrue(index is chset.get_node('docs')\
255 self.assertTrue(index is chset.get_node('docs')\
256 .get_node('api')\
256 .get_node('api')\
257 .get_node('index.rst'))
257 .get_node('index.rst'))
258
258
259 def test_branch_and_tags(self):
259 def test_branch_and_tags(self):
260 '''
260 """
261 rev0 = self.repo.revisions[0]
261 rev0 = self.repo.revisions[0]
262 chset0 = self.repo.get_changeset(rev0)
262 chset0 = self.repo.get_changeset(rev0)
263 self.assertEqual(chset0.branch, 'master')
263 self.assertEqual(chset0.branch, 'master')
264 self.assertEqual(chset0.tags, [])
264 self.assertEqual(chset0.tags, [])
265
265
266 rev10 = self.repo.revisions[10]
266 rev10 = self.repo.revisions[10]
267 chset10 = self.repo.get_changeset(rev10)
267 chset10 = self.repo.get_changeset(rev10)
268 self.assertEqual(chset10.branch, 'master')
268 self.assertEqual(chset10.branch, 'master')
269 self.assertEqual(chset10.tags, [])
269 self.assertEqual(chset10.tags, [])
270
270
271 rev44 = self.repo.revisions[44]
271 rev44 = self.repo.revisions[44]
272 chset44 = self.repo.get_changeset(rev44)
272 chset44 = self.repo.get_changeset(rev44)
273 self.assertEqual(chset44.branch, 'web-branch')
273 self.assertEqual(chset44.branch, 'web-branch')
274
274
275 tip = self.repo.get_changeset('tip')
275 tip = self.repo.get_changeset('tip')
276 self.assertTrue('tip' in tip.tags)
276 self.assertTrue('tip' in tip.tags)
277 '''
277 """
278 # Those tests would fail - branches are now going
278 # Those tests would fail - branches are now going
279 # to be changed at main API in order to support git backend
279 # to be changed at main API in order to support git backend
280 pass
280 pass
281
281
282 def _test_slices(self, limit, offset):
282 def _test_slices(self, limit, offset):
283 count = self.repo.count()
283 count = self.repo.count()
284 changesets = self.repo.get_changesets(limit=limit, offset=offset)
284 changesets = self.repo.get_changesets(limit=limit, offset=offset)
285 idx = 0
285 idx = 0
286 for changeset in changesets:
286 for changeset in changesets:
287 rev = offset + idx
287 rev = offset + idx
288 idx += 1
288 idx += 1
289 rev_id = self.repo.revisions[rev]
289 rev_id = self.repo.revisions[rev]
290 if idx > limit:
290 if idx > limit:
291 self.fail("Exceeded limit already (getting revision %s, "
291 self.fail("Exceeded limit already (getting revision %s, "
292 "there are %s total revisions, offset=%s, limit=%s)"
292 "there are %s total revisions, offset=%s, limit=%s)"
293 % (rev_id, count, offset, limit))
293 % (rev_id, count, offset, limit))
294 self.assertEqual(changeset, self.repo.get_changeset(rev_id))
294 self.assertEqual(changeset, self.repo.get_changeset(rev_id))
295 result = list(self.repo.get_changesets(limit=limit, offset=offset))
295 result = list(self.repo.get_changesets(limit=limit, offset=offset))
296 start = offset
296 start = offset
297 end = limit and offset + limit or None
297 end = limit and offset + limit or None
298 sliced = list(self.repo[start:end])
298 sliced = list(self.repo[start:end])
299 self.failUnlessEqual(result, sliced,
299 self.failUnlessEqual(result, sliced,
300 msg="Comparison failed for limit=%s, offset=%s"
300 msg="Comparison failed for limit=%s, offset=%s"
301 "(get_changeset returned: %s and sliced: %s"
301 "(get_changeset returned: %s and sliced: %s"
302 % (limit, offset, result, sliced))
302 % (limit, offset, result, sliced))
303
303
304 def _test_file_size(self, revision, path, size):
304 def _test_file_size(self, revision, path, size):
305 node = self.repo.get_changeset(revision).get_node(path)
305 node = self.repo.get_changeset(revision).get_node(path)
306 self.assertTrue(node.is_file())
306 self.assertTrue(node.is_file())
307 self.assertEqual(node.size, size)
307 self.assertEqual(node.size, size)
308
308
309 def test_file_size(self):
309 def test_file_size(self):
310 to_check = (
310 to_check = (
311 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
311 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
312 'vcs/backends/BaseRepository.py', 502),
312 'vcs/backends/BaseRepository.py', 502),
313 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
313 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
314 'vcs/backends/hg.py', 854),
314 'vcs/backends/hg.py', 854),
315 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
315 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
316 'setup.py', 1068),
316 'setup.py', 1068),
317
317
318 ('d955cd312c17b02143c04fa1099a352b04368118',
318 ('d955cd312c17b02143c04fa1099a352b04368118',
319 'vcs/backends/base.py', 2921),
319 'vcs/backends/base.py', 2921),
320 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
320 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
321 'vcs/backends/base.py', 3936),
321 'vcs/backends/base.py', 3936),
322 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
322 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
323 'vcs/backends/base.py', 6189),
323 'vcs/backends/base.py', 6189),
324 )
324 )
325 for revision, path, size in to_check:
325 for revision, path, size in to_check:
326 self._test_file_size(revision, path, size)
326 self._test_file_size(revision, path, size)
327
327
328 def test_file_history(self):
328 def test_file_history(self):
329 # we can only check if those revisions are present in the history
329 # we can only check if those revisions are present in the history
330 # as we cannot update this test every time file is changed
330 # as we cannot update this test every time file is changed
331 files = {
331 files = {
332 'setup.py': [
332 'setup.py': [
333 '54386793436c938cff89326944d4c2702340037d',
333 '54386793436c938cff89326944d4c2702340037d',
334 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
334 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
335 '998ed409c795fec2012b1c0ca054d99888b22090',
335 '998ed409c795fec2012b1c0ca054d99888b22090',
336 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
336 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
337 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
337 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
338 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
338 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
339 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
339 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
340 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
340 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
341 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
341 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
342 ],
342 ],
343 'vcs/nodes.py': [
343 'vcs/nodes.py': [
344 '33fa3223355104431402a888fa77a4e9956feb3e',
344 '33fa3223355104431402a888fa77a4e9956feb3e',
345 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
345 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
346 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
346 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
347 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
347 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
348 'c877b68d18e792a66b7f4c529ea02c8f80801542',
348 'c877b68d18e792a66b7f4c529ea02c8f80801542',
349 '4313566d2e417cb382948f8d9d7c765330356054',
349 '4313566d2e417cb382948f8d9d7c765330356054',
350 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
350 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
351 '54386793436c938cff89326944d4c2702340037d',
351 '54386793436c938cff89326944d4c2702340037d',
352 '54000345d2e78b03a99d561399e8e548de3f3203',
352 '54000345d2e78b03a99d561399e8e548de3f3203',
353 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
353 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
354 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
354 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
355 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
355 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
356 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
356 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
357 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
357 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
358 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
358 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
359 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
359 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
360 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
360 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
361 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
361 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
362 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
362 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
363 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
363 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
364 'f15c21f97864b4f071cddfbf2750ec2e23859414',
364 'f15c21f97864b4f071cddfbf2750ec2e23859414',
365 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
365 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
366 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
366 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
367 '84dec09632a4458f79f50ddbbd155506c460b4f9',
367 '84dec09632a4458f79f50ddbbd155506c460b4f9',
368 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
368 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
369 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
369 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
370 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
370 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
371 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
371 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
372 '6970b057cffe4aab0a792aa634c89f4bebf01441',
372 '6970b057cffe4aab0a792aa634c89f4bebf01441',
373 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
373 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
374 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
374 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
375 ],
375 ],
376 'vcs/backends/git.py': [
376 'vcs/backends/git.py': [
377 '4cf116ad5a457530381135e2f4c453e68a1b0105',
377 '4cf116ad5a457530381135e2f4c453e68a1b0105',
378 '9a751d84d8e9408e736329767387f41b36935153',
378 '9a751d84d8e9408e736329767387f41b36935153',
379 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
379 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
380 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
380 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
381 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
381 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
382 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
382 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
383 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
383 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
384 '54000345d2e78b03a99d561399e8e548de3f3203',
384 '54000345d2e78b03a99d561399e8e548de3f3203',
385 ],
385 ],
386 }
386 }
387 for path, revs in files.items():
387 for path, revs in files.items():
388 node = self.repo.get_changeset(revs[0]).get_node(path)
388 node = self.repo.get_changeset(revs[0]).get_node(path)
389 node_revs = [chset.raw_id for chset in node.history]
389 node_revs = [chset.raw_id for chset in node.history]
390 self.assertTrue(set(revs).issubset(set(node_revs)),
390 self.assertTrue(set(revs).issubset(set(node_revs)),
391 "We assumed that %s is subset of revisions for which file %s "
391 "We assumed that %s is subset of revisions for which file %s "
392 "has been changed, and history of that node returned: %s"
392 "has been changed, and history of that node returned: %s"
393 % (revs, path, node_revs))
393 % (revs, path, node_revs))
394
394
395 def test_file_annotate(self):
395 def test_file_annotate(self):
396 files = {
396 files = {
397 'vcs/backends/__init__.py': {
397 'vcs/backends/__init__.py': {
398 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
398 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
399 'lines_no': 1,
399 'lines_no': 1,
400 'changesets': [
400 'changesets': [
401 'c1214f7e79e02fc37156ff215cd71275450cffc3',
401 'c1214f7e79e02fc37156ff215cd71275450cffc3',
402 ],
402 ],
403 },
403 },
404 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
404 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
405 'lines_no': 21,
405 'lines_no': 21,
406 'changesets': [
406 'changesets': [
407 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
407 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
408 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
408 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
409 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
409 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
410 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
410 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
411 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
411 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
412 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
412 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
413 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
413 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
414 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
414 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
415 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
415 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
416 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
416 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
417 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
417 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
418 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
418 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
419 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
419 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
420 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
420 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
421 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
421 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
422 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
422 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
423 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
423 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
424 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
424 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
425 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
425 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
426 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
426 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
427 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
427 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
428 ],
428 ],
429 },
429 },
430 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
430 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
431 'lines_no': 32,
431 'lines_no': 32,
432 'changesets': [
432 'changesets': [
433 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
433 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
434 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
434 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
435 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
435 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
436 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
436 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
437 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
437 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
438 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
438 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
439 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
439 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
440 '54000345d2e78b03a99d561399e8e548de3f3203',
440 '54000345d2e78b03a99d561399e8e548de3f3203',
441 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
441 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
442 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
442 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
443 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
443 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
444 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
444 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
445 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
445 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
446 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
446 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
447 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
447 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
448 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
448 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
449 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
449 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
450 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
450 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
451 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
451 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
452 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
452 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
453 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
453 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
454 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
454 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
455 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
455 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
456 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
456 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
457 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
457 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
458 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
458 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
459 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
459 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
460 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
460 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
461 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
461 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
462 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
462 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
463 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
463 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
464 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
464 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
465 ],
465 ],
466 },
466 },
467 },
467 },
468 }
468 }
469
469
470 for fname, revision_dict in files.items():
470 for fname, revision_dict in files.items():
471 for rev, data in revision_dict.items():
471 for rev, data in revision_dict.items():
472 cs = self.repo.get_changeset(rev)
472 cs = self.repo.get_changeset(rev)
473
473
474 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
474 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
475 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
475 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
476 self.assertEqual(l1_1, l1_2)
476 self.assertEqual(l1_1, l1_2)
477 l1 = l1_1
477 l1 = l1_1
478 l2 = files[fname][rev]['changesets']
478 l2 = files[fname][rev]['changesets']
479 self.assertTrue(l1 == l2 , "The lists of revision for %s@rev %s"
479 self.assertTrue(l1 == l2 , "The lists of revision for %s@rev %s"
480 "from annotation list should match each other, "
480 "from annotation list should match each other, "
481 "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
481 "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
482
482
483 def test_files_state(self):
483 def test_files_state(self):
484 """
484 """
485 Tests state of FileNodes.
485 Tests state of FileNodes.
486 """
486 """
487 node = self.repo\
487 node = self.repo\
488 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
488 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
489 .get_node('vcs/utils/diffs.py')
489 .get_node('vcs/utils/diffs.py')
490 self.assertTrue(node.state, NodeState.ADDED)
490 self.assertTrue(node.state, NodeState.ADDED)
491 self.assertTrue(node.added)
491 self.assertTrue(node.added)
492 self.assertFalse(node.changed)
492 self.assertFalse(node.changed)
493 self.assertFalse(node.not_changed)
493 self.assertFalse(node.not_changed)
494 self.assertFalse(node.removed)
494 self.assertFalse(node.removed)
495
495
496 node = self.repo\
496 node = self.repo\
497 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
497 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
498 .get_node('.hgignore')
498 .get_node('.hgignore')
499 self.assertTrue(node.state, NodeState.CHANGED)
499 self.assertTrue(node.state, NodeState.CHANGED)
500 self.assertFalse(node.added)
500 self.assertFalse(node.added)
501 self.assertTrue(node.changed)
501 self.assertTrue(node.changed)
502 self.assertFalse(node.not_changed)
502 self.assertFalse(node.not_changed)
503 self.assertFalse(node.removed)
503 self.assertFalse(node.removed)
504
504
505 node = self.repo\
505 node = self.repo\
506 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
506 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
507 .get_node('setup.py')
507 .get_node('setup.py')
508 self.assertTrue(node.state, NodeState.NOT_CHANGED)
508 self.assertTrue(node.state, NodeState.NOT_CHANGED)
509 self.assertFalse(node.added)
509 self.assertFalse(node.added)
510 self.assertFalse(node.changed)
510 self.assertFalse(node.changed)
511 self.assertTrue(node.not_changed)
511 self.assertTrue(node.not_changed)
512 self.assertFalse(node.removed)
512 self.assertFalse(node.removed)
513
513
514 # If node has REMOVED state then trying to fetch it would raise
514 # If node has REMOVED state then trying to fetch it would raise
515 # ChangesetError exception
515 # ChangesetError exception
516 chset = self.repo.get_changeset(
516 chset = self.repo.get_changeset(
517 'fa6600f6848800641328adbf7811fd2372c02ab2')
517 'fa6600f6848800641328adbf7811fd2372c02ab2')
518 path = 'vcs/backends/BaseRepository.py'
518 path = 'vcs/backends/BaseRepository.py'
519 self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
519 self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
520 # but it would be one of ``removed`` (changeset's attribute)
520 # but it would be one of ``removed`` (changeset's attribute)
521 self.assertTrue(path in [rf.path for rf in chset.removed])
521 self.assertTrue(path in [rf.path for rf in chset.removed])
522
522
523 chset = self.repo.get_changeset(
523 chset = self.repo.get_changeset(
524 '54386793436c938cff89326944d4c2702340037d')
524 '54386793436c938cff89326944d4c2702340037d')
525 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
525 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
526 'vcs/nodes.py']
526 'vcs/nodes.py']
527 self.assertEqual(set(changed), set([f.path for f in chset.changed]))
527 self.assertEqual(set(changed), set([f.path for f in chset.changed]))
528
528
529 def test_commit_message_is_unicode(self):
529 def test_commit_message_is_unicode(self):
530 for cs in self.repo:
530 for cs in self.repo:
531 self.assertEqual(type(cs.message), unicode)
531 self.assertEqual(type(cs.message), unicode)
532
532
533 def test_changeset_author_is_unicode(self):
533 def test_changeset_author_is_unicode(self):
534 for cs in self.repo:
534 for cs in self.repo:
535 self.assertEqual(type(cs.author), unicode)
535 self.assertEqual(type(cs.author), unicode)
536
536
537 def test_repo_files_content_is_unicode(self):
537 def test_repo_files_content_is_unicode(self):
538 changeset = self.repo.get_changeset()
538 changeset = self.repo.get_changeset()
539 for node in changeset.get_node('/'):
539 for node in changeset.get_node('/'):
540 if node.is_file():
540 if node.is_file():
541 self.assertEqual(type(node.content), unicode)
541 self.assertEqual(type(node.content), unicode)
542
542
543 def test_wrong_path(self):
543 def test_wrong_path(self):
544 # There is 'setup.py' in the root dir but not there:
544 # There is 'setup.py' in the root dir but not there:
545 path = 'foo/bar/setup.py'
545 path = 'foo/bar/setup.py'
546 tip = self.repo.get_changeset()
546 tip = self.repo.get_changeset()
547 self.assertRaises(VCSError, tip.get_node, path)
547 self.assertRaises(VCSError, tip.get_node, path)
548
548
549 def test_author_email(self):
549 def test_author_email(self):
550 self.assertEqual('marcin@python-blog.com',
550 self.assertEqual('marcin@python-blog.com',
551 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
551 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
552 .author_email)
552 .author_email)
553 self.assertEqual('lukasz.balcerzak@python-center.pl',
553 self.assertEqual('lukasz.balcerzak@python-center.pl',
554 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
554 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
555 .author_email)
555 .author_email)
556 self.assertEqual('none@none',
556 self.assertEqual('none@none',
557 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
557 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
558 .author_email)
558 .author_email)
559
559
560 def test_author_username(self):
560 def test_author_username(self):
561 self.assertEqual('Marcin Kuzminski',
561 self.assertEqual('Marcin Kuzminski',
562 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
562 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
563 .author_name)
563 .author_name)
564 self.assertEqual('Lukasz Balcerzak',
564 self.assertEqual('Lukasz Balcerzak',
565 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
565 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
566 .author_name)
566 .author_name)
567 self.assertEqual('marcink',
567 self.assertEqual('marcink',
568 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
568 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
569 .author_name)
569 .author_name)
570
570
571
571
class GitSpecificTest(unittest.TestCase):
    """
    Git-specific tests that need no fixture repository: a mocked repo is
    enough to check that an unparsable ``_diff_name_status`` cache makes
    the changed-files properties raise ``VCSError``.
    """

    def _broken_changeset(self):
        # Changeset whose cached diff name-status line cannot be parsed.
        repo = mock.MagicMock()
        changeset = GitChangeset(repo, 'foobar')
        changeset._diff_name_status = 'foobar'
        return changeset

    def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self):
        changeset = self._broken_changeset()
        with self.assertRaises(VCSError):
            changeset.added

    def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self):
        changeset = self._broken_changeset()
        with self.assertRaises(VCSError):
            # BUG FIX: this test previously accessed ``added`` (copy-paste);
            # it now exercises ``changed`` as its name promises.
            changeset.changed

    def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self):
        changeset = self._broken_changeset()
        with self.assertRaises(VCSError):
            # BUG FIX: previously accessed ``added`` -- now ``removed``.
            changeset.removed
595
595
596 class GitSpecificWithRepoTest(BackendTestMixin, unittest.TestCase):
596 class GitSpecificWithRepoTest(BackendTestMixin, unittest.TestCase):
597 backend_alias = 'git'
597 backend_alias = 'git'
598
598
599 @classmethod
599 @classmethod
600 def _get_commits(cls):
600 def _get_commits(cls):
601 return [
601 return [
602 {
602 {
603 'message': 'Initial',
603 'message': 'Initial',
604 'author': 'Joe Doe <joe.doe@example.com>',
604 'author': 'Joe Doe <joe.doe@example.com>',
605 'date': datetime.datetime(2010, 1, 1, 20),
605 'date': datetime.datetime(2010, 1, 1, 20),
606 'added': [
606 'added': [
607 FileNode('foobar/static/js/admin/base.js', content='base'),
607 FileNode('foobar/static/js/admin/base.js', content='base'),
608 FileNode('foobar/static/admin', content='admin',
608 FileNode('foobar/static/admin', content='admin',
609 mode=0120000), # this is a link
609 mode=0120000), # this is a link
610 FileNode('foo', content='foo'),
610 FileNode('foo', content='foo'),
611 ],
611 ],
612 },
612 },
613 {
613 {
614 'message': 'Second',
614 'message': 'Second',
615 'author': 'Joe Doe <joe.doe@example.com>',
615 'author': 'Joe Doe <joe.doe@example.com>',
616 'date': datetime.datetime(2010, 1, 1, 22),
616 'date': datetime.datetime(2010, 1, 1, 22),
617 'added': [
617 'added': [
618 FileNode('foo2', content='foo2'),
618 FileNode('foo2', content='foo2'),
619 ],
619 ],
620 },
620 },
621 ]
621 ]
622
622
623 def test_paths_slow_traversing(self):
623 def test_paths_slow_traversing(self):
624 cs = self.repo.get_changeset()
624 cs = self.repo.get_changeset()
625 self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js')
625 self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js')
626 .get_node('admin').get_node('base.js').content, 'base')
626 .get_node('admin').get_node('base.js').content, 'base')
627
627
628 def test_paths_fast_traversing(self):
628 def test_paths_fast_traversing(self):
629 cs = self.repo.get_changeset()
629 cs = self.repo.get_changeset()
630 self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content,
630 self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content,
631 'base')
631 'base')
632
632
633 def test_workdir_get_branch(self):
633 def test_workdir_get_branch(self):
634 self.repo.run_git_command('checkout -b production')
634 self.repo.run_git_command('checkout -b production')
635 # Regression test: one of following would fail if we don't check
635 # Regression test: one of following would fail if we don't check
636 # .git/HEAD file
636 # .git/HEAD file
637 self.repo.run_git_command('checkout production')
637 self.repo.run_git_command('checkout production')
638 self.assertEqual(self.repo.workdir.get_branch(), 'production')
638 self.assertEqual(self.repo.workdir.get_branch(), 'production')
639 self.repo.run_git_command('checkout master')
639 self.repo.run_git_command('checkout master')
640 self.assertEqual(self.repo.workdir.get_branch(), 'master')
640 self.assertEqual(self.repo.workdir.get_branch(), 'master')
641
641
642 def test_get_diff_runs_git_command_with_hashes(self):
642 def test_get_diff_runs_git_command_with_hashes(self):
643 self.repo.run_git_command = mock.Mock(return_value=['', ''])
643 self.repo.run_git_command = mock.Mock(return_value=['', ''])
644 self.repo.get_diff(0, 1)
644 self.repo.get_diff(0, 1)
645 self.repo.run_git_command.assert_called_once_with(
645 self.repo.run_git_command.assert_called_once_with(
646 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s' %
646 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s' %
647 (3, self.repo._get_revision(0), self.repo._get_revision(1)))
647 (3, self.repo._get_revision(0), self.repo._get_revision(1)))
648
648
649 def test_get_diff_runs_git_command_with_str_hashes(self):
649 def test_get_diff_runs_git_command_with_str_hashes(self):
650 self.repo.run_git_command = mock.Mock(return_value=['', ''])
650 self.repo.run_git_command = mock.Mock(return_value=['', ''])
651 self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
651 self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
652 self.repo.run_git_command.assert_called_once_with(
652 self.repo.run_git_command.assert_called_once_with(
653 'show -U%s --full-index --binary -p -M --abbrev=40 %s' %
653 'show -U%s --full-index --binary -p -M --abbrev=40 %s' %
654 (3, self.repo._get_revision(1)))
654 (3, self.repo._get_revision(1)))
655
655
656 def test_get_diff_runs_git_command_with_path_if_its_given(self):
656 def test_get_diff_runs_git_command_with_path_if_its_given(self):
657 self.repo.run_git_command = mock.Mock(return_value=['', ''])
657 self.repo.run_git_command = mock.Mock(return_value=['', ''])
658 self.repo.get_diff(0, 1, 'foo')
658 self.repo.get_diff(0, 1, 'foo')
659 self.repo.run_git_command.assert_called_once_with(
659 self.repo.run_git_command.assert_called_once_with(
660 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s -- "foo"'
660 'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s -- "foo"'
661 % (3, self.repo._get_revision(0), self.repo._get_revision(1)))
661 % (3, self.repo._get_revision(0), self.repo._get_revision(1)))
662
662
663
663
class GitRegressionTest(BackendTestMixin, unittest.TestCase):
    """Regression tests for listing nodes of directories with a shared
    path prefix."""
    backend_alias = 'git'

    @classmethod
    def _get_commits(cls):
        # Directory tree with several sibling dirs sharing path prefixes
        # (bot/templates vs bot/build/templates etc.).
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('bot/__init__.py', content='base'),
                    FileNode('bot/templates/404.html', content='base'),
                    FileNode('bot/templates/500.html', content='base'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('bot/build/migrations/1.py', content='foo2'),
                    FileNode('bot/build/migrations/2.py', content='foo2'),
                    FileNode('bot/build/static/templates/f.html', content='foo2'),
                    FileNode('bot/build/static/templates/f1.html', content='foo2'),
                    FileNode('bot/build/templates/err.html', content='foo2'),
                    FileNode('bot/build/templates/err2.html', content='foo2'),
                ],
            },
        ]

    def test_similar_paths(self):
        """Listing a directory must not leak entries of sibling dirs that
        merely share a path prefix."""
        tip = self.repo.get_changeset()

        def listed(path):
            # Paths of every node returned for the given directory.
            return [node.path for node in tip.get_nodes(path)]

        self.assertEqual(listed('bot'),
                         ['bot/build', 'bot/templates', 'bot/__init__.py'])
        self.assertEqual(listed('bot/build'),
                         ['bot/build/migrations', 'bot/build/static',
                          'bot/build/templates'])
        self.assertEqual(listed('bot/build/static'),
                         ['bot/build/static/templates'])
        # this get_nodes below causes troubles !
        self.assertEqual(listed('bot/build/static/templates'),
                         ['bot/build/static/templates/f.html',
                          'bot/build/static/templates/f1.html'])
        self.assertEqual(listed('bot/build/templates'),
                         ['bot/build/templates/err.html',
                          'bot/build/templates/err2.html'])
        # a trailing slash must be tolerated as well
        self.assertEqual(listed('bot/templates/'),
                         ['bot/templates/404.html', 'bot/templates/500.html'])
705
705
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
General Comments 0
You need to be logged in to leave comments. Login now