##// END OF EJS Templates
git: run external commands as list of strings so we really get correct quoting (Issue #135)...
Mads Kiilerich -
r5182:0e2d450f default
parent child Browse files
Show More
@@ -1,289 +1,289 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.controllers.compare
15 kallithea.controllers.compare
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
17
17
18 compare controller for pylons showing differences between two
18 compare controller for pylons showing differences between two
19 repos, branches, bookmarks or tips
19 repos, branches, bookmarks or tips
20
20
21 This file was forked by the Kallithea project in July 2014.
21 This file was forked by the Kallithea project in July 2014.
22 Original author and date, and relevant copyright and licensing information is below:
22 Original author and date, and relevant copyright and licensing information is below:
23 :created_on: May 6, 2012
23 :created_on: May 6, 2012
24 :author: marcink
24 :author: marcink
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 :license: GPLv3, see LICENSE.md for more details.
26 :license: GPLv3, see LICENSE.md for more details.
27 """
27 """
28
28
29
29
30 import logging
30 import logging
31 import re
31 import re
32
32
33 from webob.exc import HTTPBadRequest
33 from webob.exc import HTTPBadRequest
34 from pylons import request, tmpl_context as c, url
34 from pylons import request, tmpl_context as c, url
35 from pylons.controllers.util import redirect
35 from pylons.controllers.util import redirect
36 from pylons.i18n.translation import _
36 from pylons.i18n.translation import _
37
37
38 from kallithea.lib.vcs.utils.hgcompat import unionrepo
38 from kallithea.lib.vcs.utils.hgcompat import unionrepo
39 from kallithea.lib import helpers as h
39 from kallithea.lib import helpers as h
40 from kallithea.lib.base import BaseRepoController, render
40 from kallithea.lib.base import BaseRepoController, render
41 from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
41 from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
42 from kallithea.lib import diffs
42 from kallithea.lib import diffs
43 from kallithea.model.db import Repository
43 from kallithea.model.db import Repository
44 from kallithea.lib.diffs import LimitedDiffContainer
44 from kallithea.lib.diffs import LimitedDiffContainer
45 from kallithea.controllers.changeset import _ignorews_url,\
45 from kallithea.controllers.changeset import _ignorews_url,\
46 _context_url, get_line_ctx, get_ignore_ws
46 _context_url, get_line_ctx, get_ignore_ws
47 from kallithea.lib.graphmod import graph_data
47 from kallithea.lib.graphmod import graph_data
48 from kallithea.lib.compat import json
48 from kallithea.lib.compat import json
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
class CompareController(BaseRepoController):
    """Controller showing differences between two repos, branches,
    bookmarks or tips, optionally as a merge preview."""

    def __before__(self):
        super(CompareController, self).__before__()

    @staticmethod
    def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
        """
        Returns lists of changesets that can be merged from org_repo@org_rev
        to other_repo@other_rev
        ... and the other way
        ... and the ancestor that would be used for merge

        :param alias: scm backend alias, 'hg' or 'git'
        :param org_repo: repo object, that is most likely the original repo we forked from
        :param org_rev: the revision we want our compare to be made
        :param other_repo: repo object, most likely the fork of org_repo. It has
            all changesets that we need to obtain
        :param other_rev: revision we want out compare to be made on other_repo
        """
        ancestor = None
        # trivial case: identical revs or one side is the empty changeset
        if org_rev == other_rev or org_repo.EMPTY_CHANGESET in (org_rev, other_rev):
            org_changesets = []
            other_changesets = []
            ancestor = org_rev

        elif alias == 'hg':
            # case two independent repos
            if org_repo != other_repo:
                hgrepo = unionrepo.unionrepository(other_repo.baseui,
                                                   other_repo.path,
                                                   org_repo.path)
                # all ancestors of other_rev will be in other_repo and
                # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot

            # no remote compare do it on the same repository
            else:
                hgrepo = other_repo._repo

            ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev)
            if ancestors:
                # FIXME: picks arbitrary ancestor - but there is usually only one
                try:
                    ancestor = hgrepo[ancestors.first()].hex()
                except AttributeError:
                    # removed in hg 3.2
                    ancestor = hgrepo[ancestors[0]].hex()

            other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
                                     other_rev, org_rev, org_rev)
            other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
            org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
                                   org_rev, other_rev, other_rev)

            org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]

        elif alias == 'git':
            if org_repo != other_repo:
                from dulwich.repo import Repo
                from dulwich.client import SubprocessGitClient

                # fetch each side into the other so both object stores are complete
                gitrepo = Repo(org_repo.path)
                SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)

                gitrepo_remote = Repo(other_repo.path)
                SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)

                revs = []
                for x in gitrepo_remote.get_walker(include=[other_rev],
                                                   exclude=[org_rev]):
                    revs.append(x.commit.id)

                other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
                if other_changesets:
                    ancestor = other_changesets[0].parents[0].raw_id
                else:
                    # no changesets from other repo, ancestor is the other_rev
                    ancestor = other_rev

            else:
                # pass the command as a list of strings so arguments are
                # quoted correctly (Issue #135)
                so, se = org_repo.run_git_command(
                    ['log', '--reverse', '--pretty=format:%H',
                     '-s', '%s..%s' % (org_rev, other_rev)]
                )
                other_changesets = [org_repo.get_changeset(cs)
                                    for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
                so, se = org_repo.run_git_command(
                    ['merge-base', org_rev, other_rev]
                )
                ancestor = re.findall(r'[0-9a-fA-F]{40}', so)[0]
            org_changesets = []

        else:
            raise Exception('Bad alias only git and hg is allowed')

        return other_changesets, org_changesets, ancestor

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def index(self, repo_name):
        """Render the compare selection page (no revisions picked yet)."""
        c.compare_home = True
        org_repo = c.db_repo.repo_name
        other_repo = request.GET.get('other_repo', org_repo)
        c.a_repo = Repository.get_by_repo_name(org_repo)
        c.cs_repo = Repository.get_by_repo_name(other_repo)
        c.a_ref_name = c.cs_ref_name = _('Select changeset')
        return render('compare/compare_diff.html')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name):
        """Render the diff between org_ref and other_ref, optionally as a
        merge preview (``?merge=1``)."""
        org_repo = c.db_repo.repo_name
        other_repo = request.GET.get('other_repo', org_repo)
        # If merge is True:
        # Show what org would get if merged with other:
        # List changesets that are ancestors of other but not of org.
        # New changesets in org is thus ignored.
        # Diff will be from common ancestor, and merges of org to other will thus be ignored.
        # If merge is False:
        # Make a raw diff from org to other, no matter if related or not.
        # Changesets in one and not in the other will be ignored
        merge = bool(request.GET.get('merge'))
        # fulldiff disables cut_off_limit
        c.fulldiff = request.GET.get('fulldiff')
        # partial uses compare_cs.html template directly
        partial = request.environ.get('HTTP_X_PARTIAL_XHR')
        # as_form puts hidden input field with changeset revisions
        c.as_form = partial and request.GET.get('as_form')
        # swap url for compare_diff page - never partial and never as_form
        c.swap_url = h.url('compare_url',
            repo_name=other_repo,
            org_ref_type=other_ref_type, org_ref_name=other_ref_name,
            other_repo=org_repo,
            other_ref_type=org_ref_type, other_ref_name=org_ref_name,
            merge=merge or '')

        # set callbacks for generating markup for icons
        c.ignorews_url = _ignorews_url
        c.context_url = _context_url
        ignore_whitespace = request.GET.get('ignorews') == '1'
        line_context = request.GET.get('context', 3)

        org_repo = Repository.get_by_repo_name(org_repo)
        other_repo = Repository.get_by_repo_name(other_repo)

        if org_repo is None:
            msg = 'Could not find org repo %s' % org_repo
            log.error(msg)
            h.flash(msg, category='error')
            return redirect(url('compare_home', repo_name=c.repo_name))

        if other_repo is None:
            msg = 'Could not find other repo %s' % other_repo
            log.error(msg)
            h.flash(msg, category='error')
            return redirect(url('compare_home', repo_name=c.repo_name))

        if org_repo.scm_instance.alias != other_repo.scm_instance.alias:
            msg = 'compare of two different kind of remote repos not available'
            log.error(msg)
            h.flash(msg, category='error')
            return redirect(url('compare_home', repo_name=c.repo_name))

        c.a_rev = self._get_ref_rev(org_repo, org_ref_type, org_ref_name,
            returnempty=True)
        c.cs_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name)

        c.compare_home = False
        c.a_repo = org_repo
        c.a_ref_name = org_ref_name
        c.a_ref_type = org_ref_type
        c.cs_repo = other_repo
        c.cs_ref_name = other_ref_name
        c.cs_ref_type = other_ref_type

        c.cs_ranges, c.cs_ranges_org, c.ancestor = self._get_changesets(
            org_repo.scm_instance.alias, org_repo.scm_instance, c.a_rev,
            other_repo.scm_instance, c.cs_rev)
        raw_ids = [x.raw_id for x in c.cs_ranges]
        c.cs_comments = other_repo.get_comments(raw_ids)
        c.statuses = other_repo.statuses(raw_ids)

        revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
        c.jsdata = json.dumps(graph_data(c.cs_repo.scm_instance, revs))

        if partial:
            return render('compare/compare_cs.html')
        if merge and c.ancestor:
            # case we want a simple diff without incoming changesets,
            # previewing what will be merged.
            # Make the diff on the other repo (which is known to have other_rev)
            log.debug('Using ancestor %s as rev1 instead of %s'
                      % (c.ancestor, c.a_rev))
            rev1 = c.ancestor
            org_repo = other_repo
        else: # comparing tips, not necessarily linearly related
            if merge:
                log.error('Unable to find ancestor revision')
            if org_repo != other_repo:
                # TODO: we could do this by using hg unionrepo
                log.error('cannot compare across repos %s and %s', org_repo, other_repo)
                h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
                raise HTTPBadRequest
            rev1 = c.a_rev

        diff_limit = self.cut_off_limit if not c.fulldiff else None

        log.debug('running diff between %s and %s in %s'
                  % (rev1, c.cs_rev, org_repo.scm_instance.path))
        txtdiff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
                                                 ignore_whitespace=ignore_whitespace,
                                                 context=line_context)

        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
                                             diff_limit=diff_limit)
        _parsed = diff_processor.prepare()

        c.limited_diff = False
        if isinstance(_parsed, LimitedDiffContainer):
            c.limited_diff = True

        # aggregate per-file stats and rendered html diffs
        c.files = []
        c.changes = {}
        c.lines_added = 0
        c.lines_deleted = 0
        for f in _parsed:
            st = f['stats']
            if not st['binary']:
                c.lines_added += st['added']
                c.lines_deleted += st['deleted']
            fid = h.FID('', f['filename'])
            c.files.append([fid, f['operation'], f['filename'], f['stats']])
            htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f])
            c.changes[fid] = [f['operation'], f['filename'], htmldiff]

        return render('compare/compare_diff.html')
@@ -1,470 +1,470 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.hooks
15 kallithea.lib.hooks
16 ~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~
17
17
18 Hooks run by Kallithea
18 Hooks run by Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Aug 6, 2010
22 :created_on: Aug 6, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import os
28 import os
29 import sys
29 import sys
30 import time
30 import time
31 import binascii
31 import binascii
32
32
33 from kallithea.lib.vcs.utils.hgcompat import nullrev, revrange
33 from kallithea.lib.vcs.utils.hgcompat import nullrev, revrange
34 from kallithea.lib import helpers as h
34 from kallithea.lib import helpers as h
35 from kallithea.lib.utils import action_logger
35 from kallithea.lib.utils import action_logger
36 from kallithea.lib.vcs.backends.base import EmptyChangeset
36 from kallithea.lib.vcs.backends.base import EmptyChangeset
37 from kallithea.lib.exceptions import HTTPLockedRC, UserCreationError
37 from kallithea.lib.exceptions import HTTPLockedRC, UserCreationError
38 from kallithea.lib.utils2 import safe_str, _extract_extras
38 from kallithea.lib.utils2 import safe_str, _extract_extras
39 from kallithea.model.db import Repository, User
39 from kallithea.model.db import Repository, User
40
40
41
41
def _get_scm_size(alias, root_path):
    """
    Walk *root_path* and return the sizes of the scm control directory,
    of the rest of the working directory, and the total, each formatted
    as a human readable string via ``h.format_byte_size``.

    :param alias: scm control directory name, e.g. 'hg' or '.hg'
        (a leading dot is added if missing)
    :param root_path: repository root directory to walk
    """
    if not alias.startswith('.'):
        alias += '.'

    size_scm, size_root = 0, 0
    for path, dirs, files in os.walk(safe_str(root_path)):
        if path.find(alias) != -1:
            # inside the scm control directory
            for f in files:
                try:
                    size_scm += os.path.getsize(os.path.join(path, f))
                except OSError:
                    # file may vanish while walking; best-effort size
                    pass
        else:
            for f in files:
                try:
                    size_root += os.path.getsize(os.path.join(path, f))
                except OSError:
                    pass

    size_scm_f = h.format_byte_size(size_scm)
    size_root_f = h.format_byte_size(size_root)
    size_total_f = h.format_byte_size(size_root + size_scm)

    return size_scm_f, size_root_f, size_total_f
67
67
68
68
def repo_size(ui, repo, hooktype=None, **kwargs):
    """
    Presents size of repository after push

    :param ui: mercurial ui object (unused)
    :param repo: mercurial repo object; ``repo.root`` is the walked path
    :param hooktype: hook type name (unused)
    """
    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)

    # tip changeset, reported back to the pushing client
    last_cs = repo[len(repo) - 1]

    msg = ('Repository size .hg:%s repo:%s total:%s\n'
           'Last revision is now r%s:%s\n') % (
        size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
    )

    # written to stdout so the hg protocol relays it to the client
    sys.stdout.write(msg)
88
88
89
89
def pre_push(ui, repo, **kwargs):
    """Ban pushing when the repository is locked by another user.

    pre push function, currently used to ban pushing when
    repository is locked
    """
    ex = _extract_extras()

    usr = User.get_by_username(ex.username)
    # locked_by[0] is the locking user's id; the lock owner may still push
    if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]):
        locked_by = User.get(ex.locked_by[0]).username
        # this exception is interpreted in git/hg middlewares and based
        # on that proper return code is server to client
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
        if str(_http_ret.code).startswith('2'):
            #2xx Codes don't raise exceptions
            sys.stdout.write(_http_ret.title)
        else:
            raise _http_ret
106
106
107
107
def pre_pull(ui, repo, **kwargs):
    """Notify (or ban) pulling when the repository is locked.

    pre pull function, currently used to report when
    repository is locked
    """
    ex = _extract_extras()
    if ex.locked_by[0]:
        locked_by = User.get(ex.locked_by[0]).username
        # this exception is interpreted in git/hg middlewares and based
        # on that proper return code is server to client
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
        if str(_http_ret.code).startswith('2'):
            #2xx Codes don't raise exceptions
            sys.stdout.write(_http_ret.title)
        else:
            raise _http_ret
122
122
123
123
def log_pull_action(ui, repo, **kwargs):
    """
    Logs user last pull action

    :param ui: mercurial ui object (unused)
    :param repo: mercurial repo object (unused)
    """
    ex = _extract_extras()

    user = User.get_by_username(ex.username)
    action = 'pull'
    action_logger(user, action, ex.repository, ex.ip, commit=True)

    # extension hook call
    from kallithea import EXTENSIONS
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
    if callable(callback):
        kw = {}
        kw.update(ex)
        callback(**kw)

    # pull-with-lock: lock the repo on behalf of the pulling user
    if ex.make_lock is not None and ex.make_lock:
        Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id)
        #msg = 'Made lock on repo `%s`' % repository
        #sys.stdout.write(msg)

    if ex.locked_by[0]:
        locked_by = User.get(ex.locked_by[0]).username
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
        if str(_http_ret.code).startswith('2'):
            #2xx Codes don't raise exceptions
            sys.stdout.write(_http_ret.title)
    return 0
156
156
157
157
158 def log_push_action(ui, repo, **kwargs):
158 def log_push_action(ui, repo, **kwargs):
159 """
159 """
160 Maps user last push action to new changeset id, from mercurial
160 Maps user last push action to new changeset id, from mercurial
161
161
162 :param ui:
162 :param ui:
163 :param repo: repo object containing the `ui` object
163 :param repo: repo object containing the `ui` object
164 """
164 """
165
165
166 ex = _extract_extras()
166 ex = _extract_extras()
167
167
168 action_tmpl = ex.action + ':%s'
168 action_tmpl = ex.action + ':%s'
169 revs = []
169 revs = []
170 if ex.scm == 'hg':
170 if ex.scm == 'hg':
171 node = kwargs['node']
171 node = kwargs['node']
172
172
173 def get_revs(repo, rev_opt):
173 def get_revs(repo, rev_opt):
174 if rev_opt:
174 if rev_opt:
175 revs = revrange(repo, rev_opt)
175 revs = revrange(repo, rev_opt)
176
176
177 if len(revs) == 0:
177 if len(revs) == 0:
178 return (nullrev, nullrev)
178 return (nullrev, nullrev)
179 return max(revs), min(revs)
179 return max(revs), min(revs)
180 else:
180 else:
181 return len(repo) - 1, 0
181 return len(repo) - 1, 0
182
182
183 stop, start = get_revs(repo, [node + ':'])
183 stop, start = get_revs(repo, [node + ':'])
184 _h = binascii.hexlify
184 _h = binascii.hexlify
185 revs = [_h(repo[r].node()) for r in xrange(start, stop + 1)]
185 revs = [_h(repo[r].node()) for r in xrange(start, stop + 1)]
186 elif ex.scm == 'git':
186 elif ex.scm == 'git':
187 revs = kwargs.get('_git_revs', [])
187 revs = kwargs.get('_git_revs', [])
188 if '_git_revs' in kwargs:
188 if '_git_revs' in kwargs:
189 kwargs.pop('_git_revs')
189 kwargs.pop('_git_revs')
190
190
191 action = action_tmpl % ','.join(revs)
191 action = action_tmpl % ','.join(revs)
192 action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
192 action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
193
193
194 # extension hook call
194 # extension hook call
195 from kallithea import EXTENSIONS
195 from kallithea import EXTENSIONS
196 callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
196 callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
197 if callable(callback):
197 if callable(callback):
198 kw = {'pushed_revs': revs}
198 kw = {'pushed_revs': revs}
199 kw.update(ex)
199 kw.update(ex)
200 callback(**kw)
200 callback(**kw)
201
201
202 if ex.make_lock is not None and not ex.make_lock:
202 if ex.make_lock is not None and not ex.make_lock:
203 Repository.unlock(Repository.get_by_repo_name(ex.repository))
203 Repository.unlock(Repository.get_by_repo_name(ex.repository))
204 msg = 'Released lock on repo `%s`\n' % ex.repository
204 msg = 'Released lock on repo `%s`\n' % ex.repository
205 sys.stdout.write(msg)
205 sys.stdout.write(msg)
206
206
207 if ex.locked_by[0]:
207 if ex.locked_by[0]:
208 locked_by = User.get(ex.locked_by[0]).username
208 locked_by = User.get(ex.locked_by[0]).username
209 _http_ret = HTTPLockedRC(ex.repository, locked_by)
209 _http_ret = HTTPLockedRC(ex.repository, locked_by)
210 if str(_http_ret.code).startswith('2'):
210 if str(_http_ret.code).startswith('2'):
211 #2xx Codes don't raise exceptions
211 #2xx Codes don't raise exceptions
212 sys.stdout.write(_http_ret.title)
212 sys.stdout.write(_http_ret.title)
213
213
214 return 0
214 return 0
215
215
216
216
217 def log_create_repository(repository_dict, created_by, **kwargs):
217 def log_create_repository(repository_dict, created_by, **kwargs):
218 """
218 """
219 Post create repository Hook.
219 Post create repository Hook.
220
220
221 :param repository: dict dump of repository object
221 :param repository: dict dump of repository object
222 :param created_by: username who created repository
222 :param created_by: username who created repository
223
223
224 available keys of repository_dict:
224 available keys of repository_dict:
225
225
226 'repo_type',
226 'repo_type',
227 'description',
227 'description',
228 'private',
228 'private',
229 'created_on',
229 'created_on',
230 'enable_downloads',
230 'enable_downloads',
231 'repo_id',
231 'repo_id',
232 'user_id',
232 'user_id',
233 'enable_statistics',
233 'enable_statistics',
234 'clone_uri',
234 'clone_uri',
235 'fork_id',
235 'fork_id',
236 'group_id',
236 'group_id',
237 'repo_name'
237 'repo_name'
238
238
239 """
239 """
240 from kallithea import EXTENSIONS
240 from kallithea import EXTENSIONS
241 callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
241 callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
242 if callable(callback):
242 if callable(callback):
243 kw = {}
243 kw = {}
244 kw.update(repository_dict)
244 kw.update(repository_dict)
245 kw.update({'created_by': created_by})
245 kw.update({'created_by': created_by})
246 kw.update(kwargs)
246 kw.update(kwargs)
247 return callback(**kw)
247 return callback(**kw)
248
248
249 return 0
249 return 0
250
250
251
251
252 def check_allowed_create_user(user_dict, created_by, **kwargs):
252 def check_allowed_create_user(user_dict, created_by, **kwargs):
253 # pre create hooks
253 # pre create hooks
254 from kallithea import EXTENSIONS
254 from kallithea import EXTENSIONS
255 callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
255 callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
256 if callable(callback):
256 if callable(callback):
257 allowed, reason = callback(created_by=created_by, **user_dict)
257 allowed, reason = callback(created_by=created_by, **user_dict)
258 if not allowed:
258 if not allowed:
259 raise UserCreationError(reason)
259 raise UserCreationError(reason)
260
260
261
261
262 def log_create_user(user_dict, created_by, **kwargs):
262 def log_create_user(user_dict, created_by, **kwargs):
263 """
263 """
264 Post create user Hook.
264 Post create user Hook.
265
265
266 :param user_dict: dict dump of user object
266 :param user_dict: dict dump of user object
267
267
268 available keys for user_dict:
268 available keys for user_dict:
269
269
270 'username',
270 'username',
271 'full_name_or_username',
271 'full_name_or_username',
272 'full_contact',
272 'full_contact',
273 'user_id',
273 'user_id',
274 'name',
274 'name',
275 'firstname',
275 'firstname',
276 'short_contact',
276 'short_contact',
277 'admin',
277 'admin',
278 'lastname',
278 'lastname',
279 'ip_addresses',
279 'ip_addresses',
280 'ldap_dn',
280 'ldap_dn',
281 'email',
281 'email',
282 'api_key',
282 'api_key',
283 'last_login',
283 'last_login',
284 'full_name',
284 'full_name',
285 'active',
285 'active',
286 'password',
286 'password',
287 'emails',
287 'emails',
288 'inherit_default_permissions'
288 'inherit_default_permissions'
289
289
290 """
290 """
291 from kallithea import EXTENSIONS
291 from kallithea import EXTENSIONS
292 callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None)
292 callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None)
293 if callable(callback):
293 if callable(callback):
294 return callback(created_by=created_by, **user_dict)
294 return callback(created_by=created_by, **user_dict)
295
295
296 return 0
296 return 0
297
297
298
298
299 def log_delete_repository(repository_dict, deleted_by, **kwargs):
299 def log_delete_repository(repository_dict, deleted_by, **kwargs):
300 """
300 """
301 Post delete repository Hook.
301 Post delete repository Hook.
302
302
303 :param repository: dict dump of repository object
303 :param repository: dict dump of repository object
304 :param deleted_by: username who deleted the repository
304 :param deleted_by: username who deleted the repository
305
305
306 available keys of repository_dict:
306 available keys of repository_dict:
307
307
308 'repo_type',
308 'repo_type',
309 'description',
309 'description',
310 'private',
310 'private',
311 'created_on',
311 'created_on',
312 'enable_downloads',
312 'enable_downloads',
313 'repo_id',
313 'repo_id',
314 'user_id',
314 'user_id',
315 'enable_statistics',
315 'enable_statistics',
316 'clone_uri',
316 'clone_uri',
317 'fork_id',
317 'fork_id',
318 'group_id',
318 'group_id',
319 'repo_name'
319 'repo_name'
320
320
321 """
321 """
322 from kallithea import EXTENSIONS
322 from kallithea import EXTENSIONS
323 callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
323 callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
324 if callable(callback):
324 if callable(callback):
325 kw = {}
325 kw = {}
326 kw.update(repository_dict)
326 kw.update(repository_dict)
327 kw.update({'deleted_by': deleted_by,
327 kw.update({'deleted_by': deleted_by,
328 'deleted_on': time.time()})
328 'deleted_on': time.time()})
329 kw.update(kwargs)
329 kw.update(kwargs)
330 return callback(**kw)
330 return callback(**kw)
331
331
332 return 0
332 return 0
333
333
334
334
335 def log_delete_user(user_dict, deleted_by, **kwargs):
335 def log_delete_user(user_dict, deleted_by, **kwargs):
336 """
336 """
337 Post delete user Hook.
337 Post delete user Hook.
338
338
339 :param user_dict: dict dump of user object
339 :param user_dict: dict dump of user object
340
340
341 available keys for user_dict:
341 available keys for user_dict:
342
342
343 'username',
343 'username',
344 'full_name_or_username',
344 'full_name_or_username',
345 'full_contact',
345 'full_contact',
346 'user_id',
346 'user_id',
347 'name',
347 'name',
348 'firstname',
348 'firstname',
349 'short_contact',
349 'short_contact',
350 'admin',
350 'admin',
351 'lastname',
351 'lastname',
352 'ip_addresses',
352 'ip_addresses',
353 'ldap_dn',
353 'ldap_dn',
354 'email',
354 'email',
355 'api_key',
355 'api_key',
356 'last_login',
356 'last_login',
357 'full_name',
357 'full_name',
358 'active',
358 'active',
359 'password',
359 'password',
360 'emails',
360 'emails',
361 'inherit_default_permissions'
361 'inherit_default_permissions'
362
362
363 """
363 """
364 from kallithea import EXTENSIONS
364 from kallithea import EXTENSIONS
365 callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None)
365 callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None)
366 if callable(callback):
366 if callable(callback):
367 return callback(deleted_by=deleted_by, **user_dict)
367 return callback(deleted_by=deleted_by, **user_dict)
368
368
369 return 0
369 return 0
370
370
371
371
372 handle_git_pre_receive = (lambda repo_path, revs, env:
372 handle_git_pre_receive = (lambda repo_path, revs, env:
373 handle_git_receive(repo_path, revs, env, hook_type='pre'))
373 handle_git_receive(repo_path, revs, env, hook_type='pre'))
374 handle_git_post_receive = (lambda repo_path, revs, env:
374 handle_git_post_receive = (lambda repo_path, revs, env:
375 handle_git_receive(repo_path, revs, env, hook_type='post'))
375 handle_git_receive(repo_path, revs, env, hook_type='post'))
376
376
377
377
378 def handle_git_receive(repo_path, revs, env, hook_type='post'):
378 def handle_git_receive(repo_path, revs, env, hook_type='post'):
379 """
379 """
380 A really hacky method that is run by git post-receive hook and logs
380 A really hacky method that is run by git post-receive hook and logs
381 a push action together with pushed revisions. It's executed by subprocess
381 a push action together with pushed revisions. It's executed by subprocess
382 thus needs all info to be able to create an on-the-fly pylons environment,
382 thus needs all info to be able to create an on-the-fly pylons environment,
383 connect to database and run the logging code. Hacky as sh*t but works.
383 connect to database and run the logging code. Hacky as sh*t but works.
384
384
385 :param repo_path:
385 :param repo_path:
386 :param revs:
386 :param revs:
387 :param env:
387 :param env:
388 """
388 """
389 from paste.deploy import appconfig
389 from paste.deploy import appconfig
390 from sqlalchemy import engine_from_config
390 from sqlalchemy import engine_from_config
391 from kallithea.config.environment import load_environment
391 from kallithea.config.environment import load_environment
392 from kallithea.model import init_model
392 from kallithea.model import init_model
393 from kallithea.model.db import Ui
393 from kallithea.model.db import Ui
394 from kallithea.lib.utils import make_ui
394 from kallithea.lib.utils import make_ui
395 extras = _extract_extras(env)
395 extras = _extract_extras(env)
396
396
397 path, ini_name = os.path.split(extras['config'])
397 path, ini_name = os.path.split(extras['config'])
398 conf = appconfig('config:%s' % ini_name, relative_to=path)
398 conf = appconfig('config:%s' % ini_name, relative_to=path)
399 load_environment(conf.global_conf, conf.local_conf, test_env=False,
399 load_environment(conf.global_conf, conf.local_conf, test_env=False,
400 test_index=False)
400 test_index=False)
401
401
402 engine = engine_from_config(conf, 'sqlalchemy.db1.')
402 engine = engine_from_config(conf, 'sqlalchemy.db1.')
403 init_model(engine)
403 init_model(engine)
404
404
405 baseui = make_ui('db')
405 baseui = make_ui('db')
406 # fix if it's not a bare repo
406 # fix if it's not a bare repo
407 if repo_path.endswith(os.sep + '.git'):
407 if repo_path.endswith(os.sep + '.git'):
408 repo_path = repo_path[:-5]
408 repo_path = repo_path[:-5]
409
409
410 repo = Repository.get_by_full_path(repo_path)
410 repo = Repository.get_by_full_path(repo_path)
411 if not repo:
411 if not repo:
412 raise OSError('Repository %s not found in database'
412 raise OSError('Repository %s not found in database'
413 % (safe_str(repo_path)))
413 % (safe_str(repo_path)))
414
414
415 _hooks = dict(baseui.configitems('hooks')) or {}
415 _hooks = dict(baseui.configitems('hooks')) or {}
416
416
417 if hook_type == 'pre':
417 if hook_type == 'pre':
418 repo = repo.scm_instance
418 repo = repo.scm_instance
419 else:
419 else:
420 #post push should never use the cached instance
420 #post push should never use the cached instance
421 repo = repo.scm_instance_no_cache()
421 repo = repo.scm_instance_no_cache()
422
422
423 if hook_type == 'pre':
423 if hook_type == 'pre':
424 pre_push(baseui, repo)
424 pre_push(baseui, repo)
425
425
426 # if push hook is enabled via web interface
426 # if push hook is enabled via web interface
427 elif hook_type == 'post' and _hooks.get(Ui.HOOK_PUSH):
427 elif hook_type == 'post' and _hooks.get(Ui.HOOK_PUSH):
428 rev_data = []
428 rev_data = []
429 for l in revs:
429 for l in revs:
430 old_rev, new_rev, ref = l.split(' ')
430 old_rev, new_rev, ref = l.split(' ')
431 _ref_data = ref.split('/')
431 _ref_data = ref.split('/')
432 if _ref_data[1] in ['tags', 'heads']:
432 if _ref_data[1] in ['tags', 'heads']:
433 rev_data.append({'old_rev': old_rev,
433 rev_data.append({'old_rev': old_rev,
434 'new_rev': new_rev,
434 'new_rev': new_rev,
435 'ref': ref,
435 'ref': ref,
436 'type': _ref_data[1],
436 'type': _ref_data[1],
437 'name': _ref_data[2].strip()})
437 'name': _ref_data[2].strip()})
438
438
439 git_revs = []
439 git_revs = []
440
440
441 for push_ref in rev_data:
441 for push_ref in rev_data:
442 _type = push_ref['type']
442 _type = push_ref['type']
443 if _type == 'heads':
443 if _type == 'heads':
444 if push_ref['old_rev'] == EmptyChangeset().raw_id:
444 if push_ref['old_rev'] == EmptyChangeset().raw_id:
445 # update the symbolic ref if we push new repo
445 # update the symbolic ref if we push new repo
446 if repo.is_empty():
446 if repo.is_empty():
447 repo._repo.refs.set_symbolic_ref('HEAD',
447 repo._repo.refs.set_symbolic_ref('HEAD',
448 'refs/heads/%s' % push_ref['name'])
448 'refs/heads/%s' % push_ref['name'])
449
449
450 cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'"
450 cmd = ['for-each-ref', '--format=%(refname)','refs/heads/*']
451 heads = repo.run_git_command(cmd)[0]
451 heads = repo.run_git_command(cmd)[0]
452 cmd = ['log', push_ref['new_rev'],
453 '--reverse', '--pretty=format:%H', '--not']
452 heads = heads.replace(push_ref['ref'], '')
454 heads = heads.replace(push_ref['ref'], '')
453 heads = ' '.join(map(lambda c: c.strip('\n').strip(),
455 for l in heads.splitlines():
454 heads.splitlines()))
456 cmd.append(l.strip())
455 cmd = (('log %(new_rev)s' % push_ref) +
456 ' --reverse --pretty=format:"%H" --not ' + heads)
457 git_revs += repo.run_git_command(cmd)[0].splitlines()
457 git_revs += repo.run_git_command(cmd)[0].splitlines()
458
458
459 elif push_ref['new_rev'] == EmptyChangeset().raw_id:
459 elif push_ref['new_rev'] == EmptyChangeset().raw_id:
460 #delete branch case
460 #delete branch case
461 git_revs += ['delete_branch=>%s' % push_ref['name']]
461 git_revs += ['delete_branch=>%s' % push_ref['name']]
462 else:
462 else:
463 cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) +
463 cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
464 ' --reverse --pretty=format:"%H"')
464 '--reverse', '--pretty=format:%H']
465 git_revs += repo.run_git_command(cmd)[0].splitlines()
465 git_revs += repo.run_git_command(cmd)[0].splitlines()
466
466
467 elif _type == 'tags':
467 elif _type == 'tags':
468 git_revs += ['tag=>%s' % push_ref['name']]
468 git_revs += ['tag=>%s' % push_ref['name']]
469
469
470 log_push_action(baseui, repo, _git_revs=git_revs)
470 log_push_action(baseui, repo, _git_revs=git_revs)
@@ -1,202 +1,197 b''
1 import os
1 import os
2 import socket
2 import socket
3 import logging
3 import logging
4 import traceback
4 import traceback
5
5
6 from webob import Request, Response, exc
6 from webob import Request, Response, exc
7
7
8 import kallithea
8 import kallithea
9 from kallithea.lib.vcs import subprocessio
9 from kallithea.lib.vcs import subprocessio
10
10
11 log = logging.getLogger(__name__)
11 log = logging.getLogger(__name__)
12
12
13
13
14 class FileWrapper(object):
14 class FileWrapper(object):
15
15
16 def __init__(self, fd, content_length):
16 def __init__(self, fd, content_length):
17 self.fd = fd
17 self.fd = fd
18 self.content_length = content_length
18 self.content_length = content_length
19 self.remain = content_length
19 self.remain = content_length
20
20
21 def read(self, size):
21 def read(self, size):
22 if size <= self.remain:
22 if size <= self.remain:
23 try:
23 try:
24 data = self.fd.read(size)
24 data = self.fd.read(size)
25 except socket.error:
25 except socket.error:
26 raise IOError(self)
26 raise IOError(self)
27 self.remain -= size
27 self.remain -= size
28 elif self.remain:
28 elif self.remain:
29 data = self.fd.read(self.remain)
29 data = self.fd.read(self.remain)
30 self.remain = 0
30 self.remain = 0
31 else:
31 else:
32 data = None
32 data = None
33 return data
33 return data
34
34
35 def __repr__(self):
35 def __repr__(self):
36 return '<FileWrapper %s len: %s, read: %s>' % (
36 return '<FileWrapper %s len: %s, read: %s>' % (
37 self.fd, self.content_length, self.content_length - self.remain
37 self.fd, self.content_length, self.content_length - self.remain
38 )
38 )
39
39
40
40
41 class GitRepository(object):
41 class GitRepository(object):
42 git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
42 git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
43 commands = ['git-upload-pack', 'git-receive-pack']
43 commands = ['git-upload-pack', 'git-receive-pack']
44
44
45 def __init__(self, repo_name, content_path, extras):
45 def __init__(self, repo_name, content_path, extras):
46 files = set([f.lower() for f in os.listdir(content_path)])
46 files = set([f.lower() for f in os.listdir(content_path)])
47 if not (self.git_folder_signature.intersection(files)
47 if not (self.git_folder_signature.intersection(files)
48 == self.git_folder_signature):
48 == self.git_folder_signature):
49 raise OSError('%s missing git signature' % content_path)
49 raise OSError('%s missing git signature' % content_path)
50 self.content_path = content_path
50 self.content_path = content_path
51 self.valid_accepts = ['application/x-%s-result' %
51 self.valid_accepts = ['application/x-%s-result' %
52 c for c in self.commands]
52 c for c in self.commands]
53 self.repo_name = repo_name
53 self.repo_name = repo_name
54 self.extras = extras
54 self.extras = extras
55
55
56 def _get_fixedpath(self, path):
56 def _get_fixedpath(self, path):
57 """
57 """
58 Small fix for repo_path
58 Small fix for repo_path
59
59
60 :param path:
60 :param path:
61 """
61 """
62 return path.split(self.repo_name, 1)[-1].strip('/')
62 return path.split(self.repo_name, 1)[-1].strip('/')
63
63
64 def inforefs(self, request, environ):
64 def inforefs(self, request, environ):
65 """
65 """
66 WSGI Response producer for HTTP GET Git Smart
66 WSGI Response producer for HTTP GET Git Smart
67 HTTP /info/refs request.
67 HTTP /info/refs request.
68 """
68 """
69
69
70 git_command = request.GET.get('service')
70 git_command = request.GET.get('service')
71 if git_command not in self.commands:
71 if git_command not in self.commands:
72 log.debug('command %s not allowed' % git_command)
72 log.debug('command %s not allowed' % git_command)
73 return exc.HTTPMethodNotAllowed()
73 return exc.HTTPMethodNotAllowed()
74
74
75 # note to self:
75 # note to self:
76 # please, resist the urge to add '\n' to git capture and increment
76 # please, resist the urge to add '\n' to git capture and increment
77 # line count by 1.
77 # line count by 1.
78 # The code in Git client not only does NOT need '\n', but actually
78 # The code in Git client not only does NOT need '\n', but actually
79 # blows up if you sprinkle "flush" (0000) as "0001\n".
79 # blows up if you sprinkle "flush" (0000) as "0001\n".
80 # It reads binary, per number of bytes specified.
80 # It reads binary, per number of bytes specified.
81 # if you do add '\n' as part of data, count it.
81 # if you do add '\n' as part of data, count it.
82 server_advert = '# service=%s' % git_command
82 server_advert = '# service=%s' % git_command
83 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
83 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
84 _git_path = kallithea.CONFIG.get('git_path', 'git')
84 _git_path = kallithea.CONFIG.get('git_path', 'git')
85 cmd = [_git_path, git_command[4:],
86 '--stateless-rpc', '--advertise-refs', self.content_path]
87 log.debug('handling cmd %s', cmd)
85 try:
88 try:
86 out = subprocessio.SubprocessIOChunker(
89 out = subprocessio.SubprocessIOChunker(cmd,
87 r'%s %s --stateless-rpc --advertise-refs "%s"' % (
90 starting_values=[packet_len + server_advert + '0000']
88 _git_path, git_command[4:], self.content_path),
89 starting_values=[
90 packet_len + server_advert + '0000'
91 ]
92 )
91 )
93 except EnvironmentError, e:
92 except EnvironmentError, e:
94 log.error(traceback.format_exc())
93 log.error(traceback.format_exc())
95 raise exc.HTTPExpectationFailed()
94 raise exc.HTTPExpectationFailed()
96 resp = Response()
95 resp = Response()
97 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
96 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
98 resp.charset = None
97 resp.charset = None
99 resp.app_iter = out
98 resp.app_iter = out
100 return resp
99 return resp
101
100
102 def backend(self, request, environ):
101 def backend(self, request, environ):
103 """
102 """
104 WSGI Response producer for HTTP POST Git Smart HTTP requests.
103 WSGI Response producer for HTTP POST Git Smart HTTP requests.
105 Reads commands and data from HTTP POST's body.
104 Reads commands and data from HTTP POST's body.
106 returns an iterator obj with contents of git command's
105 returns an iterator obj with contents of git command's
107 response to stdout
106 response to stdout
108 """
107 """
109 _git_path = kallithea.CONFIG.get('git_path', 'git')
108 _git_path = kallithea.CONFIG.get('git_path', 'git')
110 git_command = self._get_fixedpath(request.path_info)
109 git_command = self._get_fixedpath(request.path_info)
111 if git_command not in self.commands:
110 if git_command not in self.commands:
112 log.debug('command %s not allowed' % git_command)
111 log.debug('command %s not allowed' % git_command)
113 return exc.HTTPMethodNotAllowed()
112 return exc.HTTPMethodNotAllowed()
114
113
115 if 'CONTENT_LENGTH' in environ:
114 if 'CONTENT_LENGTH' in environ:
116 inputstream = FileWrapper(environ['wsgi.input'],
115 inputstream = FileWrapper(environ['wsgi.input'],
117 request.content_length)
116 request.content_length)
118 else:
117 else:
119 inputstream = environ['wsgi.input']
118 inputstream = environ['wsgi.input']
120
119
121 try:
120 gitenv = dict(os.environ)
122 gitenv = os.environ
123 # forget all configs
121 # forget all configs
124 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
122 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
125 opts = dict(
123 cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path]
126 env=gitenv,
124 log.debug('handling cmd %s', cmd)
127 cwd=self.content_path,
125 try:
128 )
129 cmd = r'%s %s --stateless-rpc "%s"' % (_git_path, git_command[4:],
130 self.content_path),
131 log.debug('handling cmd %s' % cmd)
132 out = subprocessio.SubprocessIOChunker(
126 out = subprocessio.SubprocessIOChunker(
133 cmd,
127 cmd,
134 inputstream=inputstream,
128 inputstream=inputstream,
135 **opts
129 env=gitenv,
130 cwd=self.content_path,
136 )
131 )
137 except EnvironmentError, e:
132 except EnvironmentError, e:
138 log.error(traceback.format_exc())
133 log.error(traceback.format_exc())
139 raise exc.HTTPExpectationFailed()
134 raise exc.HTTPExpectationFailed()
140
135
141 if git_command in [u'git-receive-pack']:
136 if git_command in [u'git-receive-pack']:
142 # updating refs manually after each push.
137 # updating refs manually after each push.
143 # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
138 # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
144 from kallithea.lib.vcs import get_repo
139 from kallithea.lib.vcs import get_repo
145 from dulwich.server import update_server_info
140 from dulwich.server import update_server_info
146 repo = get_repo(self.content_path)
141 repo = get_repo(self.content_path)
147 if repo:
142 if repo:
148 update_server_info(repo._repo)
143 update_server_info(repo._repo)
149
144
150 resp = Response()
145 resp = Response()
151 resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
146 resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
152 resp.charset = None
147 resp.charset = None
153 resp.app_iter = out
148 resp.app_iter = out
154 return resp
149 return resp
155
150
156 def __call__(self, environ, start_response):
151 def __call__(self, environ, start_response):
157 request = Request(environ)
152 request = Request(environ)
158 _path = self._get_fixedpath(request.path_info)
153 _path = self._get_fixedpath(request.path_info)
159 if _path.startswith('info/refs'):
154 if _path.startswith('info/refs'):
160 app = self.inforefs
155 app = self.inforefs
161 elif [a for a in self.valid_accepts if a in request.accept]:
156 elif [a for a in self.valid_accepts if a in request.accept]:
162 app = self.backend
157 app = self.backend
163 try:
158 try:
164 resp = app(request, environ)
159 resp = app(request, environ)
165 except exc.HTTPException, e:
160 except exc.HTTPException, e:
166 resp = e
161 resp = e
167 log.error(traceback.format_exc())
162 log.error(traceback.format_exc())
168 except Exception, e:
163 except Exception, e:
169 log.error(traceback.format_exc())
164 log.error(traceback.format_exc())
170 resp = exc.HTTPInternalServerError()
165 resp = exc.HTTPInternalServerError()
171 return resp(environ, start_response)
166 return resp(environ, start_response)
172
167
173
168
174 class GitDirectory(object):
169 class GitDirectory(object):
175
170
176 def __init__(self, repo_root, repo_name, extras):
171 def __init__(self, repo_root, repo_name, extras):
177 repo_location = os.path.join(repo_root, repo_name)
172 repo_location = os.path.join(repo_root, repo_name)
178 if not os.path.isdir(repo_location):
173 if not os.path.isdir(repo_location):
179 raise OSError(repo_location)
174 raise OSError(repo_location)
180
175
181 self.content_path = repo_location
176 self.content_path = repo_location
182 self.repo_name = repo_name
177 self.repo_name = repo_name
183 self.repo_location = repo_location
178 self.repo_location = repo_location
184 self.extras = extras
179 self.extras = extras
185
180
186 def __call__(self, environ, start_response):
181 def __call__(self, environ, start_response):
187 content_path = self.content_path
182 content_path = self.content_path
188 try:
183 try:
189 app = GitRepository(self.repo_name, content_path, self.extras)
184 app = GitRepository(self.repo_name, content_path, self.extras)
190 except (AssertionError, OSError):
185 except (AssertionError, OSError):
191 content_path = os.path.join(content_path, '.git')
186 content_path = os.path.join(content_path, '.git')
192 if os.path.isdir(content_path):
187 if os.path.isdir(content_path):
193 app = GitRepository(self.repo_name, content_path, self.extras)
188 app = GitRepository(self.repo_name, content_path, self.extras)
194 else:
189 else:
195 return exc.HTTPNotFound()(environ, start_response)
190 return exc.HTTPNotFound()(environ, start_response)
196 return app(environ, start_response)
191 return app(environ, start_response)
197
192
198
193
199 def make_wsgi_app(repo_name, repo_root, extras):
194 def make_wsgi_app(repo_name, repo_root, extras):
200 from dulwich.web import LimitedInputFilter, GunzipFilter
195 from dulwich.web import LimitedInputFilter, GunzipFilter
201 app = GitDirectory(repo_root, repo_name, extras)
196 app = GitDirectory(repo_root, repo_name, extras)
202 return GunzipFilter(LimitedInputFilter(app))
197 return GunzipFilter(LimitedInputFilter(app))
@@ -1,872 +1,872 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 kallithea.lib.utils
15 kallithea.lib.utils
16 ~~~~~~~~~~~~~~~~~~~
16 ~~~~~~~~~~~~~~~~~~~
17
17
18 Utilities library for Kallithea
18 Utilities library for Kallithea
19
19
20 This file was forked by the Kallithea project in July 2014.
20 This file was forked by the Kallithea project in July 2014.
21 Original author and date, and relevant copyright and licensing information is below:
21 Original author and date, and relevant copyright and licensing information is below:
22 :created_on: Apr 18, 2010
22 :created_on: Apr 18, 2010
23 :author: marcink
23 :author: marcink
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 :license: GPLv3, see LICENSE.md for more details.
25 :license: GPLv3, see LICENSE.md for more details.
26 """
26 """
27
27
28 import os
28 import os
29 import re
29 import re
30 import logging
30 import logging
31 import datetime
31 import datetime
32 import traceback
32 import traceback
33 import paste
33 import paste
34 import beaker
34 import beaker
35 import tarfile
35 import tarfile
36 import shutil
36 import shutil
37 import decorator
37 import decorator
38 import warnings
38 import warnings
39 from os.path import abspath
39 from os.path import abspath
40 from os.path import dirname as dn, join as jn
40 from os.path import dirname as dn, join as jn
41
41
42 from paste.script.command import Command, BadCommand
42 from paste.script.command import Command, BadCommand
43
43
44 from webhelpers.text import collapse, remove_formatting, strip_tags
44 from webhelpers.text import collapse, remove_formatting, strip_tags
45 from beaker.cache import _cache_decorate
45 from beaker.cache import _cache_decorate
46
46
47 from kallithea import BRAND
47 from kallithea import BRAND
48
48
49 from kallithea.lib.vcs.utils.hgcompat import ui, config
49 from kallithea.lib.vcs.utils.hgcompat import ui, config
50 from kallithea.lib.vcs.utils.helpers import get_scm
50 from kallithea.lib.vcs.utils.helpers import get_scm
51 from kallithea.lib.vcs.exceptions import VCSError
51 from kallithea.lib.vcs.exceptions import VCSError
52
52
53 from kallithea.lib.caching_query import FromCache
53 from kallithea.lib.caching_query import FromCache
54
54
55 from kallithea.model import meta
55 from kallithea.model import meta
56 from kallithea.model.db import Repository, User, Ui, \
56 from kallithea.model.db import Repository, User, Ui, \
57 UserLog, RepoGroup, Setting, CacheInvalidation, UserGroup
57 UserLog, RepoGroup, Setting, CacheInvalidation, UserGroup
58 from kallithea.model.meta import Session
58 from kallithea.model.meta import Session
59 from kallithea.model.repo_group import RepoGroupModel
59 from kallithea.model.repo_group import RepoGroupModel
60 from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser
60 from kallithea.lib.utils2 import safe_str, safe_unicode, get_current_authuser
61 from kallithea.lib.vcs.utils.fakemod import create_module
61 from kallithea.lib.vcs.utils.fakemod import create_module
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
65 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
66
66
67
67
def recursive_replace(str_, replace=' '):
    """
    Collapse every run of consecutive *replace* characters to a single one.

    :param str_: string to process
    :param replace: character whose repeated runs are squashed

    Examples::
        >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
        'Mighty-Mighty-Bo-sstones'
    """
    # iterative equivalent of the original recursion: keep squashing
    # doubled occurrences until none remain
    doubled = replace * 2
    while doubled in str_:
        str_ = str_.replace(doubled, replace)
    return str_
85
85
86
86
def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    # strip markup first, then replace every shell/URL-unsafe character
    slug = strip_tags(remove_formatting(value))
    for bad_char in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
        slug = slug.replace(bad_char, '-')
    # squash and trim runs of the replacement dash
    slug = recursive_replace(slug, '-')
    return collapse(slug, '-')
102
102
103
103
104 #==============================================================================
104 #==============================================================================
105 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
105 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
106 #==============================================================================
106 #==============================================================================
def get_repo_slug(request):
    """Return the repository name from the routing dict, without a
    trailing slash, or the raw (falsy) value when absent."""
    name = request.environ['pylons.routes_dict'].get('repo_name')
    return name.rstrip('/') if name else name
112
112
113
113
def get_repo_group_slug(request):
    """Return the repository group name from the routing dict, without a
    trailing slash, or the raw (falsy) value when absent."""
    name = request.environ['pylons.routes_dict'].get('group_name')
    return name.rstrip('/') if name else name
119
119
120
120
def get_user_group_slug(request):
    """Resolve the routing 'id' to a user group name, or None when the
    group does not exist."""
    group_id = request.environ['pylons.routes_dict'].get('id')
    group = UserGroup.get(group_id)
    if group:
        return group.users_group_name
    return None
127
127
128
128
def _extract_id_from_repo_name(repo_name):
    """Return the numeric id string from a ``_<id>/...`` style repo name,
    or None when the name does not use the by-id scheme."""
    # lstrip is a no-op when there is no leading slash
    match = re.match(r'^_(\d{1,})', repo_name.lstrip('/'))
    if match:
        return match.group(1)
135
135
136
136
def get_repo_by_id(repo_name):
    """
    Extracts repo_name by id from special urls. Example url is _11/repo_name

    :param repo_name: url part with a leading ``_<id>`` marker
    :return: repo_name if matched else None
    """
    # NOTE: Repository is already imported at module level; the previous
    # function-local re-import was redundant and has been dropped.
    _repo_id = _extract_id_from_repo_name(repo_name)
    if _repo_id:
        repo = Repository.get(_repo_id)
        if repo:
            # TODO: return repo instead of reponame? or would that be a layering violation?
            return repo.repo_name
    return None
152
152
153
153
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
    """
    Record an action performed by a user as a UserLog row.

    :param user: acting user -- a unique username string or an object
        carrying a ``user_id`` attribute
    :param action: one of the predefined action identifiers (kept unique
        for easy translation)
    :param repo: repository the action touched -- a name string or an
        object carrying ``repo_id``
    :param ipaddr: optional originating IP; falls back to the address of
        the current authenticated user when empty
    :param sa: optional sqlalchemy session; a fresh one is used otherwise
    :param commit: when True, commit the session after adding the entry
    """
    sa = sa or meta.Session()
    # if we don't get explicit IP address try to get one from registered
    # user in tmpl context var
    if not ipaddr:
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')

    # resolve the acting user to a db object
    if getattr(user, 'user_id', None):
        user_obj = User.get(user.user_id)
    elif isinstance(user, basestring):
        user_obj = User.get_by_username(user)
    else:
        raise Exception('You have to provide a user object or a username')

    # resolve the repository to (db object, name); both may be empty
    if getattr(repo, 'repo_id', None):
        repo_obj = Repository.get(repo.repo_id)
        repo_name = repo_obj.repo_name
    elif isinstance(repo, basestring):
        repo_name = repo.lstrip('/')
        repo_obj = Repository.get_by_repo_name(repo_name)
    else:
        repo_obj = None
        repo_name = ''

    entry = UserLog()
    entry.user_id = user_obj.user_id
    entry.username = user_obj.username
    entry.action = safe_unicode(action)
    entry.repository = repo_obj
    entry.repository_name = repo_name
    entry.action_date = datetime.datetime.now()
    entry.user_ip = ipaddr
    sa.add(entry)

    log.info('Logging action:%s on %s by user:%s ip:%s' %
             (action, safe_unicode(repo), user_obj, ipaddr))
    if commit:
        sa.commit()
209
209
210
210
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: ignore directories matching the
        ``rm__...`` removed-repo naming scheme
    """
    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))

    def _walk(top):
        # directory must be readable and traversable to be scanned at all
        if not os.access(top, os.R_OK) or not os.access(top, os.X_OK):
            log.warning('ignoring repo path without access: %s' % (top,))
            return
        if not os.access(top, os.W_OK):
            log.warning('repo path without write access: %s' % (top,))
        for entry in os.listdir(top):
            candidate = os.path.join(top, entry)
            if os.path.isfile(candidate):
                continue

            # skip repos renamed out of the way by the removal scheme
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip .<something> dirs
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(candidate)
                # yield the repo name relative to the scanned root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself -- descend, it may contain repos
                if not os.path.islink(candidate) and os.path.isdir(candidate):
                    for nested in _walk(candidate):
                        yield nested

    return _walk(path)
255
255
256
256
def is_valid_repo(repo_name, base_path, scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If scm param is given also compare if given scm is the same as expected
    from scm parameter

    :param repo_name:
    :param base_path:
    :param scm: optional scm type alias to compare against

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))

    try:
        detected = get_scm(full_path)
    except VCSError:
        return False
    if scm:
        return detected[0] == scm
    return True
278
278
279
279
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group False otherwise

    :param repo_group_name:
    :param base_path:
    :param skip_path_check: accept the name without requiring the
        directory to exist on disk
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))

    # a path that already is a repository cannot be a group
    if is_valid_repo(repo_group_name, base_path):
        return False

    # bare git repos contain plain directories (branches/hooks/info/
    # objects...), so also reject anything whose parent is a repository
    try:
        get_scm(os.path.dirname(full_path))
        return False
    except VCSError:
        pass

    # finally the group must exist on disk, unless the caller opted out
    return bool(skip_path_check or os.path.isdir(full_path))
307
307
308
308
309 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
309 def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
310 while True:
310 while True:
311 ok = raw_input(prompt)
311 ok = raw_input(prompt)
312 if ok in ('y', 'ye', 'yes'):
312 if ok in ('y', 'ye', 'yes'):
313 return True
313 return True
314 if ok in ('n', 'no', 'nop', 'nope'):
314 if ok in ('n', 'no', 'nop', 'nope'):
315 return False
315 return False
316 retries = retries - 1
316 retries = retries - 1
317 if retries < 0:
317 if retries < 0:
318 raise IOError
318 raise IOError
319 print complaint
319 print complaint
320
320
# propagated from mercurial documentation: only these hgrc sections are
# copied from config files / the database into the mercurial ui object
# by make_ui() below
ui_sections = ['alias', 'auth',
                'decode/encode', 'defaults',
                'diff', 'email',
                'extensions', 'format',
                'merge-patterns', 'merge-tools',
                'hooks', 'http_proxy',
                'smtp', 'patch',
                'paths', 'profiling',
                'server', 'trusted',
                'ui', 'web', ]
332
332
333
333
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
    """
    A function that will read python rc files or database
    and make an mercurial ui object from read options

    :param path: path to mercurial config file (used when read_from='file')
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    :param clear_session: remove the sqlalchemy session after a db read

    NOTE: returns False (not a ui object) when read_from='file' and the
    file at *path* does not exist.
    """

    baseui = ui.ui()

    # clean the baseui object: drop any pre-loaded overlay/user/trusted
    # configs so only the options read below are in effect
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.debug('hgrc file is not present at %s, skipping...' % path)
            return False
        log.debug('reading hgrc from %s' % path)
        cfg = config.config()
        cfg.read(path)
        # copy only whitelisted sections (see ui_sections above)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(Ui).all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                ui_val = safe_str(ui_.ui_value)
                # rewrite hooks recorded under a rebranded module path so
                # they keep pointing at the canonical kallithea package
                if ui_.ui_section == 'hooks' and BRAND != 'kallithea' and ui_val.startswith('python:' + BRAND + '.lib.hooks.'):
                    ui_val = ui_val.replace('python:' + BRAND + '.lib.hooks.', 'python:kallithea.lib.hooks.')
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_val)
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                 ui_val)
                if ui_.ui_key == 'push_ssl':
                    # force set push_ssl requirement to False, kallithea
                    # handles that
                    baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
                                     False)
        if clear_session:
            meta.Session.remove()

    # prevent interactive questions for ssh password / passphrase
    ssh = baseui.config('ui', 'ssh', default='ssh')
    baseui.setconfig('ui', 'ssh', '%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh)

    return baseui
390
390
391
391
def set_app_settings(config):
    """
    Updates pylons config with new settings from database

    :param config: pylons config dict-like object to update in place
    """
    for key, value in Setting.get_app_settings().items():
        config[key] = value
402
402
403
403
def set_vcs_config(config):
    """
    Patch VCS config with some Kallithea specific stuff

    :param config: kallithea.CONFIG
    """
    import kallithea
    from kallithea.lib.vcs import conf
    from kallithea.lib.utils2 import aslist

    settings = conf.settings
    settings.BACKENDS = {
        'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
        'git': 'kallithea.lib.vcs.backends.git.GitRepository',
    }
    settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
    settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
    settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', 'utf8'),
                                        sep=',')
422
422
423
423
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    """
    sa = meta.Session()
    parts = path.split(Repository.url_sep())
    # last element is repo in nested groups structure
    parts = parts[:-1]

    rgm = RepoGroupModel(sa)
    owner = User.get_first_admin()
    parent = None
    group = None
    for depth, _segment in enumerate(parts):
        group_name = '/'.join(parts[:depth + 1])
        group = RepoGroup.get_by_group_name(group_name)

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s'
                      % (depth, group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = '%s group' % group_name
            group.user = owner
            sa.add(group)
            sa.add(rgm._create_default_perms(group))
            # flush so the next level can reference this group as parent
            sa.flush()

        parent = group
    return group
463
463
464
464
465 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
465 def repo2db_mapper(initial_repo_list, remove_obsolete=False,
466 install_git_hook=False, user=None):
466 install_git_hook=False, user=None):
467 """
467 """
468 maps all repos given in initial_repo_list, non existing repositories
468 maps all repos given in initial_repo_list, non existing repositories
469 are created, if remove_obsolete is True it also check for db entries
469 are created, if remove_obsolete is True it also check for db entries
470 that are not in initial_repo_list and removes them.
470 that are not in initial_repo_list and removes them.
471
471
472 :param initial_repo_list: list of repositories found by scanning methods
472 :param initial_repo_list: list of repositories found by scanning methods
473 :param remove_obsolete: check for obsolete entries in database
473 :param remove_obsolete: check for obsolete entries in database
474 :param install_git_hook: if this is True, also check and install githook
474 :param install_git_hook: if this is True, also check and install githook
475 for a repo if missing
475 for a repo if missing
476 """
476 """
477 from kallithea.model.repo import RepoModel
477 from kallithea.model.repo import RepoModel
478 from kallithea.model.scm import ScmModel
478 from kallithea.model.scm import ScmModel
479 sa = meta.Session()
479 sa = meta.Session()
480 repo_model = RepoModel()
480 repo_model = RepoModel()
481 if user is None:
481 if user is None:
482 user = User.get_first_admin()
482 user = User.get_first_admin()
483 added = []
483 added = []
484
484
485 ##creation defaults
485 ##creation defaults
486 defs = Setting.get_default_repo_settings(strip_prefix=True)
486 defs = Setting.get_default_repo_settings(strip_prefix=True)
487 enable_statistics = defs.get('repo_enable_statistics')
487 enable_statistics = defs.get('repo_enable_statistics')
488 enable_locking = defs.get('repo_enable_locking')
488 enable_locking = defs.get('repo_enable_locking')
489 enable_downloads = defs.get('repo_enable_downloads')
489 enable_downloads = defs.get('repo_enable_downloads')
490 private = defs.get('repo_private')
490 private = defs.get('repo_private')
491
491
492 for name, repo in initial_repo_list.items():
492 for name, repo in initial_repo_list.items():
493 group = map_groups(name)
493 group = map_groups(name)
494 unicode_name = safe_unicode(name)
494 unicode_name = safe_unicode(name)
495 db_repo = repo_model.get_by_repo_name(unicode_name)
495 db_repo = repo_model.get_by_repo_name(unicode_name)
496 # found repo that is on filesystem not in Kallithea database
496 # found repo that is on filesystem not in Kallithea database
497 if not db_repo:
497 if not db_repo:
498 log.info('repository %s not found, creating now' % name)
498 log.info('repository %s not found, creating now' % name)
499 added.append(name)
499 added.append(name)
500 desc = (repo.description
500 desc = (repo.description
501 if repo.description != 'unknown'
501 if repo.description != 'unknown'
502 else '%s repository' % name)
502 else '%s repository' % name)
503
503
504 new_repo = repo_model._create_repo(
504 new_repo = repo_model._create_repo(
505 repo_name=name,
505 repo_name=name,
506 repo_type=repo.alias,
506 repo_type=repo.alias,
507 description=desc,
507 description=desc,
508 repo_group=getattr(group, 'group_id', None),
508 repo_group=getattr(group, 'group_id', None),
509 owner=user,
509 owner=user,
510 enable_locking=enable_locking,
510 enable_locking=enable_locking,
511 enable_downloads=enable_downloads,
511 enable_downloads=enable_downloads,
512 enable_statistics=enable_statistics,
512 enable_statistics=enable_statistics,
513 private=private,
513 private=private,
514 state=Repository.STATE_CREATED
514 state=Repository.STATE_CREATED
515 )
515 )
516 sa.commit()
516 sa.commit()
517 # we added that repo just now, and make sure it has githook
517 # we added that repo just now, and make sure it has githook
518 # installed, and updated server info
518 # installed, and updated server info
519 if new_repo.repo_type == 'git':
519 if new_repo.repo_type == 'git':
520 git_repo = new_repo.scm_instance
520 git_repo = new_repo.scm_instance
521 ScmModel().install_git_hook(git_repo)
521 ScmModel().install_git_hook(git_repo)
522 # update repository server-info
522 # update repository server-info
523 log.debug('Running update server info')
523 log.debug('Running update server info')
524 git_repo._update_server_info()
524 git_repo._update_server_info()
525 new_repo.update_changeset_cache()
525 new_repo.update_changeset_cache()
526 elif install_git_hook:
526 elif install_git_hook:
527 if db_repo.repo_type == 'git':
527 if db_repo.repo_type == 'git':
528 ScmModel().install_git_hook(db_repo.scm_instance)
528 ScmModel().install_git_hook(db_repo.scm_instance)
529
529
530 removed = []
530 removed = []
531 # remove from database those repositories that are not in the filesystem
531 # remove from database those repositories that are not in the filesystem
532 for repo in sa.query(Repository).all():
532 for repo in sa.query(Repository).all():
533 if repo.repo_name not in initial_repo_list.keys():
533 if repo.repo_name not in initial_repo_list.keys():
534 if remove_obsolete:
534 if remove_obsolete:
535 log.debug("Removing non-existing repository found in db `%s`" %
535 log.debug("Removing non-existing repository found in db `%s`" %
536 repo.repo_name)
536 repo.repo_name)
537 try:
537 try:
538 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
538 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
539 sa.commit()
539 sa.commit()
540 except Exception:
540 except Exception:
541 #don't hold further removals on error
541 #don't hold further removals on error
542 log.error(traceback.format_exc())
542 log.error(traceback.format_exc())
543 sa.rollback()
543 sa.rollback()
544 removed.append(repo.repo_name)
544 removed.append(repo.repo_name)
545 return added, removed
545 return added, removed
546
546
547
547
548 # set cache regions for beaker so celery can utilise it
548 # set cache regions for beaker so celery can utilise it
def add_cache(settings):
    """Register beaker cache regions from a paste/pylons settings dict.

    Collects every ``beaker.cache.*`` / ``cache.*`` key, then registers each
    region named in the comma-separated ``regions`` option into
    ``beaker.cache.cache_regions`` so celery can use the same caches.

    :param settings: configuration mapping (e.g. the pylons config)
    """
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                # slice off the prefix instead of split(prefix): split would
                # mangle option names that themselves contain the prefix
                # substring (e.g. 'cache.mycache.type')
                name = key[len(prefix):].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            # fall back to the global lock/data dirs when the region does
            # not define its own
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings
574
574
575
575
def load_rcextensions(root_path):
    """Load the optional ``rcextensions`` package found under *root_path*
    and apply its customizations (extra lexer mappings, index extensions).

    Does nothing when no ``rcextensions/__init__.py`` exists.
    """
    import kallithea
    from kallithea.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if not os.path.isfile(path):
        return

    rcext = create_module('rc', path)
    EXT = kallithea.EXTENSIONS = rcext
    log.debug('Found rcextensions now loading %s...' % rcext)

    # Additional mappings that are not present in the pygments lexers
    conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

    # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
    if getattr(EXT, 'INDEX_EXTENSIONS', []):
        log.debug('settings custom INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

    # ADDITIONAL MAPPINGS
    log.debug('adding extra into INDEX_EXTENSIONS')
    conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

    # auto check if the module is not missing any data, set to default if is
    # this will help autoupdate new feature of rcext module
    #from kallithea.config import rcextensions
    #for k in dir(rcextensions):
    #    if not k.startswith('_') and not hasattr(EXT, k):
    #        setattr(EXT, k, getattr(rcextensions, k))
605
605
606
606
def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import kallithea
    from pygments import lexers

    # no rcextensions loaded at all -> nothing custom can be defined
    if not kallithea.EXTENSIONS:
        return None
    # extension not overridden by rcextensions
    if extension not in kallithea.EXTENSIONS.EXTRA_LEXERS:
        return None
    lexer_name = kallithea.EXTENSIONS.EXTRA_LEXERS[extension]
    return lexers.get_lexer_by_name(lexer_name)
618
618
619
619
620 #==============================================================================
620 #==============================================================================
621 # TEST FUNCTIONS AND CREATORS
621 # TEST FUNCTIONS AND CREATORS
622 #==============================================================================
622 #==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param repo_location: location of the repositories to index
    :param config: test config
    :param full_index: whether to rebuild the index from scratch
    """
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    from kallithea.lib.pidlock import DaemonLock, LockHeld

    index_location = os.path.join(config['app_conf']['index_dir'])
    if not os.path.exists(index_location):
        os.makedirs(index_location)

    try:
        # take the lock so two test runs never index concurrently
        lock = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
        indexer = WhooshIndexingDaemon(index_location=index_location,
                                       repo_location=repo_location)
        indexer.run(full_index=full_index)
        lock.release()
    except LockHeld:
        # someone else is already indexing - nothing to do
        pass
648
648
649
649
def create_test_env(repos_test_path, config):
    """
    Makes a fresh database and
    install test repository into tmp dir
    """
    from kallithea.lib.db_manage import DbManage
    from kallithea.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s' % dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()

    # PART TWO make test repo
    log.debug('making test vcs repositories')

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    # clean index and data from any previous run
    for stale_dir in (idx_path, data_path):
        if stale_dir and os.path.exists(stale_dir):
            log.debug('remove %s' % stale_dir)
            shutil.rmtree(stale_dir)

    # CREATE DEFAULT TEST REPOS from the bundled fixture tarballs
    cur_dir = dn(dn(abspath(__file__)))
    for fixture, target in (("vcs_test_hg.tar.gz", HG_REPO),
                            ("vcs_test_git.tar.gz", GIT_REPO)):
        tar = tarfile.open(jn(cur_dir, 'tests', 'fixtures', fixture))
        tar.extractall(jn(TESTS_TMP_PATH, target))
        tar.close()

    # LOAD VCS test stuff
    from kallithea.tests.vcs import setup_package
    setup_package()
706
706
707
707
708 #==============================================================================
708 #==============================================================================
709 # PASTER COMMANDS
709 # PASTER COMMANDS
710 #==============================================================================
710 #==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        # The previous check was `isinstance(log, logging)`, which compares
        # against the logging *module* and therefore always raises
        # TypeError when `log` is truthy.  Since `log` is invoked below,
        # test for callability instead.
        if log and callable(log):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.

        :param conf: path to a paster .ini file
        """
        from pylons import config as pylonsconfig

        self.path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)

    def _init_session(self):
        """
        Inits SqlAlchemy Session
        """
        # configure logging from the same ini file the command was given
        logging.config.fileConfig(self.path_to_ini_file)
        from pylons import config
        from kallithea.model import init_model
        from kallithea.lib.utils2 import engine_from_config

        #get to remove repos !!
        add_cache(config)
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)
787
787
788
788
def check_git_version():
    """
    Checks what version of git is installed in system, and issues a warning
    if it's too old for Kallithea to work properly.

    :return: detected version as ``StrictVersion``, or ``None`` when the
        git backend is disabled
    """
    from kallithea import BACKENDS
    from kallithea.lib.vcs.backends.git.repository import GitRepository
    from kallithea.lib.vcs.conf import settings
    from distutils.version import StrictVersion

    if 'git' not in BACKENDS:
        return None

    # run `git --version`; _safe=True means errors come back via stderr
    # instead of raising
    stdout, stderr = GitRepository._run_git_command(['--version'], _bare=True,
                                                    _safe=True)

    # raw string with escaped dots: the old pattern "\d+.\d+.\d+" let the
    # unescaped dots match any character (and relied on a non-raw string)
    m = re.search(r'\d+\.\d+\.\d+', stdout)
    if m:
        ver = StrictVersion(m.group(0))
    else:
        # could not parse a version - treat as ancient so the warning fires
        ver = StrictVersion('0.0.0')

    req_ver = StrictVersion('1.7.4')

    log.debug('Git executable: "%s" version %s detected: %s'
              % (settings.GIT_EXECUTABLE_PATH, ver, stdout))
    if stderr:
        log.warning('Error detecting git version: %r' % stderr)
    elif ver < req_ver:
        log.warning('Kallithea detected git version %s, which is too old '
                    'for the system to function properly. '
                    'Please upgrade to version %s or later.' % (ver, req_ver))
    return ver
822
822
823
823
@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it.

    """
    from pylons.decorators.util import get_pylons
    from kallithea.lib.compat import json

    pylons = get_pylons(args)
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    result = func(*args, **kwargs)
    if isinstance(result, (list, tuple)):
        # top-level JSON arrays are exploitable - warn loudly but still emit
        msg = ("JSON responses with Array envelopes are susceptible to "
               "cross-site data leak attacks, see "
               "http://wiki.pylonshq.com/display/pylonsfaq/Warnings")
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return json.dumps(result, encoding='utf-8')
846
846
847
847
def conditional_cache(region, prefix, condition, func):
    """
    Conditional caching function use like::
        def _c(arg):
            #heavy computation function
            return data

        # depending on the condition, the computation is wrapped in cache or not
        compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func)
        return compute(arg)

    :param region: name of cache region
    :param prefix: cache region prefix
    :param condition: condition for cache to be triggered, and return data cached
    :param func: wrapped heavy function to compute
    :return: *func* itself when *condition* is falsy, otherwise *func*
        wrapped in the given beaker cache region

    """
    wrapped = func
    if condition:
        # the format arguments were previously passed as (region, func),
        # printing the region where the text says "func" and vice versa -
        # pass them in the order the message expects
        log.debug('conditional_cache: True, wrapping call of '
                  'func: %s into %s region cache' % (func, region))
        wrapped = _cache_decorate((prefix,), None, None, region)(func)

    return wrapped
@@ -1,548 +1,548 b''
1 import re
1 import re
2 from itertools import chain
2 from itertools import chain
3 from dulwich import objects
3 from dulwich import objects
4 from subprocess import Popen, PIPE
4 from subprocess import Popen, PIPE
5
5
6 from kallithea.lib.vcs.conf import settings
6 from kallithea.lib.vcs.conf import settings
7 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
7 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
8 from kallithea.lib.vcs.exceptions import (
8 from kallithea.lib.vcs.exceptions import (
9 RepositoryError, ChangesetError, NodeDoesNotExistError, VCSError,
9 RepositoryError, ChangesetError, NodeDoesNotExistError, VCSError,
10 ChangesetDoesNotExistError, ImproperArchiveTypeError
10 ChangesetDoesNotExistError, ImproperArchiveTypeError
11 )
11 )
12 from kallithea.lib.vcs.nodes import (
12 from kallithea.lib.vcs.nodes import (
13 FileNode, DirNode, NodeKind, RootNode, RemovedFileNode, SubModuleNode,
13 FileNode, DirNode, NodeKind, RootNode, RemovedFileNode, SubModuleNode,
14 ChangedFileNodesGenerator, AddedFileNodesGenerator, RemovedFileNodesGenerator
14 ChangedFileNodesGenerator, AddedFileNodesGenerator, RemovedFileNodesGenerator
15 )
15 )
16 from kallithea.lib.vcs.utils import (
16 from kallithea.lib.vcs.utils import (
17 safe_unicode, safe_str, safe_int, date_fromtimestamp
17 safe_unicode, safe_str, safe_int, date_fromtimestamp
18 )
18 )
19 from kallithea.lib.vcs.utils.lazy import LazyProperty
19 from kallithea.lib.vcs.utils.lazy import LazyProperty
20
20
21
21
22 class GitChangeset(BaseChangeset):
22 class GitChangeset(BaseChangeset):
23 """
23 """
24 Represents state of the repository at single revision.
24 Represents state of the repository at single revision.
25 """
25 """
26
26
    def __init__(self, repository, revision):
        """Bind this changeset to *repository* at *revision*.

        :param repository: owning git repository wrapper (provides ``_repo``,
            a dulwich repo, and the ``revisions`` list)
        :param revision: commit sha (or annotated-tag id) as str/unicode
        :raises RepositoryError: when no object with that id exists
        """
        self._stat_modes = {}  # path -> git stat mode, filled lazily
        self.repository = repository
        revision = safe_str(revision)
        try:
            commit = self.repository._repo[revision]
            if isinstance(commit, objects.Tag):
                # annotated tag: dereference to the commit it points at
                revision = safe_str(commit.object[1])
                commit = self.repository._repo.get_object(commit.object[1])
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % revision)
        self.raw_id = revision
        self.id = self.raw_id
        self.short_id = self.raw_id[:12]
        self._commit = commit
        self._tree_id = commit.tree
        # names of the dulwich commit attributes the lazy properties read
        self._committer_property = 'committer'
        self._author_property = 'author'
        self._date_property = 'commit_time'
        self._date_tz_property = 'commit_timezone'
        # numeric index of this commit in the repository's revision list
        self.revision = repository.revisions.index(revision)

        self.nodes = {}   # path -> Node cache
        self._paths = {}  # path -> git object id cache
51
51
52 @LazyProperty
52 @LazyProperty
53 def message(self):
53 def message(self):
54 return safe_unicode(self._commit.message)
54 return safe_unicode(self._commit.message)
55
55
56 @LazyProperty
56 @LazyProperty
57 def committer(self):
57 def committer(self):
58 return safe_unicode(getattr(self._commit, self._committer_property))
58 return safe_unicode(getattr(self._commit, self._committer_property))
59
59
60 @LazyProperty
60 @LazyProperty
61 def author(self):
61 def author(self):
62 return safe_unicode(getattr(self._commit, self._author_property))
62 return safe_unicode(getattr(self._commit, self._author_property))
63
63
64 @LazyProperty
64 @LazyProperty
65 def date(self):
65 def date(self):
66 return date_fromtimestamp(getattr(self._commit, self._date_property),
66 return date_fromtimestamp(getattr(self._commit, self._date_property),
67 getattr(self._commit, self._date_tz_property))
67 getattr(self._commit, self._date_tz_property))
68
68
69 @LazyProperty
69 @LazyProperty
70 def _timestamp(self):
70 def _timestamp(self):
71 return getattr(self._commit, self._date_property)
71 return getattr(self._commit, self._date_property)
72
72
73 @LazyProperty
73 @LazyProperty
74 def status(self):
74 def status(self):
75 """
75 """
76 Returns modified, added, removed, deleted files for current changeset
76 Returns modified, added, removed, deleted files for current changeset
77 """
77 """
78 return self.changed, self.added, self.removed
78 return self.changed, self.added, self.removed
79
79
80 @LazyProperty
80 @LazyProperty
81 def tags(self):
81 def tags(self):
82 _tags = []
82 _tags = []
83 for tname, tsha in self.repository.tags.iteritems():
83 for tname, tsha in self.repository.tags.iteritems():
84 if tsha == self.raw_id:
84 if tsha == self.raw_id:
85 _tags.append(tname)
85 _tags.append(tname)
86 return _tags
86 return _tags
87
87
88 @LazyProperty
88 @LazyProperty
89 def branch(self):
89 def branch(self):
90
90
91 heads = self.repository._heads(reverse=False)
91 heads = self.repository._heads(reverse=False)
92
92
93 ref = heads.get(self.raw_id)
93 ref = heads.get(self.raw_id)
94 if ref:
94 if ref:
95 return safe_unicode(ref)
95 return safe_unicode(ref)
96
96
97 def _fix_path(self, path):
97 def _fix_path(self, path):
98 """
98 """
99 Paths are stored without trailing slash so we need to get rid off it if
99 Paths are stored without trailing slash so we need to get rid off it if
100 needed.
100 needed.
101 """
101 """
102 if path.endswith('/'):
102 if path.endswith('/'):
103 path = path.rstrip('/')
103 path = path.rstrip('/')
104 return path
104 return path
105
105
    def _get_id_for_path(self, path):
        """Resolve *path* to the git object id at this changeset's tree.

        Walks the tree directory by directory, memoizing every entry seen
        along the way in ``self._paths`` (object ids) and
        ``self._stat_modes`` (stat modes), so subsequent lookups of
        siblings are free.

        :param path: path within the repository (leading/trailing slashes
            are stripped)
        :raises ChangesetError: when an intermediate component is missing
            or is not a directory
        :raises NodeDoesNotExistError: when the final path does not exist
        """
        path = safe_str(path)
        # FIXME: Please, spare a couple of minutes and make those codes cleaner;
        if not path in self._paths:
            path = path.strip('/')
            # set root tree
            tree = self.repository._repo[self._tree_id]
            if path == '':
                self._paths[''] = tree.id
                return tree.id
            splitted = path.split('/')
            dirs, name = splitted[:-1], splitted[-1]
            curdir = ''

            # initially extract things from root dir
            for item, stat, id in tree.iteritems():
                if curdir:
                    name = '/'.join((curdir, item))
                else:
                    name = item
                self._paths[name] = id
                self._stat_modes[name] = stat

            # descend one directory component at a time
            for dir in dirs:
                if curdir:
                    curdir = '/'.join((curdir, dir))
                else:
                    curdir = dir
                dir_id = None
                for item, stat, id in tree.iteritems():
                    if dir == item:
                        dir_id = id
                if dir_id:
                    # Update tree
                    tree = self.repository._repo[dir_id]
                    if not isinstance(tree, objects.Tree):
                        raise ChangesetError('%s is not a directory' % curdir)
                else:
                    raise ChangesetError('%s have not been found' % curdir)

                # cache all items from the given traversed tree
                for item, stat, id in tree.iteritems():
                    if curdir:
                        name = '/'.join((curdir, item))
                    else:
                        name = item
                    self._paths[name] = id
                    self._stat_modes[name] = stat
            if not path in self._paths:
                raise NodeDoesNotExistError("There is no file nor directory "
                    "at the given path '%s' at revision %s"
                    % (path, safe_str(self.short_id)))
        return self._paths[path]
159
159
160 def _get_kind(self, path):
160 def _get_kind(self, path):
161 obj = self.repository._repo[self._get_id_for_path(path)]
161 obj = self.repository._repo[self._get_id_for_path(path)]
162 if isinstance(obj, objects.Blob):
162 if isinstance(obj, objects.Blob):
163 return NodeKind.FILE
163 return NodeKind.FILE
164 elif isinstance(obj, objects.Tree):
164 elif isinstance(obj, objects.Tree):
165 return NodeKind.DIR
165 return NodeKind.DIR
166
166
167 def _get_filectx(self, path):
167 def _get_filectx(self, path):
168 path = self._fix_path(path)
168 path = self._fix_path(path)
169 if self._get_kind(path) != NodeKind.FILE:
169 if self._get_kind(path) != NodeKind.FILE:
170 raise ChangesetError("File does not exist for revision %s at "
170 raise ChangesetError("File does not exist for revision %s at "
171 " '%s'" % (self.raw_id, path))
171 " '%s'" % (self.raw_id, path))
172 return path
172 return path
173
173
174 def _get_file_nodes(self):
174 def _get_file_nodes(self):
175 return chain(*(t[2] for t in self.walk()))
175 return chain(*(t[2] for t in self.walk()))
176
176
177 @LazyProperty
177 @LazyProperty
178 def parents(self):
178 def parents(self):
179 """
179 """
180 Returns list of parents changesets.
180 Returns list of parents changesets.
181 """
181 """
182 return [self.repository.get_changeset(parent)
182 return [self.repository.get_changeset(parent)
183 for parent in self._commit.parents]
183 for parent in self._commit.parents]
184
184
185 @LazyProperty
185 @LazyProperty
186 def children(self):
186 def children(self):
187 """
187 """
188 Returns list of children changesets.
188 Returns list of children changesets.
189 """
189 """
190 rev_filter = settings.GIT_REV_FILTER
190 rev_filter = settings.GIT_REV_FILTER
191 so, se = self.repository.run_git_command(
191 so, se = self.repository.run_git_command(
192 "rev-list %s --children" % (rev_filter)
192 ['rev-list', rev_filter, '--children']
193 )
193 )
194
194
195 children = []
195 children = []
196 pat = re.compile(r'^%s' % self.raw_id)
196 pat = re.compile(r'^%s' % self.raw_id)
197 for l in so.splitlines():
197 for l in so.splitlines():
198 if pat.match(l):
198 if pat.match(l):
199 childs = l.split(' ')[1:]
199 childs = l.split(' ')[1:]
200 children.extend(childs)
200 children.extend(childs)
201 return [self.repository.get_changeset(cs) for cs in children]
201 return [self.repository.get_changeset(cs) for cs in children]
202
202
203 def next(self, branch=None):
203 def next(self, branch=None):
204 if branch and self.branch != branch:
204 if branch and self.branch != branch:
205 raise VCSError('Branch option used on changeset not belonging '
205 raise VCSError('Branch option used on changeset not belonging '
206 'to that branch')
206 'to that branch')
207
207
208 cs = self
208 cs = self
209 while True:
209 while True:
210 try:
210 try:
211 next_ = cs.revision + 1
211 next_ = cs.revision + 1
212 next_rev = cs.repository.revisions[next_]
212 next_rev = cs.repository.revisions[next_]
213 except IndexError:
213 except IndexError:
214 raise ChangesetDoesNotExistError
214 raise ChangesetDoesNotExistError
215 cs = cs.repository.get_changeset(next_rev)
215 cs = cs.repository.get_changeset(next_rev)
216
216
217 if not branch or branch == cs.branch:
217 if not branch or branch == cs.branch:
218 return cs
218 return cs
219
219
220 def prev(self, branch=None):
220 def prev(self, branch=None):
221 if branch and self.branch != branch:
221 if branch and self.branch != branch:
222 raise VCSError('Branch option used on changeset not belonging '
222 raise VCSError('Branch option used on changeset not belonging '
223 'to that branch')
223 'to that branch')
224
224
225 cs = self
225 cs = self
226 while True:
226 while True:
227 try:
227 try:
228 prev_ = cs.revision - 1
228 prev_ = cs.revision - 1
229 if prev_ < 0:
229 if prev_ < 0:
230 raise IndexError
230 raise IndexError
231 prev_rev = cs.repository.revisions[prev_]
231 prev_rev = cs.repository.revisions[prev_]
232 except IndexError:
232 except IndexError:
233 raise ChangesetDoesNotExistError
233 raise ChangesetDoesNotExistError
234 cs = cs.repository.get_changeset(prev_rev)
234 cs = cs.repository.get_changeset(prev_rev)
235
235
236 if not branch or branch == cs.branch:
236 if not branch or branch == cs.branch:
237 return cs
237 return cs
238
238
239 def diff(self, ignore_whitespace=True, context=3):
239 def diff(self, ignore_whitespace=True, context=3):
240 rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
240 rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
241 rev2 = self
241 rev2 = self
242 return ''.join(self.repository.get_diff(rev1, rev2,
242 return ''.join(self.repository.get_diff(rev1, rev2,
243 ignore_whitespace=ignore_whitespace,
243 ignore_whitespace=ignore_whitespace,
244 context=context))
244 context=context))
245
245
246 def get_file_mode(self, path):
246 def get_file_mode(self, path):
247 """
247 """
248 Returns stat mode of the file at the given ``path``.
248 Returns stat mode of the file at the given ``path``.
249 """
249 """
250 # ensure path is traversed
250 # ensure path is traversed
251 path = safe_str(path)
251 path = safe_str(path)
252 self._get_id_for_path(path)
252 self._get_id_for_path(path)
253 return self._stat_modes[path]
253 return self._stat_modes[path]
254
254
255 def get_file_content(self, path):
255 def get_file_content(self, path):
256 """
256 """
257 Returns content of the file at given ``path``.
257 Returns content of the file at given ``path``.
258 """
258 """
259 id = self._get_id_for_path(path)
259 id = self._get_id_for_path(path)
260 blob = self.repository._repo[id]
260 blob = self.repository._repo[id]
261 return blob.as_pretty_string()
261 return blob.as_pretty_string()
262
262
263 def get_file_size(self, path):
263 def get_file_size(self, path):
264 """
264 """
265 Returns size of the file at given ``path``.
265 Returns size of the file at given ``path``.
266 """
266 """
267 id = self._get_id_for_path(path)
267 id = self._get_id_for_path(path)
268 blob = self.repository._repo[id]
268 blob = self.repository._repo[id]
269 return blob.raw_length()
269 return blob.raw_length()
270
270
271 def get_file_changeset(self, path):
271 def get_file_changeset(self, path):
272 """
272 """
273 Returns last commit of the file at the given ``path``.
273 Returns last commit of the file at the given ``path``.
274 """
274 """
275 return self.get_file_history(path, limit=1)[0]
275 return self.get_file_history(path, limit=1)[0]
276
276
277 def get_file_history(self, path, limit=None):
277 def get_file_history(self, path, limit=None):
278 """
278 """
279 Returns history of file as reversed list of ``Changeset`` objects for
279 Returns history of file as reversed list of ``Changeset`` objects for
280 which file at given ``path`` has been modified.
280 which file at given ``path`` has been modified.
281
281
282 TODO: This function now uses os underlying 'git' and 'grep' commands
282 TODO: This function now uses os underlying 'git' and 'grep' commands
283 which is generally not good. Should be replaced with algorithm
283 which is generally not good. Should be replaced with algorithm
284 iterating commits.
284 iterating commits.
285 """
285 """
286 self._get_filectx(path)
286 self._get_filectx(path)
287 cs_id = safe_str(self.id)
287 cs_id = safe_str(self.id)
288 f_path = safe_str(path)
288 f_path = safe_str(path)
289
289
290 if limit:
290 if limit:
291 cmd = 'log -n %s --pretty="format: %%H" -s %s -- "%s"' % (
291 cmd = ['log', '-n', str(safe_int(limit, 0)),
292 safe_int(limit, 0), cs_id, f_path)
292 '--pretty=format:%H', '-s', cs_id, '--', f_path]
293
293
294 else:
294 else:
295 cmd = 'log --pretty="format: %%H" -s %s -- "%s"' % (
295 cmd = ['log',
296 cs_id, f_path)
296 '--pretty=format:%H', '-s', cs_id, '--', f_path]
297 so, se = self.repository.run_git_command(cmd)
297 so, se = self.repository.run_git_command(cmd)
298 ids = re.findall(r'[0-9a-fA-F]{40}', so)
298 ids = re.findall(r'[0-9a-fA-F]{40}', so)
299 return [self.repository.get_changeset(sha) for sha in ids]
299 return [self.repository.get_changeset(sha) for sha in ids]
300
300
301 def get_file_history_2(self, path):
301 def get_file_history_2(self, path):
302 """
302 """
303 Returns history of file as reversed list of ``Changeset`` objects for
303 Returns history of file as reversed list of ``Changeset`` objects for
304 which file at given ``path`` has been modified.
304 which file at given ``path`` has been modified.
305
305
306 """
306 """
307 self._get_filectx(path)
307 self._get_filectx(path)
308 from dulwich.walk import Walker
308 from dulwich.walk import Walker
309 include = [self.id]
309 include = [self.id]
310 walker = Walker(self.repository._repo.object_store, include,
310 walker = Walker(self.repository._repo.object_store, include,
311 paths=[path], max_entries=1)
311 paths=[path], max_entries=1)
312 return [self.repository.get_changeset(sha)
312 return [self.repository.get_changeset(sha)
313 for sha in (x.commit.id for x in walker)]
313 for sha in (x.commit.id for x in walker)]
314
314
315 def get_file_annotate(self, path):
315 def get_file_annotate(self, path):
316 """
316 """
317 Returns a generator of four element tuples with
317 Returns a generator of four element tuples with
318 lineno, sha, changeset lazy loader and line
318 lineno, sha, changeset lazy loader and line
319
319
320 TODO: This function now uses os underlying 'git' command which is
320 TODO: This function now uses os underlying 'git' command which is
321 generally not good. Should be replaced with algorithm iterating
321 generally not good. Should be replaced with algorithm iterating
322 commits.
322 commits.
323 """
323 """
324 cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
324 cmd = ['blame', '-l', '--root', '-r', self.id, '--', path]
325 # -l ==> outputs long shas (and we need all 40 characters)
325 # -l ==> outputs long shas (and we need all 40 characters)
326 # --root ==> doesn't put '^' character for boundaries
326 # --root ==> doesn't put '^' character for boundaries
327 # -r sha ==> blames for the given revision
327 # -r sha ==> blames for the given revision
328 so, se = self.repository.run_git_command(cmd)
328 so, se = self.repository.run_git_command(cmd)
329
329
330 for i, blame_line in enumerate(so.split('\n')[:-1]):
330 for i, blame_line in enumerate(so.split('\n')[:-1]):
331 ln_no = i + 1
331 ln_no = i + 1
332 sha, line = re.split(r' ', blame_line, 1)
332 sha, line = re.split(r' ', blame_line, 1)
333 yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
333 yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
334
334
335 def fill_archive(self, stream=None, kind='tgz', prefix=None,
335 def fill_archive(self, stream=None, kind='tgz', prefix=None,
336 subrepos=False):
336 subrepos=False):
337 """
337 """
338 Fills up given stream.
338 Fills up given stream.
339
339
340 :param stream: file like object.
340 :param stream: file like object.
341 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
341 :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
342 Default: ``tgz``.
342 Default: ``tgz``.
343 :param prefix: name of root directory in archive.
343 :param prefix: name of root directory in archive.
344 Default is repository name and changeset's raw_id joined with dash
344 Default is repository name and changeset's raw_id joined with dash
345 (``repo-tip.<KIND>``).
345 (``repo-tip.<KIND>``).
346 :param subrepos: include subrepos in this archive.
346 :param subrepos: include subrepos in this archive.
347
347
348 :raise ImproperArchiveTypeError: If given kind is wrong.
348 :raise ImproperArchiveTypeError: If given kind is wrong.
349 :raise VcsError: If given stream is None
349 :raise VcsError: If given stream is None
350
350
351 """
351 """
352 allowed_kinds = settings.ARCHIVE_SPECS.keys()
352 allowed_kinds = settings.ARCHIVE_SPECS.keys()
353 if kind not in allowed_kinds:
353 if kind not in allowed_kinds:
354 raise ImproperArchiveTypeError('Archive kind not supported use one'
354 raise ImproperArchiveTypeError('Archive kind not supported use one'
355 'of %s', allowed_kinds)
355 'of %s', allowed_kinds)
356
356
357 if prefix is None:
357 if prefix is None:
358 prefix = '%s-%s' % (self.repository.name, self.short_id)
358 prefix = '%s-%s' % (self.repository.name, self.short_id)
359 elif prefix.startswith('/'):
359 elif prefix.startswith('/'):
360 raise VCSError("Prefix cannot start with leading slash")
360 raise VCSError("Prefix cannot start with leading slash")
361 elif prefix.strip() == '':
361 elif prefix.strip() == '':
362 raise VCSError("Prefix cannot be empty")
362 raise VCSError("Prefix cannot be empty")
363
363
364 if kind == 'zip':
364 if kind == 'zip':
365 frmt = 'zip'
365 frmt = 'zip'
366 else:
366 else:
367 frmt = 'tar'
367 frmt = 'tar'
368 _git_path = settings.GIT_EXECUTABLE_PATH
368 _git_path = settings.GIT_EXECUTABLE_PATH
369 cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
369 cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
370 frmt, prefix, self.raw_id)
370 frmt, prefix, self.raw_id)
371 if kind == 'tgz':
371 if kind == 'tgz':
372 cmd += ' | gzip -9'
372 cmd += ' | gzip -9'
373 elif kind == 'tbz2':
373 elif kind == 'tbz2':
374 cmd += ' | bzip2 -9'
374 cmd += ' | bzip2 -9'
375
375
376 if stream is None:
376 if stream is None:
377 raise VCSError('You need to pass in a valid stream for filling'
377 raise VCSError('You need to pass in a valid stream for filling'
378 ' with archival data')
378 ' with archival data')
379 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
379 popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
380 cwd=self.repository.path)
380 cwd=self.repository.path)
381
381
382 buffer_size = 1024 * 8
382 buffer_size = 1024 * 8
383 chunk = popen.stdout.read(buffer_size)
383 chunk = popen.stdout.read(buffer_size)
384 while chunk:
384 while chunk:
385 stream.write(chunk)
385 stream.write(chunk)
386 chunk = popen.stdout.read(buffer_size)
386 chunk = popen.stdout.read(buffer_size)
387 # Make sure all descriptors would be read
387 # Make sure all descriptors would be read
388 popen.communicate()
388 popen.communicate()
389
389
    def get_nodes(self, path):
        """
        Return the sorted child nodes (directories first, then files) of the
        directory at ``path``, caching each node in ``self.nodes``.

        :raises ChangesetError: when ``path`` is not a directory, or a tree
            entry is neither a Tree nor a Blob
        """
        if self._get_kind(path) != NodeKind.DIR:
            raise ChangesetError("Directory does not exist for revision %s at "
                " '%s'" % (self.revision, path))
        path = self._fix_path(path)
        id = self._get_id_for_path(path)
        tree = self.repository._repo[id]
        dirnodes = []
        filenodes = []
        als = self.repository.alias
        for name, stat, id in tree.iteritems():
            # git submodules show up as gitlink entries; represent them as
            # SubModuleNode instead of resolving the (absent) object
            if objects.S_ISGITLINK(stat):
                dirnodes.append(SubModuleNode(name, url=None, changeset=id,
                                              alias=als))
                continue

            obj = self.repository._repo.get_object(id)
            if path != '':
                obj_path = '/'.join((path, name))
            else:
                obj_path = name
            # remember stat mode for later get_file_mode() lookups
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat
            if isinstance(obj, objects.Tree):
                dirnodes.append(DirNode(obj_path, changeset=self))
            elif isinstance(obj, objects.Blob):
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
            else:
                raise ChangesetError("Requested object should be Tree "
                                     "or Blob, is %r" % type(obj))
        nodes = dirnodes + filenodes
        # populate the per-changeset node cache without overwriting entries
        for node in nodes:
            if not node.path in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes
426
426
    def get_node(self, path):
        """
        Return the node (file, directory, root or submodule) at ``path``,
        using and filling the ``self.nodes`` cache.

        :raises NodeDoesNotExistError: when ``path`` (or one of its parent
            directories) does not exist in this revision
        """
        # paths are handled as byte strings internally
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if not path in self.nodes:
            try:
                id_ = self._get_id_for_path(path)
            except ChangesetError:
                raise NodeDoesNotExistError("Cannot find one of parents' "
                                            "directories for a given path: %s" % path)

            # gitlink stat mode means a submodule, not a regular tree/blob
            _GL = lambda m: m and objects.S_ISGITLINK(m)
            if _GL(self._stat_modes.get(path)):
                node = SubModuleNode(path, url=None, changeset=id_,
                                     alias=self.repository.alias)
            else:
                obj = self.repository._repo.get_object(id_)

                if isinstance(obj, objects.Tree):
                    if path == '':
                        node = RootNode(changeset=self)
                    else:
                        node = DirNode(path, changeset=self)
                    # attach the dulwich object so the node need not re-fetch it
                    node._tree = obj
                elif isinstance(obj, objects.Blob):
                    node = FileNode(path, changeset=self)
                    node._blob = obj
                else:
                    raise NodeDoesNotExistError("There is no file nor directory "
                        "at the given path '%s' at revision %s"
                        % (path, self.short_id))
            # cache node
            self.nodes[path] = node
        return self.nodes[path]
461
461
462 @LazyProperty
462 @LazyProperty
463 def affected_files(self):
463 def affected_files(self):
464 """
464 """
465 Gets a fast accessible file changes for given changeset
465 Gets a fast accessible file changes for given changeset
466 """
466 """
467 added, modified, deleted = self._changes_cache
467 added, modified, deleted = self._changes_cache
468 return list(added.union(modified).union(deleted))
468 return list(added.union(modified).union(deleted))
469
469
470 @LazyProperty
470 @LazyProperty
471 def _diff_name_status(self):
471 def _diff_name_status(self):
472 output = []
472 output = []
473 for parent in self.parents:
473 for parent in self.parents:
474 cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
474 cmd = ['diff', '--name-status', parent.raw_id, self.raw_id,
475 self.raw_id)
475 '--encoding=utf8']
476 so, se = self.repository.run_git_command(cmd)
476 so, se = self.repository.run_git_command(cmd)
477 output.append(so.strip())
477 output.append(so.strip())
478 return '\n'.join(output)
478 return '\n'.join(output)
479
479
480 @LazyProperty
480 @LazyProperty
481 def _changes_cache(self):
481 def _changes_cache(self):
482 added = set()
482 added = set()
483 modified = set()
483 modified = set()
484 deleted = set()
484 deleted = set()
485 _r = self.repository._repo
485 _r = self.repository._repo
486
486
487 parents = self.parents
487 parents = self.parents
488 if not self.parents:
488 if not self.parents:
489 parents = [EmptyChangeset()]
489 parents = [EmptyChangeset()]
490 for parent in parents:
490 for parent in parents:
491 if isinstance(parent, EmptyChangeset):
491 if isinstance(parent, EmptyChangeset):
492 oid = None
492 oid = None
493 else:
493 else:
494 oid = _r[parent.raw_id].tree
494 oid = _r[parent.raw_id].tree
495 changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
495 changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
496 for (oldpath, newpath), (_, _), (_, _) in changes:
496 for (oldpath, newpath), (_, _), (_, _) in changes:
497 if newpath and oldpath:
497 if newpath and oldpath:
498 modified.add(newpath)
498 modified.add(newpath)
499 elif newpath and not oldpath:
499 elif newpath and not oldpath:
500 added.add(newpath)
500 added.add(newpath)
501 elif not newpath and oldpath:
501 elif not newpath and oldpath:
502 deleted.add(oldpath)
502 deleted.add(oldpath)
503 return added, modified, deleted
503 return added, modified, deleted
504
504
505 def _get_paths_for_status(self, status):
505 def _get_paths_for_status(self, status):
506 """
506 """
507 Returns sorted list of paths for given ``status``.
507 Returns sorted list of paths for given ``status``.
508
508
509 :param status: one of: *added*, *modified* or *deleted*
509 :param status: one of: *added*, *modified* or *deleted*
510 """
510 """
511 added, modified, deleted = self._changes_cache
511 added, modified, deleted = self._changes_cache
512 return sorted({
512 return sorted({
513 'added': list(added),
513 'added': list(added),
514 'modified': list(modified),
514 'modified': list(modified),
515 'deleted': list(deleted)}[status]
515 'deleted': list(deleted)}[status]
516 )
516 )
517
517
518 @LazyProperty
518 @LazyProperty
519 def added(self):
519 def added(self):
520 """
520 """
521 Returns list of added ``FileNode`` objects.
521 Returns list of added ``FileNode`` objects.
522 """
522 """
523 if not self.parents:
523 if not self.parents:
524 return list(self._get_file_nodes())
524 return list(self._get_file_nodes())
525 return AddedFileNodesGenerator([n for n in
525 return AddedFileNodesGenerator([n for n in
526 self._get_paths_for_status('added')], self)
526 self._get_paths_for_status('added')], self)
527
527
528 @LazyProperty
528 @LazyProperty
529 def changed(self):
529 def changed(self):
530 """
530 """
531 Returns list of modified ``FileNode`` objects.
531 Returns list of modified ``FileNode`` objects.
532 """
532 """
533 if not self.parents:
533 if not self.parents:
534 return []
534 return []
535 return ChangedFileNodesGenerator([n for n in
535 return ChangedFileNodesGenerator([n for n in
536 self._get_paths_for_status('modified')], self)
536 self._get_paths_for_status('modified')], self)
537
537
538 @LazyProperty
538 @LazyProperty
539 def removed(self):
539 def removed(self):
540 """
540 """
541 Returns list of removed ``FileNode`` objects.
541 Returns list of removed ``FileNode`` objects.
542 """
542 """
543 if not self.parents:
543 if not self.parents:
544 return []
544 return []
545 return RemovedFileNodesGenerator([n for n in
545 return RemovedFileNodesGenerator([n for n in
546 self._get_paths_for_status('deleted')], self)
546 self._get_paths_for_status('deleted')], self)
547
547
548 extra = {}
548 extra = {}
@@ -1,759 +1,732 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 vcs.backends.git.repository
3 vcs.backends.git.repository
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 Git repository implementation.
6 Git repository implementation.
7
7
8 :created_on: Apr 8, 2010
8 :created_on: Apr 8, 2010
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
9 :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
10 """
10 """
11
11
12 import os
12 import os
13 import re
13 import re
14 import time
14 import time
15 import errno
15 import errno
16 import urllib
16 import urllib
17 import urllib2
17 import urllib2
18 import logging
18 import logging
19 import posixpath
19 import posixpath
20 import string
20 import string
21 import sys
22 if sys.platform == "win32":
23 from subprocess import list2cmdline
24 def quote(s):
25 return list2cmdline([s])
26 else:
27 try:
28 # Python <=2.7
29 from pipes import quote
30 except ImportError:
31 # Python 3.3+
32 from shlex import quote
33
21
34 from dulwich.objects import Tag
22 from dulwich.objects import Tag
35 from dulwich.repo import Repo, NotGitRepository
23 from dulwich.repo import Repo, NotGitRepository
36 from dulwich.config import ConfigFile
24 from dulwich.config import ConfigFile
37
25
38 from kallithea.lib.vcs import subprocessio
26 from kallithea.lib.vcs import subprocessio
39 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
27 from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
40 from kallithea.lib.vcs.conf import settings
28 from kallithea.lib.vcs.conf import settings
41
29
42 from kallithea.lib.vcs.exceptions import (
30 from kallithea.lib.vcs.exceptions import (
43 BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError,
31 BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError
32 RepositoryError, TagAlreadyExistError, TagDoesNotExistError
45 )
33 )
46 from kallithea.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
34 from kallithea.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
47 from kallithea.lib.vcs.utils.lazy import LazyProperty
35 from kallithea.lib.vcs.utils.lazy import LazyProperty
48 from kallithea.lib.vcs.utils.ordered_dict import OrderedDict
36 from kallithea.lib.vcs.utils.ordered_dict import OrderedDict
49 from kallithea.lib.vcs.utils.paths import abspath, get_user_home
37 from kallithea.lib.vcs.utils.paths import abspath, get_user_home
50
38
51 from kallithea.lib.vcs.utils.hgcompat import (
39 from kallithea.lib.vcs.utils.hgcompat import (
52 hg_url, httpbasicauthhandler, httpdigestauthhandler
40 hg_url, httpbasicauthhandler, httpdigestauthhandler
53 )
41 )
54
42
55 from .changeset import GitChangeset
43 from .changeset import GitChangeset
56 from .inmemory import GitInMemoryChangeset
44 from .inmemory import GitInMemoryChangeset
57 from .workdir import GitWorkdir
45 from .workdir import GitWorkdir
58
46
59 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
47 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
60
48
61 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
62
50
63
51
64 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
65 """
53 """
66 Git repository backend.
54 Git repository backend.
67 """
55 """
68 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
69 scm = 'git'
57 scm = 'git'
70
58
    def __init__(self, repo_path, create=False, src_url=None,
                 update_after_clone=False, bare=False):
        """
        Open (or, with ``create``, initialize/clone) the git repository at
        ``repo_path``; ``src_url``, ``update_after_clone`` and ``bare`` are
        forwarded to the repo acquisition helper.
        """
        self.path = abspath(repo_path)
        repo = self._get_repo(create, src_url, update_after_clone, bare)
        self.bare = repo.bare
77
65
78 @property
66 @property
79 def _config_files(self):
67 def _config_files(self):
80 return [
68 return [
81 self.bare and abspath(self.path, 'config')
69 self.bare and abspath(self.path, 'config')
82 or abspath(self.path, '.git', 'config'),
70 or abspath(self.path, '.git', 'config'),
83 abspath(get_user_home(), '.gitconfig'),
71 abspath(get_user_home(), '.gitconfig'),
84 ]
72 ]
85
73
    @property
    def _repo(self):
        """A fresh dulwich ``Repo`` handle for this repository path
        (constructed on every access)."""
        return Repo(self.path)
89
77
90 @property
78 @property
91 def head(self):
79 def head(self):
92 try:
80 try:
93 return self._repo.head()
81 return self._repo.head()
94 except KeyError:
82 except KeyError:
95 return None
83 return None
96
84
97 @property
85 @property
98 def _empty(self):
86 def _empty(self):
99 """
87 """
100 Checks if repository is empty ie. without any changesets
88 Checks if repository is empty ie. without any changesets
101 """
89 """
102
90
103 try:
91 try:
104 self.revisions[0]
92 self.revisions[0]
105 except (KeyError, IndexError):
93 except (KeyError, IndexError):
106 return True
94 return True
107 return False
95 return False
108
96
    @LazyProperty
    def revisions(self):
        """
        Returns list of revisions' ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        return self._get_all_revisions()
116
104
117 @classmethod
105 @classmethod
118 def _run_git_command(cls, cmd, **opts):
106 def _run_git_command(cls, cmd, **opts):
119 """
107 """
120 Runs given ``cmd`` as git command and returns tuple
108 Runs given ``cmd`` as git command and returns tuple
121 (stdout, stderr).
109 (stdout, stderr).
122
110
123 :param cmd: git command to be executed
111 :param cmd: git command to be executed
124 :param opts: env options to pass into Subprocess command
112 :param opts: env options to pass into Subprocess command
125 """
113 """
126
114
127 if '_bare' in opts:
115 if '_bare' in opts:
128 _copts = []
116 _copts = []
129 del opts['_bare']
117 del opts['_bare']
130 else:
118 else:
131 _copts = ['-c', 'core.quotepath=false', ]
119 _copts = ['-c', 'core.quotepath=false', ]
132 safe_call = False
120 safe_call = False
133 if '_safe' in opts:
121 if '_safe' in opts:
134 #no exc on failure
122 #no exc on failure
135 del opts['_safe']
123 del opts['_safe']
136 safe_call = True
124 safe_call = True
137
125
138 _str_cmd = False
126 assert isinstance(cmd, list), cmd
139 if isinstance(cmd, basestring):
140 cmd = [cmd]
141 _str_cmd = True
142
127
143 gitenv = os.environ
128 gitenv = os.environ
144 # need to clean fix GIT_DIR !
129 # need to clean fix GIT_DIR !
145 if 'GIT_DIR' in gitenv:
130 if 'GIT_DIR' in gitenv:
146 del gitenv['GIT_DIR']
131 del gitenv['GIT_DIR']
147 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
132 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
148
133
149 _git_path = settings.GIT_EXECUTABLE_PATH
134 _git_path = settings.GIT_EXECUTABLE_PATH
150 cmd = [_git_path] + _copts + cmd
135 cmd = [_git_path] + _copts + cmd
151 if _str_cmd:
152 cmd = ' '.join(cmd)
153
136
154 try:
137 try:
155 _opts = dict(
138 _opts = dict(
156 env=gitenv,
139 env=gitenv,
157 shell=True,
140 shell=False,
158 )
141 )
159 _opts.update(opts)
142 _opts.update(opts)
160 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
143 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
161 except (EnvironmentError, OSError), err:
144 except (EnvironmentError, OSError), err:
162 tb_err = ("Couldn't run git command (%s).\n"
145 tb_err = ("Couldn't run git command (%s).\n"
163 "Original error was:%s\n" % (cmd, err))
146 "Original error was:%s\n" % (cmd, err))
164 log.error(tb_err)
147 log.error(tb_err)
165 if safe_call:
148 if safe_call:
166 return '', err
149 return '', err
167 else:
150 else:
168 raise RepositoryError(tb_err)
151 raise RepositoryError(tb_err)
169
152
170 return ''.join(p.output), ''.join(p.error)
153 return ''.join(p.output), ''.join(p.error)
171
154
172 def run_git_command(self, cmd):
155 def run_git_command(self, cmd):
173 opts = {}
156 opts = {}
174 if os.path.isdir(self.path):
157 if os.path.isdir(self.path):
175 opts['cwd'] = self.path
158 opts['cwd'] = self.path
176 return self._run_git_command(cmd, **opts)
159 return self._run_git_command(cmd, **opts)
177
160
178 @classmethod
161 @classmethod
179 def _check_url(cls, url):
162 def _check_url(cls, url):
180 """
163 """
181 Function will check given url and try to verify if it's a valid
164 Function will check given url and try to verify if it's a valid
182 link. Sometimes it may happened that git will issue basic
165 link. Sometimes it may happened that git will issue basic
183 auth request that can cause whole API to hang when used from python
166 auth request that can cause whole API to hang when used from python
184 or other external calls.
167 or other external calls.
185
168
186 On failures it'll raise urllib2.HTTPError, exception is also thrown
169 On failures it'll raise urllib2.HTTPError, exception is also thrown
187 when the return code is non 200
170 when the return code is non 200
188 """
171 """
189
172
190 # check first if it's not an local url
173 # check first if it's not an local url
191 if os.path.isdir(url) or url.startswith('file:'):
174 if os.path.isdir(url) or url.startswith('file:'):
192 return True
175 return True
193
176
194 if '+' in url[:url.find('://')]:
177 if '+' in url[:url.find('://')]:
195 url = url[url.find('+') + 1:]
178 url = url[url.find('+') + 1:]
196
179
197 handlers = []
180 handlers = []
198 url_obj = hg_url(url)
181 url_obj = hg_url(url)
199 test_uri, authinfo = url_obj.authinfo()
182 test_uri, authinfo = url_obj.authinfo()
200 url_obj.passwd = '*****'
183 url_obj.passwd = '*****'
201 cleaned_uri = str(url_obj)
184 cleaned_uri = str(url_obj)
202
185
203 if not test_uri.endswith('info/refs'):
186 if not test_uri.endswith('info/refs'):
204 test_uri = test_uri.rstrip('/') + '/info/refs'
187 test_uri = test_uri.rstrip('/') + '/info/refs'
205
188
206 if authinfo:
189 if authinfo:
207 #create a password manager
190 #create a password manager
208 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
191 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
209 passmgr.add_password(*authinfo)
192 passmgr.add_password(*authinfo)
210
193
211 handlers.extend((httpbasicauthhandler(passmgr),
194 handlers.extend((httpbasicauthhandler(passmgr),
212 httpdigestauthhandler(passmgr)))
195 httpdigestauthhandler(passmgr)))
213
196
214 o = urllib2.build_opener(*handlers)
197 o = urllib2.build_opener(*handlers)
215 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
198 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
216
199
217 q = {"service": 'git-upload-pack'}
200 q = {"service": 'git-upload-pack'}
218 qs = '?%s' % urllib.urlencode(q)
201 qs = '?%s' % urllib.urlencode(q)
219 cu = "%s%s" % (test_uri, qs)
202 cu = "%s%s" % (test_uri, qs)
220 req = urllib2.Request(cu, None, {})
203 req = urllib2.Request(cu, None, {})
221
204
222 try:
205 try:
223 resp = o.open(req)
206 resp = o.open(req)
224 if resp.code != 200:
207 if resp.code != 200:
225 raise Exception('Return Code is not 200')
208 raise Exception('Return Code is not 200')
226 except Exception, e:
209 except Exception, e:
227 # means it cannot be cloned
210 # means it cannot be cloned
228 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
211 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
229
212
230 # now detect if it's proper git repo
213 # now detect if it's proper git repo
231 gitdata = resp.read()
214 gitdata = resp.read()
232 if not 'service=git-upload-pack' in gitdata:
215 if not 'service=git-upload-pack' in gitdata:
233 raise urllib2.URLError(
216 raise urllib2.URLError(
234 "url [%s] does not look like an git" % (cleaned_uri))
217 "url [%s] does not look like an git" % (cleaned_uri))
235
218
236 return True
219 return True
237
220
238 def _get_repo(self, create, src_url=None, update_after_clone=False,
221 def _get_repo(self, create, src_url=None, update_after_clone=False,
239 bare=False):
222 bare=False):
240 if create and os.path.exists(self.path):
223 if create and os.path.exists(self.path):
241 raise RepositoryError("Location already exist")
224 raise RepositoryError("Location already exist")
242 if src_url and not create:
225 if src_url and not create:
243 raise RepositoryError("Create should be set to True if src_url is "
226 raise RepositoryError("Create should be set to True if src_url is "
244 "given (clone operation creates repository)")
227 "given (clone operation creates repository)")
245 try:
228 try:
246 if create and src_url:
229 if create and src_url:
247 GitRepository._check_url(src_url)
230 GitRepository._check_url(src_url)
248 self.clone(src_url, update_after_clone, bare)
231 self.clone(src_url, update_after_clone, bare)
249 return Repo(self.path)
232 return Repo(self.path)
250 elif create:
233 elif create:
251 os.makedirs(self.path)
234 os.makedirs(self.path)
252 if bare:
235 if bare:
253 return Repo.init_bare(self.path)
236 return Repo.init_bare(self.path)
254 else:
237 else:
255 return Repo.init(self.path)
238 return Repo.init(self.path)
256 else:
239 else:
257 return self._repo
240 return self._repo
258 except (NotGitRepository, OSError), err:
241 except (NotGitRepository, OSError), err:
259 raise RepositoryError(err)
242 raise RepositoryError(err)
260
243
261 def _get_all_revisions(self):
244 def _get_all_revisions(self):
262 # we must check if this repo is not empty, since later command
245 # we must check if this repo is not empty, since later command
263 # fails if it is. And it's cheaper to ask than throw the subprocess
246 # fails if it is. And it's cheaper to ask than throw the subprocess
264 # errors
247 # errors
265 try:
248 try:
266 self._repo.head()
249 self._repo.head()
267 except KeyError:
250 except KeyError:
268 return []
251 return []
269
252
270 rev_filter = settings.GIT_REV_FILTER
253 rev_filter = settings.GIT_REV_FILTER
271 cmd = 'rev-list %s --reverse --date-order' % (rev_filter)
254 cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
272 try:
255 try:
273 so, se = self.run_git_command(cmd)
256 so, se = self.run_git_command(cmd)
274 except RepositoryError:
257 except RepositoryError:
275 # Can be raised for empty repositories
258 # Can be raised for empty repositories
276 return []
259 return []
277 return so.splitlines()
260 return so.splitlines()
278
261
279 def _get_all_revisions2(self):
262 def _get_all_revisions2(self):
280 #alternate implementation using dulwich
263 #alternate implementation using dulwich
281 includes = [x[1][0] for x in self._parsed_refs.iteritems()
264 includes = [x[1][0] for x in self._parsed_refs.iteritems()
282 if x[1][1] != 'T']
265 if x[1][1] != 'T']
283 return [c.commit.id for c in self._repo.get_walker(include=includes)]
266 return [c.commit.id for c in self._repo.get_walker(include=includes)]
284
267
285 def _get_revision(self, revision):
268 def _get_revision(self, revision):
286 """
269 """
287 For git backend we always return integer here. This way we ensure
270 For git backend we always return integer here. This way we ensure
288 that changeset's revision attribute would become integer.
271 that changeset's revision attribute would become integer.
289 """
272 """
290
273
291 is_null = lambda o: len(o) == revision.count('0')
274 is_null = lambda o: len(o) == revision.count('0')
292
275
293 if self._empty:
276 if self._empty:
294 raise EmptyRepositoryError("There are no changesets yet")
277 raise EmptyRepositoryError("There are no changesets yet")
295
278
296 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
279 if revision in (None, '', 'tip', 'HEAD', 'head', -1):
297 return self.revisions[-1]
280 return self.revisions[-1]
298
281
299 is_bstr = isinstance(revision, (str, unicode))
282 is_bstr = isinstance(revision, (str, unicode))
300 if ((is_bstr and revision.isdigit() and len(revision) < 12)
283 if ((is_bstr and revision.isdigit() and len(revision) < 12)
301 or isinstance(revision, int) or is_null(revision)):
284 or isinstance(revision, int) or is_null(revision)):
302 try:
285 try:
303 revision = self.revisions[int(revision)]
286 revision = self.revisions[int(revision)]
304 except IndexError:
287 except IndexError:
305 msg = ("Revision %s does not exist for %s" % (revision, self))
288 msg = ("Revision %s does not exist for %s" % (revision, self))
306 raise ChangesetDoesNotExistError(msg)
289 raise ChangesetDoesNotExistError(msg)
307
290
308 elif is_bstr:
291 elif is_bstr:
309 # get by branch/tag name
292 # get by branch/tag name
310 _ref_revision = self._parsed_refs.get(revision)
293 _ref_revision = self._parsed_refs.get(revision)
311 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
294 if _ref_revision: # and _ref_revision[1] in ['H', 'RH', 'T']:
312 return _ref_revision[0]
295 return _ref_revision[0]
313
296
314 _tags_shas = self.tags.values()
297 _tags_shas = self.tags.values()
315 # maybe it's a tag ? we don't have them in self.revisions
298 # maybe it's a tag ? we don't have them in self.revisions
316 if revision in _tags_shas:
299 if revision in _tags_shas:
317 return _tags_shas[_tags_shas.index(revision)]
300 return _tags_shas[_tags_shas.index(revision)]
318
301
319 elif not SHA_PATTERN.match(revision) or revision not in self.revisions:
302 elif not SHA_PATTERN.match(revision) or revision not in self.revisions:
320 msg = ("Revision %s does not exist for %s" % (revision, self))
303 msg = ("Revision %s does not exist for %s" % (revision, self))
321 raise ChangesetDoesNotExistError(msg)
304 raise ChangesetDoesNotExistError(msg)
322
305
323 # Ensure we return full id
306 # Ensure we return full id
324 if not SHA_PATTERN.match(str(revision)):
307 if not SHA_PATTERN.match(str(revision)):
325 raise ChangesetDoesNotExistError("Given revision %s not recognized"
308 raise ChangesetDoesNotExistError("Given revision %s not recognized"
326 % revision)
309 % revision)
327 return revision
310 return revision
328
311
329 def get_ref_revision(self, ref_type, ref_name):
312 def get_ref_revision(self, ref_type, ref_name):
330 """
313 """
331 Returns ``MercurialChangeset`` object representing repository's
314 Returns ``MercurialChangeset`` object representing repository's
332 changeset at the given ``revision``.
315 changeset at the given ``revision``.
333 """
316 """
334 return self._get_revision(ref_name)
317 return self._get_revision(ref_name)
335
318
336 def _get_archives(self, archive_name='tip'):
319 def _get_archives(self, archive_name='tip'):
337
320
338 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
321 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
339 yield {"type": i[0], "extension": i[1], "node": archive_name}
322 yield {"type": i[0], "extension": i[1], "node": archive_name}
340
323
341 def _get_url(self, url):
324 def _get_url(self, url):
342 """
325 """
343 Returns normalized url. If schema is not given, would fall to
326 Returns normalized url. If schema is not given, would fall to
344 filesystem (``file:///``) schema.
327 filesystem (``file:///``) schema.
345 """
328 """
346 url = str(url)
329 url = str(url)
347 if url != 'default' and not '://' in url:
330 if url != 'default' and not '://' in url:
348 url = ':///'.join(('file', url))
331 url = ':///'.join(('file', url))
349 return url
332 return url
350
333
351 def get_hook_location(self):
334 def get_hook_location(self):
352 """
335 """
353 returns absolute path to location where hooks are stored
336 returns absolute path to location where hooks are stored
354 """
337 """
355 loc = os.path.join(self.path, 'hooks')
338 loc = os.path.join(self.path, 'hooks')
356 if not self.bare:
339 if not self.bare:
357 loc = os.path.join(self.path, '.git', 'hooks')
340 loc = os.path.join(self.path, '.git', 'hooks')
358 return loc
341 return loc
359
342
360 @LazyProperty
343 @LazyProperty
361 def name(self):
344 def name(self):
362 return os.path.basename(self.path)
345 return os.path.basename(self.path)
363
346
364 @LazyProperty
347 @LazyProperty
365 def last_change(self):
348 def last_change(self):
366 """
349 """
367 Returns last change made on this repository as datetime object
350 Returns last change made on this repository as datetime object
368 """
351 """
369 return date_fromtimestamp(self._get_mtime(), makedate()[1])
352 return date_fromtimestamp(self._get_mtime(), makedate()[1])
370
353
371 def _get_mtime(self):
354 def _get_mtime(self):
372 try:
355 try:
373 return time.mktime(self.get_changeset().date.timetuple())
356 return time.mktime(self.get_changeset().date.timetuple())
374 except RepositoryError:
357 except RepositoryError:
375 idx_loc = '' if self.bare else '.git'
358 idx_loc = '' if self.bare else '.git'
376 # fallback to filesystem
359 # fallback to filesystem
377 in_path = os.path.join(self.path, idx_loc, "index")
360 in_path = os.path.join(self.path, idx_loc, "index")
378 he_path = os.path.join(self.path, idx_loc, "HEAD")
361 he_path = os.path.join(self.path, idx_loc, "HEAD")
379 if os.path.exists(in_path):
362 if os.path.exists(in_path):
380 return os.stat(in_path).st_mtime
363 return os.stat(in_path).st_mtime
381 else:
364 else:
382 return os.stat(he_path).st_mtime
365 return os.stat(he_path).st_mtime
383
366
384 @LazyProperty
367 @LazyProperty
385 def description(self):
368 def description(self):
386 undefined_description = u'unknown'
369 undefined_description = u'unknown'
387 _desc = self._repo.get_description()
370 _desc = self._repo.get_description()
388 return safe_unicode(_desc or undefined_description)
371 return safe_unicode(_desc or undefined_description)
389
372
390 @LazyProperty
373 @LazyProperty
391 def contact(self):
374 def contact(self):
392 undefined_contact = u'Unknown'
375 undefined_contact = u'Unknown'
393 return undefined_contact
376 return undefined_contact
394
377
395 @property
378 @property
396 def branches(self):
379 def branches(self):
397 if not self.revisions:
380 if not self.revisions:
398 return {}
381 return {}
399 sortkey = lambda ctx: ctx[0]
382 sortkey = lambda ctx: ctx[0]
400 _branches = [(x[0], x[1][0])
383 _branches = [(x[0], x[1][0])
401 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
384 for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
402 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
385 return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
403
386
404 @LazyProperty
387 @LazyProperty
405 def closed_branches(self):
388 def closed_branches(self):
406 return {}
389 return {}
407
390
408 @LazyProperty
391 @LazyProperty
409 def tags(self):
392 def tags(self):
410 return self._get_tags()
393 return self._get_tags()
411
394
412 def _get_tags(self):
395 def _get_tags(self):
413 if not self.revisions:
396 if not self.revisions:
414 return {}
397 return {}
415
398
416 sortkey = lambda ctx: ctx[0]
399 sortkey = lambda ctx: ctx[0]
417 _tags = [(x[0], x[1][0])
400 _tags = [(x[0], x[1][0])
418 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
401 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
419 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
402 return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
420
403
421 def tag(self, name, user, revision=None, message=None, date=None,
404 def tag(self, name, user, revision=None, message=None, date=None,
422 **kwargs):
405 **kwargs):
423 """
406 """
424 Creates and returns a tag for the given ``revision``.
407 Creates and returns a tag for the given ``revision``.
425
408
426 :param name: name for new tag
409 :param name: name for new tag
427 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
410 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
428 :param revision: changeset id for which new tag would be created
411 :param revision: changeset id for which new tag would be created
429 :param message: message of the tag's commit
412 :param message: message of the tag's commit
430 :param date: date of tag's commit
413 :param date: date of tag's commit
431
414
432 :raises TagAlreadyExistError: if tag with same name already exists
415 :raises TagAlreadyExistError: if tag with same name already exists
433 """
416 """
434 if name in self.tags:
417 if name in self.tags:
435 raise TagAlreadyExistError("Tag %s already exists" % name)
418 raise TagAlreadyExistError("Tag %s already exists" % name)
436 changeset = self.get_changeset(revision)
419 changeset = self.get_changeset(revision)
437 message = message or "Added tag %s for commit %s" % (name,
420 message = message or "Added tag %s for commit %s" % (name,
438 changeset.raw_id)
421 changeset.raw_id)
439 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
422 self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
440
423
441 self._parsed_refs = self._get_parsed_refs()
424 self._parsed_refs = self._get_parsed_refs()
442 self.tags = self._get_tags()
425 self.tags = self._get_tags()
443 return changeset
426 return changeset
444
427
445 def remove_tag(self, name, user, message=None, date=None):
428 def remove_tag(self, name, user, message=None, date=None):
446 """
429 """
447 Removes tag with the given ``name``.
430 Removes tag with the given ``name``.
448
431
449 :param name: name of the tag to be removed
432 :param name: name of the tag to be removed
450 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
433 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
451 :param message: message of the tag's removal commit
434 :param message: message of the tag's removal commit
452 :param date: date of tag's removal commit
435 :param date: date of tag's removal commit
453
436
454 :raises TagDoesNotExistError: if tag with given name does not exists
437 :raises TagDoesNotExistError: if tag with given name does not exists
455 """
438 """
456 if name not in self.tags:
439 if name not in self.tags:
457 raise TagDoesNotExistError("Tag %s does not exist" % name)
440 raise TagDoesNotExistError("Tag %s does not exist" % name)
458 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
441 tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
459 try:
442 try:
460 os.remove(tagpath)
443 os.remove(tagpath)
461 self._parsed_refs = self._get_parsed_refs()
444 self._parsed_refs = self._get_parsed_refs()
462 self.tags = self._get_tags()
445 self.tags = self._get_tags()
463 except OSError, e:
446 except OSError, e:
464 raise RepositoryError(e.strerror)
447 raise RepositoryError(e.strerror)
465
448
466 @LazyProperty
449 @LazyProperty
467 def bookmarks(self):
450 def bookmarks(self):
468 """
451 """
469 Gets bookmarks for this repository
452 Gets bookmarks for this repository
470 """
453 """
471 return {}
454 return {}
472
455
473 @LazyProperty
456 @LazyProperty
474 def _parsed_refs(self):
457 def _parsed_refs(self):
475 return self._get_parsed_refs()
458 return self._get_parsed_refs()
476
459
477 def _get_parsed_refs(self):
460 def _get_parsed_refs(self):
478 # cache the property
461 # cache the property
479 _repo = self._repo
462 _repo = self._repo
480 refs = _repo.get_refs()
463 refs = _repo.get_refs()
481 keys = [('refs/heads/', 'H'),
464 keys = [('refs/heads/', 'H'),
482 ('refs/remotes/origin/', 'RH'),
465 ('refs/remotes/origin/', 'RH'),
483 ('refs/tags/', 'T')]
466 ('refs/tags/', 'T')]
484 _refs = {}
467 _refs = {}
485 for ref, sha in refs.iteritems():
468 for ref, sha in refs.iteritems():
486 for k, type_ in keys:
469 for k, type_ in keys:
487 if ref.startswith(k):
470 if ref.startswith(k):
488 _key = ref[len(k):]
471 _key = ref[len(k):]
489 if type_ == 'T':
472 if type_ == 'T':
490 obj = _repo.get_object(sha)
473 obj = _repo.get_object(sha)
491 if isinstance(obj, Tag):
474 if isinstance(obj, Tag):
492 sha = _repo.get_object(sha).object[1]
475 sha = _repo.get_object(sha).object[1]
493 _refs[_key] = [sha, type_]
476 _refs[_key] = [sha, type_]
494 break
477 break
495 return _refs
478 return _refs
496
479
497 def _heads(self, reverse=False):
480 def _heads(self, reverse=False):
498 refs = self._repo.get_refs()
481 refs = self._repo.get_refs()
499 heads = {}
482 heads = {}
500
483
501 for key, val in refs.items():
484 for key, val in refs.items():
502 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
485 for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
503 if key.startswith(ref_key):
486 if key.startswith(ref_key):
504 n = key[len(ref_key):]
487 n = key[len(ref_key):]
505 if n not in ['HEAD']:
488 if n not in ['HEAD']:
506 heads[n] = val
489 heads[n] = val
507
490
508 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
491 return heads if reverse else dict((y, x) for x, y in heads.iteritems())
509
492
510 def get_changeset(self, revision=None):
493 def get_changeset(self, revision=None):
511 """
494 """
512 Returns ``GitChangeset`` object representing commit from git repository
495 Returns ``GitChangeset`` object representing commit from git repository
513 at the given revision or head (most recent commit) if None given.
496 at the given revision or head (most recent commit) if None given.
514 """
497 """
515 if isinstance(revision, GitChangeset):
498 if isinstance(revision, GitChangeset):
516 return revision
499 return revision
517 revision = self._get_revision(revision)
500 revision = self._get_revision(revision)
518 changeset = GitChangeset(repository=self, revision=revision)
501 changeset = GitChangeset(repository=self, revision=revision)
519 return changeset
502 return changeset
520
503
521 def get_changesets(self, start=None, end=None, start_date=None,
504 def get_changesets(self, start=None, end=None, start_date=None,
522 end_date=None, branch_name=None, reverse=False):
505 end_date=None, branch_name=None, reverse=False):
523 """
506 """
524 Returns iterator of ``GitChangeset`` objects from start to end (both
507 Returns iterator of ``GitChangeset`` objects from start to end (both
525 are inclusive), in ascending date order (unless ``reverse`` is set).
508 are inclusive), in ascending date order (unless ``reverse`` is set).
526
509
527 :param start: changeset ID, as str; first returned changeset
510 :param start: changeset ID, as str; first returned changeset
528 :param end: changeset ID, as str; last returned changeset
511 :param end: changeset ID, as str; last returned changeset
529 :param start_date: if specified, changesets with commit date less than
512 :param start_date: if specified, changesets with commit date less than
530 ``start_date`` would be filtered out from returned set
513 ``start_date`` would be filtered out from returned set
531 :param end_date: if specified, changesets with commit date greater than
514 :param end_date: if specified, changesets with commit date greater than
532 ``end_date`` would be filtered out from returned set
515 ``end_date`` would be filtered out from returned set
533 :param branch_name: if specified, changesets not reachable from given
516 :param branch_name: if specified, changesets not reachable from given
534 branch would be filtered out from returned set
517 branch would be filtered out from returned set
535 :param reverse: if ``True``, returned generator would be reversed
518 :param reverse: if ``True``, returned generator would be reversed
536 (meaning that returned changesets would have descending date order)
519 (meaning that returned changesets would have descending date order)
537
520
538 :raise BranchDoesNotExistError: If given ``branch_name`` does not
521 :raise BranchDoesNotExistError: If given ``branch_name`` does not
539 exist.
522 exist.
540 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
523 :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
541 ``end`` could not be found.
524 ``end`` could not be found.
542
525
543 """
526 """
544 if branch_name and branch_name not in self.branches:
527 if branch_name and branch_name not in self.branches:
545 raise BranchDoesNotExistError("Branch '%s' not found" \
528 raise BranchDoesNotExistError("Branch '%s' not found" \
546 % branch_name)
529 % branch_name)
547 # actually we should check now if it's not an empty repo to not spaw
530 # actually we should check now if it's not an empty repo to not spaw
548 # subprocess commands
531 # subprocess commands
549 if self._empty:
532 if self._empty:
550 raise EmptyRepositoryError("There are no changesets yet")
533 raise EmptyRepositoryError("There are no changesets yet")
551
534
552 # %H at format means (full) commit hash, initial hashes are retrieved
535 # %H at format means (full) commit hash, initial hashes are retrieved
553 # in ascending date order
536 # in ascending date order
554 cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
537 cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H']
555 cmd_params = {}
556 if start_date:
538 if start_date:
557 cmd_template += ' --since "$since"'
539 cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')]
558 cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
559 if end_date:
540 if end_date:
560 cmd_template += ' --until "$until"'
541 cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')]
561 cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
562 if branch_name:
542 if branch_name:
563 cmd_template += ' $branch_name'
543 cmd.append(branch_name)
564 cmd_params['branch_name'] = branch_name
565 else:
544 else:
566 rev_filter = settings.GIT_REV_FILTER
545 cmd.append(settings.GIT_REV_FILTER)
567 cmd_template += ' %s' % (rev_filter)
568
546
569 cmd = string.Template(cmd_template).safe_substitute(**cmd_params)
570 revs = self.run_git_command(cmd)[0].splitlines()
547 revs = self.run_git_command(cmd)[0].splitlines()
571 start_pos = 0
548 start_pos = 0
572 end_pos = len(revs)
549 end_pos = len(revs)
573 if start:
550 if start:
574 _start = self._get_revision(start)
551 _start = self._get_revision(start)
575 try:
552 try:
576 start_pos = revs.index(_start)
553 start_pos = revs.index(_start)
577 except ValueError:
554 except ValueError:
578 pass
555 pass
579
556
580 if end is not None:
557 if end is not None:
581 _end = self._get_revision(end)
558 _end = self._get_revision(end)
582 try:
559 try:
583 end_pos = revs.index(_end)
560 end_pos = revs.index(_end)
584 except ValueError:
561 except ValueError:
585 pass
562 pass
586
563
587 if None not in [start, end] and start_pos > end_pos:
564 if None not in [start, end] and start_pos > end_pos:
588 raise RepositoryError('start cannot be after end')
565 raise RepositoryError('start cannot be after end')
589
566
590 if end_pos is not None:
567 if end_pos is not None:
591 end_pos += 1
568 end_pos += 1
592
569
593 revs = revs[start_pos:end_pos]
570 revs = revs[start_pos:end_pos]
594 if reverse:
571 if reverse:
595 revs = reversed(revs)
572 revs = reversed(revs)
596 return CollectionGenerator(self, revs)
573 return CollectionGenerator(self, revs)
597
574
598 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
575 def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
599 context=3):
576 context=3):
600 """
577 """
601 Returns (git like) *diff*, as plain text. Shows changes introduced by
578 Returns (git like) *diff*, as plain text. Shows changes introduced by
602 ``rev2`` since ``rev1``.
579 ``rev2`` since ``rev1``.
603
580
604 :param rev1: Entry point from which diff is shown. Can be
581 :param rev1: Entry point from which diff is shown. Can be
605 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
582 ``self.EMPTY_CHANGESET`` - in this case, patch showing all
606 the changes since empty state of the repository until ``rev2``
583 the changes since empty state of the repository until ``rev2``
607 :param rev2: Until which revision changes should be shown.
584 :param rev2: Until which revision changes should be shown.
608 :param ignore_whitespace: If set to ``True``, would not show whitespace
585 :param ignore_whitespace: If set to ``True``, would not show whitespace
609 changes. Defaults to ``False``.
586 changes. Defaults to ``False``.
610 :param context: How many lines before/after changed lines should be
587 :param context: How many lines before/after changed lines should be
611 shown. Defaults to ``3``.
588 shown. Defaults to ``3``.
612 """
589 """
613 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
590 flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
614 if ignore_whitespace:
591 if ignore_whitespace:
615 flags.append('-w')
592 flags.append('-w')
616
593
617 if hasattr(rev1, 'raw_id'):
594 if hasattr(rev1, 'raw_id'):
618 rev1 = getattr(rev1, 'raw_id')
595 rev1 = getattr(rev1, 'raw_id')
619
596
620 if hasattr(rev2, 'raw_id'):
597 if hasattr(rev2, 'raw_id'):
621 rev2 = getattr(rev2, 'raw_id')
598 rev2 = getattr(rev2, 'raw_id')
622
599
623 if rev1 == self.EMPTY_CHANGESET:
600 if rev1 == self.EMPTY_CHANGESET:
624 rev2 = self.get_changeset(rev2).raw_id
601 rev2 = self.get_changeset(rev2).raw_id
625 cmd = ' '.join(['show'] + flags + [rev2])
602 cmd = ['show'] + flags + [rev2]
626 else:
603 else:
627 rev1 = self.get_changeset(rev1).raw_id
604 rev1 = self.get_changeset(rev1).raw_id
628 rev2 = self.get_changeset(rev2).raw_id
605 rev2 = self.get_changeset(rev2).raw_id
629 cmd = ' '.join(['diff'] + flags + [rev1, rev2])
606 cmd = ['diff'] + flags + [rev1, rev2]
630
607
631 if path:
608 if path:
632 cmd += ' -- "%s"' % path
609 cmd += ['--', path]
633
610
634 stdout, stderr = self.run_git_command(cmd)
611 stdout, stderr = self.run_git_command(cmd)
635 # TODO: don't ignore stderr
612 # TODO: don't ignore stderr
636 # If we used 'show' command, strip first few lines (until actual diff
613 # If we used 'show' command, strip first few lines (until actual diff
637 # starts)
614 # starts)
638 if rev1 == self.EMPTY_CHANGESET:
615 if rev1 == self.EMPTY_CHANGESET:
639 parts = stdout.split('\ndiff ', 1)
616 parts = stdout.split('\ndiff ', 1)
640 if len(parts) > 1:
617 if len(parts) > 1:
641 stdout = 'diff ' + parts[1]
618 stdout = 'diff ' + parts[1]
642 return stdout
619 return stdout
643
620
644 @LazyProperty
621 @LazyProperty
645 def in_memory_changeset(self):
622 def in_memory_changeset(self):
646 """
623 """
647 Returns ``GitInMemoryChangeset`` object for this repository.
624 Returns ``GitInMemoryChangeset`` object for this repository.
648 """
625 """
649 return GitInMemoryChangeset(self)
626 return GitInMemoryChangeset(self)
650
627
651 def clone(self, url, update_after_clone=True, bare=False):
628 def clone(self, url, update_after_clone=True, bare=False):
652 """
629 """
653 Tries to clone changes from external location.
630 Tries to clone changes from external location.
654
631
655 :param update_after_clone: If set to ``False``, git won't checkout
632 :param update_after_clone: If set to ``False``, git won't checkout
656 working directory
633 working directory
657 :param bare: If set to ``True``, repository would be cloned into
634 :param bare: If set to ``True``, repository would be cloned into
658 *bare* git repository (no working directory at all).
635 *bare* git repository (no working directory at all).
659 """
636 """
660 url = self._get_url(url)
637 url = self._get_url(url)
661 cmd = ['clone', '-q']
638 cmd = ['clone', '-q']
662 if bare:
639 if bare:
663 cmd.append('--bare')
640 cmd.append('--bare')
664 elif not update_after_clone:
641 elif not update_after_clone:
665 cmd.append('--no-checkout')
642 cmd.append('--no-checkout')
666 cmd += ['--', quote(url), quote(self.path)]
643 cmd += ['--', url, self.path]
667 cmd = ' '.join(cmd)
668 # If error occurs run_git_command raises RepositoryError already
644 # If error occurs run_git_command raises RepositoryError already
669 self.run_git_command(cmd)
645 self.run_git_command(cmd)
670
646
671 def pull(self, url):
647 def pull(self, url):
672 """
648 """
673 Tries to pull changes from external location.
649 Tries to pull changes from external location.
674 """
650 """
675 url = self._get_url(url)
651 url = self._get_url(url)
676 cmd = ['pull', "--ff-only", quote(url)]
652 cmd = ['pull', '--ff-only', url]
677 cmd = ' '.join(cmd)
678 # If error occurs run_git_command raises RepositoryError already
653 # If error occurs run_git_command raises RepositoryError already
679 self.run_git_command(cmd)
654 self.run_git_command(cmd)
680
655
681 def fetch(self, url):
656 def fetch(self, url):
682 """
657 """
683 Tries to pull changes from external location.
658 Tries to pull changes from external location.
684 """
659 """
685 url = self._get_url(url)
660 url = self._get_url(url)
686 so, se = self.run_git_command('ls-remote -h %s' % quote(url))
661 so, se = self.run_git_command(['ls-remote', '-h', url])
687 refs = []
662 cmd = ['fetch', url, '--']
688 for line in (x for x in so.splitlines()):
663 for line in (x for x in so.splitlines()):
689 sha, ref = line.split('\t')
664 sha, ref = line.split('\t')
690 refs.append(ref)
665 cmd.append('+%s:%s' % (ref, ref))
691 refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
692 cmd = '''fetch %s -- %s''' % (quote(url), refs)
693 self.run_git_command(cmd)
666 self.run_git_command(cmd)
694
667
695 def _update_server_info(self):
668 def _update_server_info(self):
696 """
669 """
697 runs gits update-server-info command in this repo instance
670 runs gits update-server-info command in this repo instance
698 """
671 """
699 from dulwich.server import update_server_info
672 from dulwich.server import update_server_info
700 try:
673 try:
701 update_server_info(self._repo)
674 update_server_info(self._repo)
702 except OSError, e:
675 except OSError, e:
703 if e.errno != errno.ENOENT:
676 if e.errno != errno.ENOENT:
704 raise
677 raise
705 # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock
678 # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock
706 log.error('Ignoring error running update-server-info: %s', e)
679 log.error('Ignoring error running update-server-info: %s', e)
707
680
708 @LazyProperty
681 @LazyProperty
709 def workdir(self):
682 def workdir(self):
710 """
683 """
711 Returns ``Workdir`` instance for this repository.
684 Returns ``Workdir`` instance for this repository.
712 """
685 """
713 return GitWorkdir(self)
686 return GitWorkdir(self)
714
687
715 def get_config_value(self, section, name, config_file=None):
688 def get_config_value(self, section, name, config_file=None):
716 """
689 """
717 Returns configuration value for a given [``section``] and ``name``.
690 Returns configuration value for a given [``section``] and ``name``.
718
691
719 :param section: Section we want to retrieve value from
692 :param section: Section we want to retrieve value from
720 :param name: Name of configuration we want to retrieve
693 :param name: Name of configuration we want to retrieve
721 :param config_file: A path to file which should be used to retrieve
694 :param config_file: A path to file which should be used to retrieve
722 configuration from (might also be a list of file paths)
695 configuration from (might also be a list of file paths)
723 """
696 """
724 if config_file is None:
697 if config_file is None:
725 config_file = []
698 config_file = []
726 elif isinstance(config_file, basestring):
699 elif isinstance(config_file, basestring):
727 config_file = [config_file]
700 config_file = [config_file]
728
701
729 def gen_configs():
702 def gen_configs():
730 for path in config_file + self._config_files:
703 for path in config_file + self._config_files:
731 try:
704 try:
732 yield ConfigFile.from_path(path)
705 yield ConfigFile.from_path(path)
733 except (IOError, OSError, ValueError):
706 except (IOError, OSError, ValueError):
734 continue
707 continue
735
708
736 for config in gen_configs():
709 for config in gen_configs():
737 try:
710 try:
738 return config.get(section, name)
711 return config.get(section, name)
739 except KeyError:
712 except KeyError:
740 continue
713 continue
741 return None
714 return None
742
715
743 def get_user_name(self, config_file=None):
716 def get_user_name(self, config_file=None):
744 """
717 """
745 Returns user's name from global configuration file.
718 Returns user's name from global configuration file.
746
719
747 :param config_file: A path to file which should be used to retrieve
720 :param config_file: A path to file which should be used to retrieve
748 configuration from (might also be a list of file paths)
721 configuration from (might also be a list of file paths)
749 """
722 """
750 return self.get_config_value('user', 'name', config_file)
723 return self.get_config_value('user', 'name', config_file)
751
724
752 def get_user_email(self, config_file=None):
725 def get_user_email(self, config_file=None):
753 """
726 """
754 Returns user's email from global configuration file.
727 Returns user's email from global configuration file.
755
728
756 :param config_file: A path to file which should be used to retrieve
729 :param config_file: A path to file which should be used to retrieve
757 configuration from (might also be a list of file paths)
730 configuration from (might also be a list of file paths)
758 """
731 """
759 return self.get_config_value('user', 'email', config_file)
732 return self.get_config_value('user', 'email', config_file)
@@ -1,427 +1,425 b''
1 """
1 """
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningfull, non-blocking, concurrent
3 input, output, error streams into a meaningfull, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
5 return value passed by a WSGI applicaiton to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import subprocess
26 import subprocess
27 from kallithea.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
27 from kallithea.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
28
28
29
29
30 class StreamFeeder(Thread):
30 class StreamFeeder(Thread):
31 """
31 """
32 Normal writing into pipe-like is blocking once the buffer is filled.
32 Normal writing into pipe-like is blocking once the buffer is filled.
33 This thread allows a thread to seep data from a file-like into a pipe
33 This thread allows a thread to seep data from a file-like into a pipe
34 without blocking the main thread.
34 without blocking the main thread.
35 We close inpipe once the end of the source stream is reached.
35 We close inpipe once the end of the source stream is reached.
36 """
36 """
37
37
38 def __init__(self, source):
38 def __init__(self, source):
39 super(StreamFeeder, self).__init__()
39 super(StreamFeeder, self).__init__()
40 self.daemon = True
40 self.daemon = True
41 filelike = False
41 filelike = False
42 self.bytes = _bytes()
42 self.bytes = _bytes()
43 if type(source) in (type(''), _bytes, _bytearray): # string-like
43 if type(source) in (type(''), _bytes, _bytearray): # string-like
44 self.bytes = _bytes(source)
44 self.bytes = _bytes(source)
45 else: # can be either file pointer or file-like
45 else: # can be either file pointer or file-like
46 if type(source) in (int, long): # file pointer it is
46 if type(source) in (int, long): # file pointer it is
47 ## converting file descriptor (int) stdin into file-like
47 ## converting file descriptor (int) stdin into file-like
48 source = os.fdopen(source, 'rb', 16384)
48 source = os.fdopen(source, 'rb', 16384)
49 # let's see if source is file-like by now
49 # let's see if source is file-like by now
50 filelike = hasattr(source, 'read')
50 filelike = hasattr(source, 'read')
51 if not filelike and not self.bytes:
51 if not filelike and not self.bytes:
52 raise TypeError("StreamFeeder's source object must be a readable "
52 raise TypeError("StreamFeeder's source object must be a readable "
53 "file-like, a file descriptor, or a string-like.")
53 "file-like, a file descriptor, or a string-like.")
54 self.source = source
54 self.source = source
55 self.readiface, self.writeiface = os.pipe()
55 self.readiface, self.writeiface = os.pipe()
56
56
57 def run(self):
57 def run(self):
58 t = self.writeiface
58 t = self.writeiface
59 if self.bytes:
59 if self.bytes:
60 os.write(t, self.bytes)
60 os.write(t, self.bytes)
61 else:
61 else:
62 s = self.source
62 s = self.source
63 b = s.read(4096)
63 b = s.read(4096)
64 while b:
64 while b:
65 os.write(t, b)
65 os.write(t, b)
66 b = s.read(4096)
66 b = s.read(4096)
67 os.close(t)
67 os.close(t)
68
68
69 @property
69 @property
70 def output(self):
70 def output(self):
71 return self.readiface
71 return self.readiface
72
72
73
73
74 class InputStreamChunker(Thread):
74 class InputStreamChunker(Thread):
75 def __init__(self, source, target, buffer_size, chunk_size):
75 def __init__(self, source, target, buffer_size, chunk_size):
76
76
77 super(InputStreamChunker, self).__init__()
77 super(InputStreamChunker, self).__init__()
78
78
79 self.daemon = True # die die die.
79 self.daemon = True # die die die.
80
80
81 self.source = source
81 self.source = source
82 self.target = target
82 self.target = target
83 self.chunk_count_max = int(buffer_size / chunk_size) + 1
83 self.chunk_count_max = int(buffer_size / chunk_size) + 1
84 self.chunk_size = chunk_size
84 self.chunk_size = chunk_size
85
85
86 self.data_added = Event()
86 self.data_added = Event()
87 self.data_added.clear()
87 self.data_added.clear()
88
88
89 self.keep_reading = Event()
89 self.keep_reading = Event()
90 self.keep_reading.set()
90 self.keep_reading.set()
91
91
92 self.EOF = Event()
92 self.EOF = Event()
93 self.EOF.clear()
93 self.EOF.clear()
94
94
95 self.go = Event()
95 self.go = Event()
96 self.go.set()
96 self.go.set()
97
97
98 def stop(self):
98 def stop(self):
99 self.go.clear()
99 self.go.clear()
100 self.EOF.set()
100 self.EOF.set()
101 try:
101 try:
102 # this is not proper, but is done to force the reader thread let
102 # this is not proper, but is done to force the reader thread let
103 # go of the input because, if successful, .close() will send EOF
103 # go of the input because, if successful, .close() will send EOF
104 # down the pipe.
104 # down the pipe.
105 self.source.close()
105 self.source.close()
106 except:
106 except:
107 pass
107 pass
108
108
109 def run(self):
109 def run(self):
110 s = self.source
110 s = self.source
111 t = self.target
111 t = self.target
112 cs = self.chunk_size
112 cs = self.chunk_size
113 ccm = self.chunk_count_max
113 ccm = self.chunk_count_max
114 kr = self.keep_reading
114 kr = self.keep_reading
115 da = self.data_added
115 da = self.data_added
116 go = self.go
116 go = self.go
117
117
118 try:
118 try:
119 b = s.read(cs)
119 b = s.read(cs)
120 except ValueError:
120 except ValueError:
121 b = ''
121 b = ''
122
122
123 while b and go.is_set():
123 while b and go.is_set():
124 if len(t) > ccm:
124 if len(t) > ccm:
125 kr.clear()
125 kr.clear()
126 kr.wait(2)
126 kr.wait(2)
127 # # this only works on 2.7.x and up
127 # # this only works on 2.7.x and up
128 # if not kr.wait(10):
128 # if not kr.wait(10):
129 # raise Exception("Timed out while waiting for input to be read.")
129 # raise Exception("Timed out while waiting for input to be read.")
130 # instead we'll use this
130 # instead we'll use this
131 if len(t) > ccm + 3:
131 if len(t) > ccm + 3:
132 raise IOError(
132 raise IOError(
133 "Timed out while waiting for input from subprocess.")
133 "Timed out while waiting for input from subprocess.")
134 t.append(b)
134 t.append(b)
135 da.set()
135 da.set()
136 try:
136 try:
137 b = s.read(cs)
137 b = s.read(cs)
138 except ValueError: # probably "I/O operation on closed file"
138 except ValueError: # probably "I/O operation on closed file"
139 b = ''
139 b = ''
140
140
141 self.EOF.set()
141 self.EOF.set()
142 da.set() # for cases when done but there was no input.
142 da.set() # for cases when done but there was no input.
143
143
144
144
145 class BufferedGenerator(object):
145 class BufferedGenerator(object):
146 """
146 """
147 Class behaves as a non-blocking, buffered pipe reader.
147 Class behaves as a non-blocking, buffered pipe reader.
148 Reads chunks of data (through a thread)
148 Reads chunks of data (through a thread)
149 from a blocking pipe, and attaches these to an array (Deque) of chunks.
149 from a blocking pipe, and attaches these to an array (Deque) of chunks.
150 Reading is halted in the thread when max chunks is internally buffered.
150 Reading is halted in the thread when max chunks is internally buffered.
151 The .next() may operate in blocking or non-blocking fashion by yielding
151 The .next() may operate in blocking or non-blocking fashion by yielding
152 '' if no data is ready
152 '' if no data is ready
153 to be sent or by not returning until there is some data to send
153 to be sent or by not returning until there is some data to send
154 When we get EOF from underlying source pipe we raise the marker to raise
154 When we get EOF from underlying source pipe we raise the marker to raise
155 StopIteration after the last chunk of data is yielded.
155 StopIteration after the last chunk of data is yielded.
156 """
156 """
157
157
158 def __init__(self, source, buffer_size=65536, chunk_size=4096,
158 def __init__(self, source, buffer_size=65536, chunk_size=4096,
159 starting_values=[], bottomless=False):
159 starting_values=[], bottomless=False):
160
160
161 if bottomless:
161 if bottomless:
162 maxlen = int(buffer_size / chunk_size)
162 maxlen = int(buffer_size / chunk_size)
163 else:
163 else:
164 maxlen = None
164 maxlen = None
165
165
166 self.data = deque(starting_values, maxlen)
166 self.data = deque(starting_values, maxlen)
167 self.worker = InputStreamChunker(source, self.data, buffer_size,
167 self.worker = InputStreamChunker(source, self.data, buffer_size,
168 chunk_size)
168 chunk_size)
169 if starting_values:
169 if starting_values:
170 self.worker.data_added.set()
170 self.worker.data_added.set()
171 self.worker.start()
171 self.worker.start()
172
172
173 ####################
173 ####################
174 # Generator's methods
174 # Generator's methods
175 ####################
175 ####################
176
176
177 def __iter__(self):
177 def __iter__(self):
178 return self
178 return self
179
179
180 def next(self):
180 def next(self):
181 while not len(self.data) and not self.worker.EOF.is_set():
181 while not len(self.data) and not self.worker.EOF.is_set():
182 self.worker.data_added.clear()
182 self.worker.data_added.clear()
183 self.worker.data_added.wait(0.2)
183 self.worker.data_added.wait(0.2)
184 if len(self.data):
184 if len(self.data):
185 self.worker.keep_reading.set()
185 self.worker.keep_reading.set()
186 return _bytes(self.data.popleft())
186 return _bytes(self.data.popleft())
187 elif self.worker.EOF.is_set():
187 elif self.worker.EOF.is_set():
188 raise StopIteration
188 raise StopIteration
189
189
190 def throw(self, type, value=None, traceback=None):
190 def throw(self, type, value=None, traceback=None):
191 if not self.worker.EOF.is_set():
191 if not self.worker.EOF.is_set():
192 raise type(value)
192 raise type(value)
193
193
194 def start(self):
194 def start(self):
195 self.worker.start()
195 self.worker.start()
196
196
197 def stop(self):
197 def stop(self):
198 self.worker.stop()
198 self.worker.stop()
199
199
200 def close(self):
200 def close(self):
201 try:
201 try:
202 self.worker.stop()
202 self.worker.stop()
203 self.throw(GeneratorExit)
203 self.throw(GeneratorExit)
204 except (GeneratorExit, StopIteration):
204 except (GeneratorExit, StopIteration):
205 pass
205 pass
206
206
207 def __del__(self):
207 def __del__(self):
208 self.close()
208 self.close()
209
209
210 ####################
210 ####################
211 # Threaded reader's infrastructure.
211 # Threaded reader's infrastructure.
212 ####################
212 ####################
213 @property
213 @property
214 def input(self):
214 def input(self):
215 return self.worker.w
215 return self.worker.w
216
216
217 @property
217 @property
218 def data_added_event(self):
218 def data_added_event(self):
219 return self.worker.data_added
219 return self.worker.data_added
220
220
221 @property
221 @property
222 def data_added(self):
222 def data_added(self):
223 return self.worker.data_added.is_set()
223 return self.worker.data_added.is_set()
224
224
225 @property
225 @property
226 def reading_paused(self):
226 def reading_paused(self):
227 return not self.worker.keep_reading.is_set()
227 return not self.worker.keep_reading.is_set()
228
228
229 @property
229 @property
230 def done_reading_event(self):
230 def done_reading_event(self):
231 """
231 """
232 Done_reading does not mean that the iterator's buffer is empty.
232 Done_reading does not mean that the iterator's buffer is empty.
233 Iterator might have done reading from underlying source, but the read
233 Iterator might have done reading from underlying source, but the read
234 chunks might still be available for serving through .next() method.
234 chunks might still be available for serving through .next() method.
235
235
236 :returns: An Event class instance.
236 :returns: An Event class instance.
237 """
237 """
238 return self.worker.EOF
238 return self.worker.EOF
239
239
240 @property
240 @property
241 def done_reading(self):
241 def done_reading(self):
242 """
242 """
243 Done_reading does not mean that the iterator's buffer is empty.
243 Done_reading does not mean that the iterator's buffer is empty.
244 Iterator might have done reading from underlying source, but the read
244 Iterator might have done reading from underlying source, but the read
245 chunks might still be available for serving through .next() method.
245 chunks might still be available for serving through .next() method.
246
246
247 :returns: An Bool value.
247 :returns: An Bool value.
248 """
248 """
249 return self.worker.EOF.is_set()
249 return self.worker.EOF.is_set()
250
250
251 @property
251 @property
252 def length(self):
252 def length(self):
253 """
253 """
254 returns int.
254 returns int.
255
255
256 This is the length of the queue of chunks, not the length of
256 This is the length of the queue of chunks, not the length of
257 the combined contents in those chunks.
257 the combined contents in those chunks.
258
258
259 __len__() cannot be meaningfully implemented because this
259 __len__() cannot be meaningfully implemented because this
260 reader is just flying through a bottomless pit content and
260 reader is just flying through a bottomless pit content and
261 can only know the length of what it already saw.
261 can only know the length of what it already saw.
262
262
263 If __len__() on WSGI server per PEP 3333 returns a value,
263 If __len__() on WSGI server per PEP 3333 returns a value,
264 the response's length will be set to that. In order not to
264 the response's length will be set to that. In order not to
265 confuse WSGI PEP3333 servers, we will not implement __len__
265 confuse WSGI PEP3333 servers, we will not implement __len__
266 at all.
266 at all.
267 """
267 """
268 return len(self.data)
268 return len(self.data)
269
269
270 def prepend(self, x):
270 def prepend(self, x):
271 self.data.appendleft(x)
271 self.data.appendleft(x)
272
272
273 def append(self, x):
273 def append(self, x):
274 self.data.append(x)
274 self.data.append(x)
275
275
276 def extend(self, o):
276 def extend(self, o):
277 self.data.extend(o)
277 self.data.extend(o)
278
278
279 def __getitem__(self, i):
279 def __getitem__(self, i):
280 return self.data[i]
280 return self.data[i]
281
281
282
282
283 class SubprocessIOChunker(object):
283 class SubprocessIOChunker(object):
284 """
284 """
285 Processor class wrapping handling of subprocess IO.
285 Processor class wrapping handling of subprocess IO.
286
286
287 In a way, this is a "communicate()" replacement with a twist.
287 In a way, this is a "communicate()" replacement with a twist.
288
288
289 - We are multithreaded. Writing in and reading out, err are all sep threads.
289 - We are multithreaded. Writing in and reading out, err are all sep threads.
290 - We support concurrent (in and out) stream processing.
290 - We support concurrent (in and out) stream processing.
291 - The output is not a stream. It's a queue of read string (bytes, not unicode)
291 - The output is not a stream. It's a queue of read string (bytes, not unicode)
292 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
292 chunks. The object behaves as an iterable. You can "for chunk in obj:" us.
293 - We are non-blocking in more respects than communicate()
293 - We are non-blocking in more respects than communicate()
294 (reading from subprocess out pauses when internal buffer is full, but
294 (reading from subprocess out pauses when internal buffer is full, but
295 does not block the parent calling code. On the flip side, reading from
295 does not block the parent calling code. On the flip side, reading from
296 slow-yielding subprocess may block the iteration until data shows up. This
296 slow-yielding subprocess may block the iteration until data shows up. This
297 does not block the parallel inpipe reading occurring parallel thread.)
297 does not block the parallel inpipe reading occurring parallel thread.)
298
298
299 The purpose of the object is to allow us to wrap subprocess interactions into
299 The purpose of the object is to allow us to wrap subprocess interactions into
300 an iterable that can be passed to a WSGI server as the application's return
300 an iterable that can be passed to a WSGI server as the application's return
301 value. Because of stream-processing-ability, WSGI does not have to read ALL
301 value. Because of stream-processing-ability, WSGI does not have to read ALL
302 of the subprocess's output and buffer it, before handing it to WSGI server for
302 of the subprocess's output and buffer it, before handing it to WSGI server for
303 HTTP response. Instead, the class initializer reads just a bit of the stream
303 HTTP response. Instead, the class initializer reads just a bit of the stream
304 to figure out if error occurred or likely to occur and if not, just hands the
304 to figure out if error occurred or likely to occur and if not, just hands the
305 further iteration over subprocess output to the server for completion of HTTP
305 further iteration over subprocess output to the server for completion of HTTP
306 response.
306 response.
307
307
308 The real or perceived subprocess error is trapped and raised as one of
308 The real or perceived subprocess error is trapped and raised as one of
309 EnvironmentError family of exceptions
309 EnvironmentError family of exceptions
310
310
311 Example usage:
311 Example usage:
312 # try:
312 # try:
313 # answer = SubprocessIOChunker(
313 # answer = SubprocessIOChunker(
314 # cmd,
314 # cmd,
315 # input,
315 # input,
316 # buffer_size = 65536,
316 # buffer_size = 65536,
317 # chunk_size = 4096
317 # chunk_size = 4096
318 # )
318 # )
319 # except (EnvironmentError) as e:
319 # except (EnvironmentError) as e:
320 # print str(e)
320 # print str(e)
321 # raise e
321 # raise e
322 #
322 #
323 # return answer
323 # return answer
324
324
325
325
326 """
326 """
327
327
328 def __init__(self, cmd, inputstream=None, buffer_size=65536,
328 def __init__(self, cmd, inputstream=None, buffer_size=65536,
329 chunk_size=4096, starting_values=[], **kwargs):
329 chunk_size=4096, starting_values=[], **kwargs):
330 """
330 """
331 Initializes SubprocessIOChunker
331 Initializes SubprocessIOChunker
332
332
333 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
333 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
334 :param inputstream: (Default: None) A file-like, string, or file pointer.
334 :param inputstream: (Default: None) A file-like, string, or file pointer.
335 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
335 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
336 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
336 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
337 :param starting_values: (Default: []) An array of strings to put in front of output que.
337 :param starting_values: (Default: []) An array of strings to put in front of output que.
338 """
338 """
339
339
340 if inputstream:
340 if inputstream:
341 input_streamer = StreamFeeder(inputstream)
341 input_streamer = StreamFeeder(inputstream)
342 input_streamer.start()
342 input_streamer.start()
343 inputstream = input_streamer.output
343 inputstream = input_streamer.output
344
344
345 _shell = kwargs.get('shell', True)
345 # Note: fragile cmd mangling has been removed for use in Kallithea
346 if isinstance(cmd, (list, tuple)):
346 assert isinstance(cmd, list), cmd
347 cmd = ' '.join(cmd)
348
347
349 kwargs['shell'] = _shell
350 _p = subprocess.Popen(cmd, bufsize=-1,
348 _p = subprocess.Popen(cmd, bufsize=-1,
351 stdin=inputstream,
349 stdin=inputstream,
352 stdout=subprocess.PIPE,
350 stdout=subprocess.PIPE,
353 stderr=subprocess.PIPE,
351 stderr=subprocess.PIPE,
354 **kwargs)
352 **kwargs)
355
353
356 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
354 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
357 starting_values)
355 starting_values)
358 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
356 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
359
357
360 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
358 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
361 # doing this until we reach either end of file, or end of buffer.
359 # doing this until we reach either end of file, or end of buffer.
362 bg_out.data_added_event.wait(1)
360 bg_out.data_added_event.wait(1)
363 bg_out.data_added_event.clear()
361 bg_out.data_added_event.clear()
364
362
365 # at this point it's still ambiguous if we are done reading or just full buffer.
363 # at this point it's still ambiguous if we are done reading or just full buffer.
366 # Either way, if error (returned by ended process, or implied based on
364 # Either way, if error (returned by ended process, or implied based on
367 # presence of stuff in stderr output) we error out.
365 # presence of stuff in stderr output) we error out.
368 # Else, we are happy.
366 # Else, we are happy.
369 _returncode = _p.poll()
367 _returncode = _p.poll()
370 if _returncode or (_returncode is None and bg_err.length):
368 if _returncode or (_returncode is None and bg_err.length):
371 try:
369 try:
372 _p.terminate()
370 _p.terminate()
373 except Exception:
371 except Exception:
374 pass
372 pass
375 bg_out.stop()
373 bg_out.stop()
376 out = ''.join(bg_out)
374 out = ''.join(bg_out)
377 bg_err.stop()
375 bg_err.stop()
378 err = ''.join(bg_err)
376 err = ''.join(bg_err)
379 if (err.strip() == 'fatal: The remote end hung up unexpectedly' and
377 if (err.strip() == 'fatal: The remote end hung up unexpectedly' and
380 out.startswith('0034shallow ')):
378 out.startswith('0034shallow ')):
381 # hack inspired by https://github.com/schacon/grack/pull/7
379 # hack inspired by https://github.com/schacon/grack/pull/7
382 bg_out = iter([out])
380 bg_out = iter([out])
383 _p = None
381 _p = None
384 elif err:
382 elif err:
385 raise EnvironmentError(
383 raise EnvironmentError(
386 "Subprocess exited due to an error:\n" + err)
384 "Subprocess exited due to an error:\n" + err)
387 else:
385 else:
388 raise EnvironmentError(
386 raise EnvironmentError(
389 "Subprocess exited with non 0 ret code:%s" % _returncode)
387 "Subprocess exited with non 0 ret code:%s" % _returncode)
390 self.process = _p
388 self.process = _p
391 self.output = bg_out
389 self.output = bg_out
392 self.error = bg_err
390 self.error = bg_err
393 self.inputstream = inputstream
391 self.inputstream = inputstream
394
392
395 def __iter__(self):
393 def __iter__(self):
396 return self
394 return self
397
395
398 def next(self):
396 def next(self):
399 if self.process and self.process.poll():
397 if self.process and self.process.poll():
400 err = '%s' % ''.join(self.error)
398 err = '%s' % ''.join(self.error)
401 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
399 raise EnvironmentError("Subprocess exited due to an error:\n" + err)
402 return self.output.next()
400 return self.output.next()
403
401
404 def throw(self, type, value=None, traceback=None):
402 def throw(self, type, value=None, traceback=None):
405 if self.output.length or not self.output.done_reading:
403 if self.output.length or not self.output.done_reading:
406 raise type(value)
404 raise type(value)
407
405
408 def close(self):
406 def close(self):
409 try:
407 try:
410 self.process.terminate()
408 self.process.terminate()
411 except:
409 except:
412 pass
410 pass
413 try:
411 try:
414 self.output.close()
412 self.output.close()
415 except:
413 except:
416 pass
414 pass
417 try:
415 try:
418 self.error.close()
416 self.error.close()
419 except:
417 except:
420 pass
418 pass
421 try:
419 try:
422 os.close(self.inputstream)
420 os.close(self.inputstream)
423 except:
421 except:
424 pass
422 pass
425
423
426 def __del__(self):
424 def __del__(self):
427 self.close()
425 self.close()
@@ -1,758 +1,758 b''
1 from __future__ import with_statement
1 from __future__ import with_statement
2
2
3 import os
3 import os
4 import sys
4 import sys
5 import mock
5 import mock
6 import datetime
6 import datetime
7 import urllib2
7 import urllib2
8 from kallithea.lib.vcs.backends.git import GitRepository, GitChangeset
8 from kallithea.lib.vcs.backends.git import GitRepository, GitChangeset
9 from kallithea.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
9 from kallithea.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
10 from kallithea.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
10 from kallithea.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
11 from kallithea.lib.vcs.utils.compat import unittest
11 from kallithea.lib.vcs.utils.compat import unittest
12 from kallithea.tests.vcs.base import _BackendTestMixin
12 from kallithea.tests.vcs.base import _BackendTestMixin
13 from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
13 from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
14
14
15
15
16 class GitRepositoryTest(unittest.TestCase):
16 class GitRepositoryTest(unittest.TestCase):
17
17
18 def __check_for_existing_repo(self):
18 def __check_for_existing_repo(self):
19 if os.path.exists(TEST_GIT_REPO_CLONE):
19 if os.path.exists(TEST_GIT_REPO_CLONE):
20 self.fail('Cannot test git clone repo as location %s already '
20 self.fail('Cannot test git clone repo as location %s already '
21 'exists. You should manually remove it first.'
21 'exists. You should manually remove it first.'
22 % TEST_GIT_REPO_CLONE)
22 % TEST_GIT_REPO_CLONE)
23
23
24 def setUp(self):
24 def setUp(self):
25 self.repo = GitRepository(TEST_GIT_REPO)
25 self.repo = GitRepository(TEST_GIT_REPO)
26
26
27 def test_wrong_repo_path(self):
27 def test_wrong_repo_path(self):
28 wrong_repo_path = '/tmp/errorrepo'
28 wrong_repo_path = '/tmp/errorrepo'
29 self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
29 self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
30
30
31 def test_git_cmd_injection(self):
31 def test_git_cmd_injection(self):
32 repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
32 repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
33 with self.assertRaises(urllib2.URLError):
33 with self.assertRaises(urllib2.URLError):
34 # Should fail because URL will contain the parts after ; too
34 # Should fail because URL will contain the parts after ; too
35 urlerror_fail_repo = GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
35 urlerror_fail_repo = GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
36
36
37 with self.assertRaises(RepositoryError):
37 with self.assertRaises(RepositoryError):
38 # Should fail on direct clone call, which as of this writing does not happen outside of class
38 # Should fail on direct clone call, which as of this writing does not happen outside of class
39 clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
39 clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
40 clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
40 clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
41
41
42 # Verify correct quoting of evil characters that should work on posix file systems
42 # Verify correct quoting of evil characters that should work on posix file systems
43 if sys.platform == 'win32':
43 if sys.platform == 'win32':
44 # windows does not allow '"' in dir names
44 # windows does not allow '"' in dir names
45 tricky_path = get_new_dir("tricky-path-repo-$'`")
45 tricky_path = get_new_dir("tricky-path-repo-$'`")
46 else:
46 else:
47 tricky_path = get_new_dir("tricky-path-repo-$'\"`")
47 tricky_path = get_new_dir("tricky-path-repo-$'\"`")
48 successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
48 successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
49 # Repo should have been created
49 # Repo should have been created
50 self.assertFalse(successfully_cloned._repo.bare)
50 self.assertFalse(successfully_cloned._repo.bare)
51
51
52 if sys.platform == 'win32':
52 if sys.platform == 'win32':
53 # windows does not allow '"' in dir names
53 # windows does not allow '"' in dir names
54 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'`")
54 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'`")
55 else:
55 else:
56 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
56 tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
57 successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
57 successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
58 # Repo should have been created and thus used correct quoting for clone
58 # Repo should have been created and thus used correct quoting for clone
59 self.assertTrue(successfully_cloned2._repo.bare)
59 self.assertTrue(successfully_cloned2._repo.bare)
60
60
61 # Should pass because URL has been properly quoted
61 # Should pass because URL has been properly quoted
62 successfully_cloned.pull(tricky_path_2)
62 successfully_cloned.pull(tricky_path_2)
63 successfully_cloned2.fetch(tricky_path)
63 successfully_cloned2.fetch(tricky_path)
64
64
65 def test_repo_create_with_spaces_in_path(self):
65 def test_repo_create_with_spaces_in_path(self):
66 repo_path = get_new_dir("path with spaces")
66 repo_path = get_new_dir("path with spaces")
67 repo = GitRepository(repo_path, src_url=None, bare=True, create=True)
67 repo = GitRepository(repo_path, src_url=None, bare=True, create=True)
68 # Repo should have been created
68 # Repo should have been created
69 self.assertTrue(repo._repo.bare)
69 self.assertTrue(repo._repo.bare)
70
70
71 def test_repo_clone(self):
71 def test_repo_clone(self):
72 self.__check_for_existing_repo()
72 self.__check_for_existing_repo()
73 repo = GitRepository(TEST_GIT_REPO)
73 repo = GitRepository(TEST_GIT_REPO)
74 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
74 repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
75 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
75 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
76 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
76 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
77 # Checking hashes of changesets should be enough
77 # Checking hashes of changesets should be enough
78 for changeset in repo.get_changesets():
78 for changeset in repo.get_changesets():
79 raw_id = changeset.raw_id
79 raw_id = changeset.raw_id
80 self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
80 self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
81
81
82 def test_repo_clone_with_spaces_in_path(self):
82 def test_repo_clone_with_spaces_in_path(self):
83 repo_path = get_new_dir("path with spaces")
83 repo_path = get_new_dir("path with spaces")
84 successfully_cloned = GitRepository(repo_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
84 successfully_cloned = GitRepository(repo_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
85 # Repo should have been created
85 # Repo should have been created
86 self.assertFalse(successfully_cloned._repo.bare)
86 self.assertFalse(successfully_cloned._repo.bare)
87
87
88 successfully_cloned.pull(TEST_GIT_REPO)
88 successfully_cloned.pull(TEST_GIT_REPO)
89 self.repo.fetch(repo_path)
89 self.repo.fetch(repo_path)
90
90
91 def test_repo_clone_without_create(self):
91 def test_repo_clone_without_create(self):
92 self.assertRaises(RepositoryError, GitRepository,
92 self.assertRaises(RepositoryError, GitRepository,
93 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
93 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
94
94
95 def test_repo_clone_with_update(self):
95 def test_repo_clone_with_update(self):
96 repo = GitRepository(TEST_GIT_REPO)
96 repo = GitRepository(TEST_GIT_REPO)
97 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
97 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
98 repo_clone = GitRepository(clone_path,
98 repo_clone = GitRepository(clone_path,
99 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
99 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
100 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
100 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
101
101
102 #check if current workdir was updated
102 #check if current workdir was updated
103 fpath = os.path.join(clone_path, 'MANIFEST.in')
103 fpath = os.path.join(clone_path, 'MANIFEST.in')
104 self.assertEqual(True, os.path.isfile(fpath),
104 self.assertEqual(True, os.path.isfile(fpath),
105 'Repo was cloned and updated but file %s could not be found'
105 'Repo was cloned and updated but file %s could not be found'
106 % fpath)
106 % fpath)
107
107
108 def test_repo_clone_without_update(self):
108 def test_repo_clone_without_update(self):
109 repo = GitRepository(TEST_GIT_REPO)
109 repo = GitRepository(TEST_GIT_REPO)
110 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
110 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
111 repo_clone = GitRepository(clone_path,
111 repo_clone = GitRepository(clone_path,
112 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
112 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
113 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
113 self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
114 #check if current workdir was *NOT* updated
114 #check if current workdir was *NOT* updated
115 fpath = os.path.join(clone_path, 'MANIFEST.in')
115 fpath = os.path.join(clone_path, 'MANIFEST.in')
116 # Make sure it's not bare repo
116 # Make sure it's not bare repo
117 self.assertFalse(repo_clone._repo.bare)
117 self.assertFalse(repo_clone._repo.bare)
118 self.assertEqual(False, os.path.isfile(fpath),
118 self.assertEqual(False, os.path.isfile(fpath),
119 'Repo was cloned and updated but file %s was found'
119 'Repo was cloned and updated but file %s was found'
120 % fpath)
120 % fpath)
121
121
122 def test_repo_clone_into_bare_repo(self):
122 def test_repo_clone_into_bare_repo(self):
123 repo = GitRepository(TEST_GIT_REPO)
123 repo = GitRepository(TEST_GIT_REPO)
124 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
124 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
125 repo_clone = GitRepository(clone_path, create=True,
125 repo_clone = GitRepository(clone_path, create=True,
126 src_url=repo.path, bare=True)
126 src_url=repo.path, bare=True)
127 self.assertTrue(repo_clone._repo.bare)
127 self.assertTrue(repo_clone._repo.bare)
128
128
129 def test_create_repo_is_not_bare_by_default(self):
129 def test_create_repo_is_not_bare_by_default(self):
130 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
130 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
131 self.assertFalse(repo._repo.bare)
131 self.assertFalse(repo._repo.bare)
132
132
133 def test_create_bare_repo(self):
133 def test_create_bare_repo(self):
134 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
134 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
135 self.assertTrue(repo._repo.bare)
135 self.assertTrue(repo._repo.bare)
136
136
137 def test_revisions(self):
137 def test_revisions(self):
138 # there are 112 revisions (by now)
138 # there are 112 revisions (by now)
139 # so we can assume they would be available from now on
139 # so we can assume they would be available from now on
140 subset = set([
140 subset = set([
141 'c1214f7e79e02fc37156ff215cd71275450cffc3',
141 'c1214f7e79e02fc37156ff215cd71275450cffc3',
142 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
142 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
143 'fa6600f6848800641328adbf7811fd2372c02ab2',
143 'fa6600f6848800641328adbf7811fd2372c02ab2',
144 '102607b09cdd60e2793929c4f90478be29f85a17',
144 '102607b09cdd60e2793929c4f90478be29f85a17',
145 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
145 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
146 '2d1028c054665b962fa3d307adfc923ddd528038',
146 '2d1028c054665b962fa3d307adfc923ddd528038',
147 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
147 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
148 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
148 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
149 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
149 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
150 '8430a588b43b5d6da365400117c89400326e7992',
150 '8430a588b43b5d6da365400117c89400326e7992',
151 'd955cd312c17b02143c04fa1099a352b04368118',
151 'd955cd312c17b02143c04fa1099a352b04368118',
152 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
152 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
153 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
153 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
154 'f298fe1189f1b69779a4423f40b48edf92a703fc',
154 'f298fe1189f1b69779a4423f40b48edf92a703fc',
155 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
155 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
156 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
156 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
157 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
157 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
158 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
158 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
159 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
159 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
160 '45223f8f114c64bf4d6f853e3c35a369a6305520',
160 '45223f8f114c64bf4d6f853e3c35a369a6305520',
161 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
161 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
162 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
162 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
163 '27d48942240f5b91dfda77accd2caac94708cc7d',
163 '27d48942240f5b91dfda77accd2caac94708cc7d',
164 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
164 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
165 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
165 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
166 self.assertTrue(subset.issubset(set(self.repo.revisions)))
166 self.assertTrue(subset.issubset(set(self.repo.revisions)))
167
167
168
168
169
169
170 def test_slicing(self):
170 def test_slicing(self):
171 #4 1 5 10 95
171 #4 1 5 10 95
172 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
172 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
173 (10, 20, 10), (5, 100, 95)]:
173 (10, 20, 10), (5, 100, 95)]:
174 revs = list(self.repo[sfrom:sto])
174 revs = list(self.repo[sfrom:sto])
175 self.assertEqual(len(revs), size)
175 self.assertEqual(len(revs), size)
176 self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
176 self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
177 self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
177 self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
178
178
179
179
180 def test_branches(self):
180 def test_branches(self):
181 # TODO: Need more tests here
181 # TODO: Need more tests here
182 # Removed (those are 'remotes' branches for cloned repo)
182 # Removed (those are 'remotes' branches for cloned repo)
183 #self.assertTrue('master' in self.repo.branches)
183 #self.assertTrue('master' in self.repo.branches)
184 #self.assertTrue('gittree' in self.repo.branches)
184 #self.assertTrue('gittree' in self.repo.branches)
185 #self.assertTrue('web-branch' in self.repo.branches)
185 #self.assertTrue('web-branch' in self.repo.branches)
186 for name, id in self.repo.branches.items():
186 for name, id in self.repo.branches.items():
187 self.assertTrue(isinstance(
187 self.assertTrue(isinstance(
188 self.repo.get_changeset(id), GitChangeset))
188 self.repo.get_changeset(id), GitChangeset))
189
189
190 def test_tags(self):
190 def test_tags(self):
191 # TODO: Need more tests here
191 # TODO: Need more tests here
192 self.assertTrue('v0.1.1' in self.repo.tags)
192 self.assertTrue('v0.1.1' in self.repo.tags)
193 self.assertTrue('v0.1.2' in self.repo.tags)
193 self.assertTrue('v0.1.2' in self.repo.tags)
194 for name, id in self.repo.tags.items():
194 for name, id in self.repo.tags.items():
195 self.assertTrue(isinstance(
195 self.assertTrue(isinstance(
196 self.repo.get_changeset(id), GitChangeset))
196 self.repo.get_changeset(id), GitChangeset))
197
197
198 def _test_single_changeset_cache(self, revision):
198 def _test_single_changeset_cache(self, revision):
199 chset = self.repo.get_changeset(revision)
199 chset = self.repo.get_changeset(revision)
200 self.assertTrue(revision in self.repo.changesets)
200 self.assertTrue(revision in self.repo.changesets)
201 self.assertTrue(chset is self.repo.changesets[revision])
201 self.assertTrue(chset is self.repo.changesets[revision])
202
202
203 def test_initial_changeset(self):
203 def test_initial_changeset(self):
204 id = self.repo.revisions[0]
204 id = self.repo.revisions[0]
205 init_chset = self.repo.get_changeset(id)
205 init_chset = self.repo.get_changeset(id)
206 self.assertEqual(init_chset.message, 'initial import\n')
206 self.assertEqual(init_chset.message, 'initial import\n')
207 self.assertEqual(init_chset.author,
207 self.assertEqual(init_chset.author,
208 'Marcin Kuzminski <marcin@python-blog.com>')
208 'Marcin Kuzminski <marcin@python-blog.com>')
209 for path in ('vcs/__init__.py',
209 for path in ('vcs/__init__.py',
210 'vcs/backends/BaseRepository.py',
210 'vcs/backends/BaseRepository.py',
211 'vcs/backends/__init__.py'):
211 'vcs/backends/__init__.py'):
212 self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
212 self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
213 for path in ('', 'vcs', 'vcs/backends'):
213 for path in ('', 'vcs', 'vcs/backends'):
214 self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
214 self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
215
215
216 self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
216 self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
217
217
218 node = init_chset.get_node('vcs/')
218 node = init_chset.get_node('vcs/')
219 self.assertTrue(hasattr(node, 'kind'))
219 self.assertTrue(hasattr(node, 'kind'))
220 self.assertEqual(node.kind, NodeKind.DIR)
220 self.assertEqual(node.kind, NodeKind.DIR)
221
221
222 node = init_chset.get_node('vcs')
222 node = init_chset.get_node('vcs')
223 self.assertTrue(hasattr(node, 'kind'))
223 self.assertTrue(hasattr(node, 'kind'))
224 self.assertEqual(node.kind, NodeKind.DIR)
224 self.assertEqual(node.kind, NodeKind.DIR)
225
225
226 node = init_chset.get_node('vcs/__init__.py')
226 node = init_chset.get_node('vcs/__init__.py')
227 self.assertTrue(hasattr(node, 'kind'))
227 self.assertTrue(hasattr(node, 'kind'))
228 self.assertEqual(node.kind, NodeKind.FILE)
228 self.assertEqual(node.kind, NodeKind.FILE)
229
229
230 def test_not_existing_changeset(self):
230 def test_not_existing_changeset(self):
231 self.assertRaises(RepositoryError, self.repo.get_changeset,
231 self.assertRaises(RepositoryError, self.repo.get_changeset,
232 'f' * 40)
232 'f' * 40)
233
233
234 def test_changeset10(self):
234 def test_changeset10(self):
235
235
236 chset10 = self.repo.get_changeset(self.repo.revisions[9])
236 chset10 = self.repo.get_changeset(self.repo.revisions[9])
237 README = """===
237 README = """===
238 VCS
238 VCS
239 ===
239 ===
240
240
241 Various Version Control System management abstraction layer for Python.
241 Various Version Control System management abstraction layer for Python.
242
242
243 Introduction
243 Introduction
244 ------------
244 ------------
245
245
246 TODO: To be written...
246 TODO: To be written...
247
247
248 """
248 """
249 node = chset10.get_node('README.rst')
249 node = chset10.get_node('README.rst')
250 self.assertEqual(node.kind, NodeKind.FILE)
250 self.assertEqual(node.kind, NodeKind.FILE)
251 self.assertEqual(node.content, README)
251 self.assertEqual(node.content, README)
252
252
253
253
254 class GitChangesetTest(unittest.TestCase):
254 class GitChangesetTest(unittest.TestCase):
255
255
256 def setUp(self):
256 def setUp(self):
257 self.repo = GitRepository(TEST_GIT_REPO)
257 self.repo = GitRepository(TEST_GIT_REPO)
258
258
259 def test_default_changeset(self):
259 def test_default_changeset(self):
260 tip = self.repo.get_changeset()
260 tip = self.repo.get_changeset()
261 self.assertEqual(tip, self.repo.get_changeset(None))
261 self.assertEqual(tip, self.repo.get_changeset(None))
262 self.assertEqual(tip, self.repo.get_changeset('tip'))
262 self.assertEqual(tip, self.repo.get_changeset('tip'))
263
263
264 def test_root_node(self):
264 def test_root_node(self):
265 tip = self.repo.get_changeset()
265 tip = self.repo.get_changeset()
266 self.assertTrue(tip.root is tip.get_node(''))
266 self.assertTrue(tip.root is tip.get_node(''))
267
267
268 def test_lazy_fetch(self):
268 def test_lazy_fetch(self):
269 """
269 """
270 Test if changeset's nodes expands and are cached as we walk through
270 Test if changeset's nodes expands and are cached as we walk through
271 the revision. This test is somewhat hard to write as order of tests
271 the revision. This test is somewhat hard to write as order of tests
272 is a key here. Written by running command after command in a shell.
272 is a key here. Written by running command after command in a shell.
273 """
273 """
274 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
274 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
275 self.assertTrue(hex in self.repo.revisions)
275 self.assertTrue(hex in self.repo.revisions)
276 chset = self.repo.get_changeset(hex)
276 chset = self.repo.get_changeset(hex)
277 self.assertTrue(len(chset.nodes) == 0)
277 self.assertTrue(len(chset.nodes) == 0)
278 root = chset.root
278 root = chset.root
279 self.assertTrue(len(chset.nodes) == 1)
279 self.assertTrue(len(chset.nodes) == 1)
280 self.assertTrue(len(root.nodes) == 8)
280 self.assertTrue(len(root.nodes) == 8)
281 # accessing root.nodes updates chset.nodes
281 # accessing root.nodes updates chset.nodes
282 self.assertTrue(len(chset.nodes) == 9)
282 self.assertTrue(len(chset.nodes) == 9)
283
283
284 docs = root.get_node('docs')
284 docs = root.get_node('docs')
285 # we haven't yet accessed anything new as docs dir was already cached
285 # we haven't yet accessed anything new as docs dir was already cached
286 self.assertTrue(len(chset.nodes) == 9)
286 self.assertTrue(len(chset.nodes) == 9)
287 self.assertTrue(len(docs.nodes) == 8)
287 self.assertTrue(len(docs.nodes) == 8)
288 # accessing docs.nodes updates chset.nodes
288 # accessing docs.nodes updates chset.nodes
289 self.assertTrue(len(chset.nodes) == 17)
289 self.assertTrue(len(chset.nodes) == 17)
290
290
291 self.assertTrue(docs is chset.get_node('docs'))
291 self.assertTrue(docs is chset.get_node('docs'))
292 self.assertTrue(docs is root.nodes[0])
292 self.assertTrue(docs is root.nodes[0])
293 self.assertTrue(docs is root.dirs[0])
293 self.assertTrue(docs is root.dirs[0])
294 self.assertTrue(docs is chset.get_node('docs'))
294 self.assertTrue(docs is chset.get_node('docs'))
295
295
296 def test_nodes_with_changeset(self):
296 def test_nodes_with_changeset(self):
297 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
297 hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
298 chset = self.repo.get_changeset(hex)
298 chset = self.repo.get_changeset(hex)
299 root = chset.root
299 root = chset.root
300 docs = root.get_node('docs')
300 docs = root.get_node('docs')
301 self.assertTrue(docs is chset.get_node('docs'))
301 self.assertTrue(docs is chset.get_node('docs'))
302 api = docs.get_node('api')
302 api = docs.get_node('api')
303 self.assertTrue(api is chset.get_node('docs/api'))
303 self.assertTrue(api is chset.get_node('docs/api'))
304 index = api.get_node('index.rst')
304 index = api.get_node('index.rst')
305 self.assertTrue(index is chset.get_node('docs/api/index.rst'))
305 self.assertTrue(index is chset.get_node('docs/api/index.rst'))
306 self.assertTrue(index is chset.get_node('docs')\
306 self.assertTrue(index is chset.get_node('docs')\
307 .get_node('api')\
307 .get_node('api')\
308 .get_node('index.rst'))
308 .get_node('index.rst'))
309
309
310 def test_branch_and_tags(self):
310 def test_branch_and_tags(self):
311 """
311 """
312 rev0 = self.repo.revisions[0]
312 rev0 = self.repo.revisions[0]
313 chset0 = self.repo.get_changeset(rev0)
313 chset0 = self.repo.get_changeset(rev0)
314 self.assertEqual(chset0.branch, 'master')
314 self.assertEqual(chset0.branch, 'master')
315 self.assertEqual(chset0.tags, [])
315 self.assertEqual(chset0.tags, [])
316
316
317 rev10 = self.repo.revisions[10]
317 rev10 = self.repo.revisions[10]
318 chset10 = self.repo.get_changeset(rev10)
318 chset10 = self.repo.get_changeset(rev10)
319 self.assertEqual(chset10.branch, 'master')
319 self.assertEqual(chset10.branch, 'master')
320 self.assertEqual(chset10.tags, [])
320 self.assertEqual(chset10.tags, [])
321
321
322 rev44 = self.repo.revisions[44]
322 rev44 = self.repo.revisions[44]
323 chset44 = self.repo.get_changeset(rev44)
323 chset44 = self.repo.get_changeset(rev44)
324 self.assertEqual(chset44.branch, 'web-branch')
324 self.assertEqual(chset44.branch, 'web-branch')
325
325
326 tip = self.repo.get_changeset('tip')
326 tip = self.repo.get_changeset('tip')
327 self.assertTrue('tip' in tip.tags)
327 self.assertTrue('tip' in tip.tags)
328 """
328 """
329 # Those tests would fail - branches are now going
329 # Those tests would fail - branches are now going
330 # to be changed at main API in order to support git backend
330 # to be changed at main API in order to support git backend
331 pass
331 pass
332
332
333 def _test_slices(self, limit, offset):
333 def _test_slices(self, limit, offset):
334 count = self.repo.count()
334 count = self.repo.count()
335 changesets = self.repo.get_changesets(limit=limit, offset=offset)
335 changesets = self.repo.get_changesets(limit=limit, offset=offset)
336 idx = 0
336 idx = 0
337 for changeset in changesets:
337 for changeset in changesets:
338 rev = offset + idx
338 rev = offset + idx
339 idx += 1
339 idx += 1
340 rev_id = self.repo.revisions[rev]
340 rev_id = self.repo.revisions[rev]
341 if idx > limit:
341 if idx > limit:
342 self.fail("Exceeded limit already (getting revision %s, "
342 self.fail("Exceeded limit already (getting revision %s, "
343 "there are %s total revisions, offset=%s, limit=%s)"
343 "there are %s total revisions, offset=%s, limit=%s)"
344 % (rev_id, count, offset, limit))
344 % (rev_id, count, offset, limit))
345 self.assertEqual(changeset, self.repo.get_changeset(rev_id))
345 self.assertEqual(changeset, self.repo.get_changeset(rev_id))
346 result = list(self.repo.get_changesets(limit=limit, offset=offset))
346 result = list(self.repo.get_changesets(limit=limit, offset=offset))
347 start = offset
347 start = offset
348 end = limit and offset + limit or None
348 end = limit and offset + limit or None
349 sliced = list(self.repo[start:end])
349 sliced = list(self.repo[start:end])
350 self.failUnlessEqual(result, sliced,
350 self.failUnlessEqual(result, sliced,
351 msg="Comparison failed for limit=%s, offset=%s"
351 msg="Comparison failed for limit=%s, offset=%s"
352 "(get_changeset returned: %s and sliced: %s"
352 "(get_changeset returned: %s and sliced: %s"
353 % (limit, offset, result, sliced))
353 % (limit, offset, result, sliced))
354
354
355 def _test_file_size(self, revision, path, size):
355 def _test_file_size(self, revision, path, size):
356 node = self.repo.get_changeset(revision).get_node(path)
356 node = self.repo.get_changeset(revision).get_node(path)
357 self.assertTrue(node.is_file())
357 self.assertTrue(node.is_file())
358 self.assertEqual(node.size, size)
358 self.assertEqual(node.size, size)
359
359
360 def test_file_size(self):
360 def test_file_size(self):
361 to_check = (
361 to_check = (
362 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
362 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
363 'vcs/backends/BaseRepository.py', 502),
363 'vcs/backends/BaseRepository.py', 502),
364 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
364 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
365 'vcs/backends/hg.py', 854),
365 'vcs/backends/hg.py', 854),
366 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
366 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
367 'setup.py', 1068),
367 'setup.py', 1068),
368
368
369 ('d955cd312c17b02143c04fa1099a352b04368118',
369 ('d955cd312c17b02143c04fa1099a352b04368118',
370 'vcs/backends/base.py', 2921),
370 'vcs/backends/base.py', 2921),
371 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
371 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
372 'vcs/backends/base.py', 3936),
372 'vcs/backends/base.py', 3936),
373 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
373 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
374 'vcs/backends/base.py', 6189),
374 'vcs/backends/base.py', 6189),
375 )
375 )
376 for revision, path, size in to_check:
376 for revision, path, size in to_check:
377 self._test_file_size(revision, path, size)
377 self._test_file_size(revision, path, size)
378
378
379 def test_file_history(self):
379 def test_file_history(self):
380 # we can only check if those revisions are present in the history
380 # we can only check if those revisions are present in the history
381 # as we cannot update this test every time file is changed
381 # as we cannot update this test every time file is changed
382 files = {
382 files = {
383 'setup.py': [
383 'setup.py': [
384 '54386793436c938cff89326944d4c2702340037d',
384 '54386793436c938cff89326944d4c2702340037d',
385 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
385 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
386 '998ed409c795fec2012b1c0ca054d99888b22090',
386 '998ed409c795fec2012b1c0ca054d99888b22090',
387 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
387 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
388 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
388 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
389 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
389 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
390 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
390 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
391 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
391 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
392 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
392 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
393 ],
393 ],
394 'vcs/nodes.py': [
394 'vcs/nodes.py': [
395 '33fa3223355104431402a888fa77a4e9956feb3e',
395 '33fa3223355104431402a888fa77a4e9956feb3e',
396 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
396 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
397 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
397 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
398 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
398 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
399 'c877b68d18e792a66b7f4c529ea02c8f80801542',
399 'c877b68d18e792a66b7f4c529ea02c8f80801542',
400 '4313566d2e417cb382948f8d9d7c765330356054',
400 '4313566d2e417cb382948f8d9d7c765330356054',
401 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
401 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
402 '54386793436c938cff89326944d4c2702340037d',
402 '54386793436c938cff89326944d4c2702340037d',
403 '54000345d2e78b03a99d561399e8e548de3f3203',
403 '54000345d2e78b03a99d561399e8e548de3f3203',
404 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
404 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
405 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
405 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
406 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
406 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
407 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
407 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
408 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
408 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
409 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
409 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
410 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
410 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
411 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
411 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
412 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
412 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
413 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
413 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
414 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
414 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
415 'f15c21f97864b4f071cddfbf2750ec2e23859414',
415 'f15c21f97864b4f071cddfbf2750ec2e23859414',
416 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
416 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
417 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
417 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
418 '84dec09632a4458f79f50ddbbd155506c460b4f9',
418 '84dec09632a4458f79f50ddbbd155506c460b4f9',
419 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
419 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
420 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
420 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
421 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
421 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
422 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
422 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
423 '6970b057cffe4aab0a792aa634c89f4bebf01441',
423 '6970b057cffe4aab0a792aa634c89f4bebf01441',
424 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
424 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
425 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
425 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
426 ],
426 ],
427 'vcs/backends/git.py': [
427 'vcs/backends/git.py': [
428 '4cf116ad5a457530381135e2f4c453e68a1b0105',
428 '4cf116ad5a457530381135e2f4c453e68a1b0105',
429 '9a751d84d8e9408e736329767387f41b36935153',
429 '9a751d84d8e9408e736329767387f41b36935153',
430 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
430 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
431 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
431 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
432 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
432 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
433 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
433 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
434 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
434 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
435 '54000345d2e78b03a99d561399e8e548de3f3203',
435 '54000345d2e78b03a99d561399e8e548de3f3203',
436 ],
436 ],
437 }
437 }
438 for path, revs in files.items():
438 for path, revs in files.items():
439 node = self.repo.get_changeset(revs[0]).get_node(path)
439 node = self.repo.get_changeset(revs[0]).get_node(path)
440 node_revs = [chset.raw_id for chset in node.history]
440 node_revs = [chset.raw_id for chset in node.history]
441 self.assertTrue(set(revs).issubset(set(node_revs)),
441 self.assertTrue(set(revs).issubset(set(node_revs)),
442 "We assumed that %s is subset of revisions for which file %s "
442 "We assumed that %s is subset of revisions for which file %s "
443 "has been changed, and history of that node returned: %s"
443 "has been changed, and history of that node returned: %s"
444 % (revs, path, node_revs))
444 % (revs, path, node_revs))
445
445
446 def test_file_annotate(self):
446 def test_file_annotate(self):
447 files = {
447 files = {
448 'vcs/backends/__init__.py': {
448 'vcs/backends/__init__.py': {
449 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
449 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
450 'lines_no': 1,
450 'lines_no': 1,
451 'changesets': [
451 'changesets': [
452 'c1214f7e79e02fc37156ff215cd71275450cffc3',
452 'c1214f7e79e02fc37156ff215cd71275450cffc3',
453 ],
453 ],
454 },
454 },
455 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
455 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
456 'lines_no': 21,
456 'lines_no': 21,
457 'changesets': [
457 'changesets': [
458 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
458 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
459 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
459 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
460 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
460 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
461 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
461 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
462 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
462 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
463 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
463 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
464 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
464 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
465 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
465 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
466 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
466 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
467 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
467 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
468 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
468 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
469 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
469 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
470 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
470 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
471 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
471 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
472 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
472 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
473 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
473 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
474 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
474 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
475 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
475 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
476 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
476 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
477 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
477 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
478 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
478 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
479 ],
479 ],
480 },
480 },
481 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
481 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
482 'lines_no': 32,
482 'lines_no': 32,
483 'changesets': [
483 'changesets': [
484 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
484 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
485 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
485 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
486 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
486 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
487 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
487 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
488 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
488 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
489 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
489 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
490 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
490 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
491 '54000345d2e78b03a99d561399e8e548de3f3203',
491 '54000345d2e78b03a99d561399e8e548de3f3203',
492 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
492 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
493 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
493 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
494 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
494 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
495 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
495 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
496 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
496 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
497 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
497 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
498 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
498 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
499 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
499 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
500 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
500 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
501 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
501 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
502 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
502 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
503 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
503 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
504 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
504 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
505 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
505 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
506 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
506 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
507 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
507 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
508 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
508 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
509 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
509 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
510 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
510 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
511 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
511 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
512 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
512 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
513 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
513 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
514 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
514 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
515 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
515 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
516 ],
516 ],
517 },
517 },
518 },
518 },
519 }
519 }
520
520
521 for fname, revision_dict in files.items():
521 for fname, revision_dict in files.items():
522 for rev, data in revision_dict.items():
522 for rev, data in revision_dict.items():
523 cs = self.repo.get_changeset(rev)
523 cs = self.repo.get_changeset(rev)
524
524
525 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
525 l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
526 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
526 l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
527 self.assertEqual(l1_1, l1_2)
527 self.assertEqual(l1_1, l1_2)
528 l1 = l1_1
528 l1 = l1_1
529 l2 = files[fname][rev]['changesets']
529 l2 = files[fname][rev]['changesets']
530 self.assertTrue(l1 == l2 , "The lists of revision for %s@rev %s"
530 self.assertTrue(l1 == l2 , "The lists of revision for %s@rev %s"
531 "from annotation list should match each other, "
531 "from annotation list should match each other, "
532 "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
532 "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
533
533
534 def test_files_state(self):
534 def test_files_state(self):
535 """
535 """
536 Tests state of FileNodes.
536 Tests state of FileNodes.
537 """
537 """
538 node = self.repo\
538 node = self.repo\
539 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
539 .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
540 .get_node('vcs/utils/diffs.py')
540 .get_node('vcs/utils/diffs.py')
541 self.assertTrue(node.state, NodeState.ADDED)
541 self.assertTrue(node.state, NodeState.ADDED)
542 self.assertTrue(node.added)
542 self.assertTrue(node.added)
543 self.assertFalse(node.changed)
543 self.assertFalse(node.changed)
544 self.assertFalse(node.not_changed)
544 self.assertFalse(node.not_changed)
545 self.assertFalse(node.removed)
545 self.assertFalse(node.removed)
546
546
547 node = self.repo\
547 node = self.repo\
548 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
548 .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
549 .get_node('.hgignore')
549 .get_node('.hgignore')
550 self.assertTrue(node.state, NodeState.CHANGED)
550 self.assertTrue(node.state, NodeState.CHANGED)
551 self.assertFalse(node.added)
551 self.assertFalse(node.added)
552 self.assertTrue(node.changed)
552 self.assertTrue(node.changed)
553 self.assertFalse(node.not_changed)
553 self.assertFalse(node.not_changed)
554 self.assertFalse(node.removed)
554 self.assertFalse(node.removed)
555
555
556 node = self.repo\
556 node = self.repo\
557 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
557 .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
558 .get_node('setup.py')
558 .get_node('setup.py')
559 self.assertTrue(node.state, NodeState.NOT_CHANGED)
559 self.assertTrue(node.state, NodeState.NOT_CHANGED)
560 self.assertFalse(node.added)
560 self.assertFalse(node.added)
561 self.assertFalse(node.changed)
561 self.assertFalse(node.changed)
562 self.assertTrue(node.not_changed)
562 self.assertTrue(node.not_changed)
563 self.assertFalse(node.removed)
563 self.assertFalse(node.removed)
564
564
565 # If node has REMOVED state then trying to fetch it would raise
565 # If node has REMOVED state then trying to fetch it would raise
566 # ChangesetError exception
566 # ChangesetError exception
567 chset = self.repo.get_changeset(
567 chset = self.repo.get_changeset(
568 'fa6600f6848800641328adbf7811fd2372c02ab2')
568 'fa6600f6848800641328adbf7811fd2372c02ab2')
569 path = 'vcs/backends/BaseRepository.py'
569 path = 'vcs/backends/BaseRepository.py'
570 self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
570 self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
571 # but it would be one of ``removed`` (changeset's attribute)
571 # but it would be one of ``removed`` (changeset's attribute)
572 self.assertTrue(path in [rf.path for rf in chset.removed])
572 self.assertTrue(path in [rf.path for rf in chset.removed])
573
573
574 chset = self.repo.get_changeset(
574 chset = self.repo.get_changeset(
575 '54386793436c938cff89326944d4c2702340037d')
575 '54386793436c938cff89326944d4c2702340037d')
576 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
576 changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
577 'vcs/nodes.py']
577 'vcs/nodes.py']
578 self.assertEqual(set(changed), set([f.path for f in chset.changed]))
578 self.assertEqual(set(changed), set([f.path for f in chset.changed]))
579
579
580 def test_commit_message_is_unicode(self):
580 def test_commit_message_is_unicode(self):
581 for cs in self.repo:
581 for cs in self.repo:
582 self.assertEqual(type(cs.message), unicode)
582 self.assertEqual(type(cs.message), unicode)
583
583
584 def test_changeset_author_is_unicode(self):
584 def test_changeset_author_is_unicode(self):
585 for cs in self.repo:
585 for cs in self.repo:
586 self.assertEqual(type(cs.author), unicode)
586 self.assertEqual(type(cs.author), unicode)
587
587
588 def test_repo_files_content_is_unicode(self):
588 def test_repo_files_content_is_unicode(self):
589 changeset = self.repo.get_changeset()
589 changeset = self.repo.get_changeset()
590 for node in changeset.get_node('/'):
590 for node in changeset.get_node('/'):
591 if node.is_file():
591 if node.is_file():
592 self.assertEqual(type(node.content), unicode)
592 self.assertEqual(type(node.content), unicode)
593
593
594 def test_wrong_path(self):
594 def test_wrong_path(self):
595 # There is 'setup.py' in the root dir but not there:
595 # There is 'setup.py' in the root dir but not there:
596 path = 'foo/bar/setup.py'
596 path = 'foo/bar/setup.py'
597 tip = self.repo.get_changeset()
597 tip = self.repo.get_changeset()
598 self.assertRaises(VCSError, tip.get_node, path)
598 self.assertRaises(VCSError, tip.get_node, path)
599
599
600 def test_author_email(self):
600 def test_author_email(self):
601 self.assertEqual('marcin@python-blog.com',
601 self.assertEqual('marcin@python-blog.com',
602 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
602 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
603 .author_email)
603 .author_email)
604 self.assertEqual('lukasz.balcerzak@python-center.pl',
604 self.assertEqual('lukasz.balcerzak@python-center.pl',
605 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
605 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
606 .author_email)
606 .author_email)
607 self.assertEqual('none@none',
607 self.assertEqual('none@none',
608 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
608 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
609 .author_email)
609 .author_email)
610
610
611 def test_author_username(self):
611 def test_author_username(self):
612 self.assertEqual('Marcin Kuzminski',
612 self.assertEqual('Marcin Kuzminski',
613 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
613 self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
614 .author_name)
614 .author_name)
615 self.assertEqual('Lukasz Balcerzak',
615 self.assertEqual('Lukasz Balcerzak',
616 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
616 self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
617 .author_name)
617 .author_name)
618 self.assertEqual('marcink',
618 self.assertEqual('marcink',
619 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
619 self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
620 .author_name)
620 .author_name)
621
621
622
622
class GitSpecificTest(unittest.TestCase):
    """Pure unit tests for GitChangeset; no real repository is needed."""

    def _assert_vcs_error_for(self, attr):
        # Helper: accessing the given changeset attribute must raise
        # VCSError when the cached --name-status output is unparsable.
        repo = mock.MagicMock()
        changeset = GitChangeset(repo, 'foobar')
        changeset._diff_name_status = 'foobar'
        with self.assertRaises(VCSError):
            getattr(changeset, attr)

    def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self):
        self._assert_vcs_error_for('added')

    def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self):
        # Fixed copy-paste bug: this test previously accessed ``added``,
        # so ``changed`` was never exercised.
        self._assert_vcs_error_for('changed')

    def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self):
        # Fixed copy-paste bug: this test previously accessed ``added``,
        # so ``removed`` was never exercised.
        self._assert_vcs_error_for('removed')
645
645
646
646
class GitSpecificWithRepoTest(_BackendTestMixin, unittest.TestCase):
    # Git-only behaviour exercised against a real (temporary) repository
    # created by _BackendTestMixin from the _get_commits() fixture.
    backend_alias = 'git'

    @classmethod
    def _get_commits(cls):
        """Fixture: two commits with nested paths and a symlink."""
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('foobar/static/js/admin/base.js', content='base'),
                    FileNode('foobar/static/admin', content='admin',
                             mode=0120000),  # this is a link
                    FileNode('foo', content='foo'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('foo2', content='foo2'),
                ],
            },
        ]

    def test_paths_slow_traversing(self):
        # Descend one directory node at a time.
        cs = self.repo.get_changeset()
        self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js')
                         .get_node('admin').get_node('base.js').content, 'base')

    def test_paths_fast_traversing(self):
        # Resolve the same deep path with a single get_node() call.
        cs = self.repo.get_changeset()
        self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content,
                         'base')

    def test_workdir_get_branch(self):
        self.repo.run_git_command(['checkout', '-b', 'production'])
        # Regression test: one of following would fail if we don't check
        # .git/HEAD file
        self.repo.run_git_command(['checkout', 'production'])
        self.assertEqual(self.repo.workdir.get_branch(), 'production')
        self.repo.run_git_command(['checkout', 'master'])
        self.assertEqual(self.repo.workdir.get_branch(), 'master')

    def test_get_diff_runs_git_command_with_hashes(self):
        # get_diff must pass the git argv as a list of strings so arguments
        # are quoted correctly (issue #135).
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(0, 1)
        self.repo.run_git_command.assert_called_once_with(
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
             self.repo._get_revision(0), self.repo._get_revision(1)])

    def test_get_diff_runs_git_command_with_str_hashes(self):
        # Diffing against EMPTY_CHANGESET uses 'git show' of the single
        # revision instead of 'git diff'.
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
        self.repo.run_git_command.assert_called_once_with(
            ['show', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
             self.repo._get_revision(1)])

    def test_get_diff_runs_git_command_with_path_if_its_given(self):
        # An optional path argument is appended after a '--' separator.
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(0, 1, 'foo')
        self.repo.run_git_command.assert_called_once_with(
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
713
713
714
714
class GitRegressionTest(_BackendTestMixin, unittest.TestCase):
    # Regression test: get_nodes() used to misbehave for directories whose
    # names share prefixes with sibling paths (see test_similar_paths).
    backend_alias = 'git'

    @classmethod
    def _get_commits(cls):
        """Fixture: two commits creating several similarly prefixed paths."""
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('bot/__init__.py', content='base'),
                    FileNode('bot/templates/404.html', content='base'),
                    FileNode('bot/templates/500.html', content='base'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('bot/build/migrations/1.py', content='foo2'),
                    FileNode('bot/build/migrations/2.py', content='foo2'),
                    FileNode('bot/build/static/templates/f.html', content='foo2'),
                    FileNode('bot/build/static/templates/f1.html', content='foo2'),
                    FileNode('bot/build/templates/err.html', content='foo2'),
                    FileNode('bot/build/templates/err2.html', content='foo2'),
                ],
            },
        ]

    def test_similar_paths(self):
        cs = self.repo.get_changeset()
        # Helper: map a sequence of nodes to their path strings.
        paths = lambda *n:[x.path for x in n]
        self.assertEqual(paths(*cs.get_nodes('bot')), ['bot/build', 'bot/templates', 'bot/__init__.py'])
        self.assertEqual(paths(*cs.get_nodes('bot/build')), ['bot/build/migrations', 'bot/build/static', 'bot/build/templates'])
        self.assertEqual(paths(*cs.get_nodes('bot/build/static')), ['bot/build/static/templates'])
        # this get_nodes below causes troubles !
        self.assertEqual(paths(*cs.get_nodes('bot/build/static/templates')), ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html'])
        self.assertEqual(paths(*cs.get_nodes('bot/build/templates')), ['bot/build/templates/err.html', 'bot/build/templates/err2.html'])
        self.assertEqual(paths(*cs.get_nodes('bot/templates/')), ['bot/templates/404.html', 'bot/templates/500.html'])
756
756
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
General Comments 0
You need to be logged in to leave comments. Login now