@@ -1,265 +1,265 b''
# -*- coding: utf-8 -*-
"""
    rhodecode.controllers.compare
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    compare controller for pylons showing differences between two
    repos, branches, bookmarks or tips

    :created_on: May 6, 2012
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import logging
import traceback
import re

from webob.exc import HTTPNotFound, HTTPBadRequest
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.i18n.translation import _

from rhodecode.lib.vcs.exceptions import EmptyRepositoryError, RepositoryError
from rhodecode.lib.vcs.utils import safe_str
from rhodecode.lib.vcs.utils.hgcompat import scmutil, unionrepo
from rhodecode.lib import helpers as h
from rhodecode.lib.base import BaseRepoController, render
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
from rhodecode.lib import diffs
from rhodecode.lib.utils2 import safe_str
from rhodecode.model.db import Repository
from rhodecode.lib.diffs import LimitedDiffContainer


log = logging.getLogger(__name__)


class CompareController(BaseRepoController):

    def __before__(self):
        super(CompareController, self).__before__()

    def __get_rev_or_redirect(self, ref, repo, redirect_after=True,
                              partial=False):
        """
        Safe way to get changeset if error occur it redirects to changeset with
        proper message. If partial is set then don't do redirect raise Exception
        instead

        :param rev: revision to fetch
        :param repo: repo instance
        """

        rev = ref[1]  # default and used for git
        if repo.scm_instance.alias == 'hg':
            # lookup up the exact node id
            _revset_predicates = {
                'branch': 'branch',
                'book': 'bookmark',
                'tag': 'tag',
                'rev': 'id',
            }
            rev_spec = "max(%s(%%s))" % _revset_predicates[ref[0]]
            revs = repo.scm_instance._repo.revs(rev_spec, safe_str(ref[1]))
            if revs:
                rev = revs[-1]
            # else: TODO: just report 'not found'

        try:
            return repo.scm_instance.get_changeset(rev).raw_id
        except EmptyRepositoryError, e:
            if not redirect_after:
                return None
            h.flash(h.literal(_('There are no changesets yet')),
                    category='warning')
            redirect(url('summary_home', repo_name=repo.repo_name))

        except RepositoryError, e:
            log.error(traceback.format_exc())
            h.flash(safe_str(e), category='warning')
            if not partial:
                redirect(h.url('summary_home', repo_name=repo.repo_name))
            raise HTTPBadRequest()

    def _get_changesets(self, alias, org_repo, org_rev, other_repo, other_rev, merge):
        """
        Returns a list of changesets that can be merged from org_repo@org_rev
        to other_repo@other_rev ... and the ancestor that would be used for merge
        """

        ancestor = None

        if org_rev == other_rev:
            changesets = []
            if merge:
                ancestor = org_rev

        elif alias == 'hg':
            #case two independent repos
            if org_repo != other_repo:
                hgrepo = unionrepo.unionrepository(other_repo.baseui,
                                                   other_repo.path,
                                                   org_repo.path)
                # all the changesets we are looking for will be in other_repo,
                # so rev numbers from hgrepo can be used in other_repo

            #no remote compare do it on the same repository
            else:
                hgrepo = other_repo._repo

            if merge:
                revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
                                   other_rev, org_rev, org_rev)

                ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev)
                if ancestors:
                    # pick arbitrary ancestor - but there is usually only one
                    ancestor = hgrepo[ancestors[0]].hex()
            else:
                # TODO: have both + and - changesets
                revs = hgrepo.revs("id(%s) :: id(%s) - id(%s)",
                                   org_rev, other_rev, org_rev)

            changesets = [other_repo.get_changeset(rev) for rev in revs]

        elif alias == 'git':
            if org_repo != other_repo:
                raise Exception('Comparing of different GIT repositories is not'
                                'allowed. Got %s != %s' % (org_repo, other_repo))

            so, se = org_repo.run_git_command(
                'log --reverse --pretty="format: %%H" -s %s..%s'
                    % (org_rev, other_rev)
            )
            changesets = [org_repo.get_changeset(cs)
                          for cs in re.findall(r'[0-9a-fA-F]{40}', so)]

        return changesets, ancestor

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def index(self, org_ref_type, org_ref, other_ref_type, other_ref):
        # org_ref will be evaluated in org_repo
        org_repo = c.rhodecode_db_repo.repo_name
        org_ref = (org_ref_type, org_ref)
        # other_ref will be evaluated in other_repo
        other_ref = (other_ref_type, other_ref)
        other_repo = request.GET.get('other_repo', org_repo)
        # If merge is True:
        #   Show what org would get if merged with other:
        #   List changesets that are ancestors of other but not of org.
        #   New changesets in org is thus ignored.
        #   Diff will be from common ancestor, and merges of org to other will thus be ignored.
        # If merge is False:
        #   Make a raw diff from org to other, no matter if related or not.
        #   Changesets in one and not in the other will be ignored
        merge = bool(request.GET.get('merge'))
        # fulldiff disables cut_off_limit
        c.fulldiff = request.GET.get('fulldiff')
        # partial uses compare_cs.html template directly
        partial = request.environ.get('HTTP_X_PARTIAL_XHR')
        # as_form puts hidden input field with changeset revisions
        c.as_form = partial and request.GET.get('as_form')
        # swap url for compare_diff page - never partial and never as_form
        c.swap_url = h.url('compare_url',
            repo_name=other_repo,
            org_ref_type=other_ref[0], org_ref=other_ref[1],
            other_repo=org_repo,
            other_ref_type=org_ref[0], other_ref=org_ref[1],
            merge=merge or '')

        org_repo = Repository.get_by_repo_name(org_repo)
        other_repo = Repository.get_by_repo_name(other_repo)

        if org_repo is None:
            log.error('Could not find org repo %s' % org_repo)
            raise HTTPNotFound
        if other_repo is None:
            log.error('Could not find other repo %s' % other_repo)
            raise HTTPNotFound

        if org_repo != other_repo and h.is_git(org_repo):
            log.error('compare of two remote repos not available for GIT REPOS')
            raise HTTPNotFound

        if org_repo.scm_instance.alias != other_repo.scm_instance.alias:
            log.error('compare of two different kind of remote repos not available')
            raise HTTPNotFound

        org_rev = self.__get_rev_or_redirect(ref=org_ref, repo=org_repo, partial=partial)
        other_rev = self.__get_rev_or_redirect(ref=other_ref, repo=other_repo, partial=partial)

        c.org_repo = org_repo
        c.other_repo = other_repo
        c.org_ref = org_ref[1]
        c.other_ref = other_ref[1]
        c.org_ref_type = org_ref[0]
        c.other_ref_type = other_ref[0]

        c.cs_ranges, c.ancestor = self._get_changesets(org_repo.scm_instance.alias,
                                                       org_repo.scm_instance, org_rev,
                                                       other_repo.scm_instance, other_rev,
                                                       merge)

        c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in
                                                   c.cs_ranges])
        if merge and not c.ancestor:
            log.error('Unable to find ancestor revision')

        if partial:
            return render('compare/compare_cs.html')

        if c.ancestor:
            assert merge
            # case we want a simple diff without incoming changesets,
            # previewing what will be merged.
            # Make the diff on the other repo (which is known to have other_ref)
            log.debug('Using ancestor %s as org_ref instead of %s'
                      % (c.ancestor, org_ref))
            org_rev = c.ancestor
            org_repo = other_repo

        diff_limit = self.cut_off_limit if not c.fulldiff else None

        log.debug('running diff between %s and %s in %s'
                  % (org_rev, other_rev, org_repo.scm_instance.path))
        txtdiff = org_repo.scm_instance.get_diff(rev1=org_rev, rev2=other_rev)

        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
                                             diff_limit=diff_limit)
        _parsed = diff_processor.prepare()

        c.limited_diff = False
        if isinstance(_parsed, LimitedDiffContainer):
            c.limited_diff = True

        c.files = []
        c.changes = {}
        c.lines_added = 0
        c.lines_deleted = 0
        for f in _parsed:
            st = f['stats']
            if not st['binary']:
                c.lines_added += st['added']
                c.lines_deleted += st['deleted']
            fid = h.FID('', f['filename'])
            c.files.append([fid, f['operation'], f['filename'], f['stats']])
            htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f])
            c.changes[fid] = [f['operation'], f['filename'], htmldiff]

        return render('compare/compare_diff.html')
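
For orientation, a minimal, self-contained sketch of the ref-to-revset translation performed in `__get_rev_or_redirect` above. The `build_rev_spec` helper and the sample refs are illustrative assumptions, not part of the changeset; only the predicate mapping and the `max(...(%s))` template come from the controller code.

# Illustrative sketch (not part of the changeset): how a (ref_type, ref_name)
# pair becomes a Mercurial revset expression that is later passed to
# repo.scm_instance._repo.revs(rev_spec, name).
_revset_predicates = {
    'branch': 'branch',
    'book': 'bookmark',
    'tag': 'tag',
    'rev': 'id',
}

def build_rev_spec(ref):
    """ref is a (type, name) tuple, e.g. ('book', 'feature-x')."""
    return "max(%s(%%s))" % _revset_predicates[ref[0]]

assert build_rev_spec(('branch', 'default')) == "max(branch(%s))"
assert build_rev_spec(('book', 'feature-x')) == "max(bookmark(%s))"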
@@ -1,554 +1,552 b''
import re
from itertools import chain
from dulwich import objects
from subprocess import Popen, PIPE

from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, ChangesetError, NodeDoesNotExistError, VCSError,
    ChangesetDoesNotExistError, ImproperArchiveTypeError
)
from rhodecode.lib.vcs.nodes import (
    FileNode, DirNode, NodeKind, RootNode, RemovedFileNode, SubModuleNode,
    ChangedFileNodesGenerator, AddedFileNodesGenerator, RemovedFileNodesGenerator
)
from rhodecode.lib.vcs.utils import (
    safe_unicode, safe_str, safe_int, date_fromtimestamp
)
from rhodecode.lib.vcs.utils.lazy import LazyProperty


class GitChangeset(BaseChangeset):
    """
    Represents state of the repository at single revision.
    """

    def __init__(self, repository, revision):
        self._stat_modes = {}
        self.repository = repository

        try:
            commit = self.repository._repo[revision]
            if isinstance(commit, objects.Tag):
                revision = commit.object[1]
                commit = self.repository._repo.get_object(commit.object[1])
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % revision)
        self.raw_id = revision
        self.id = self.raw_id
        self.short_id = self.raw_id[:12]
        self._commit = commit
        self._tree_id = commit.tree
        self._committer_property = 'committer'
        self._author_property = 'author'
        self._date_property = 'commit_time'
        self._date_tz_property = 'commit_timezone'
        self.revision = repository.revisions.index(revision)

        self.nodes = {}
        self._paths = {}

    @LazyProperty
    def message(self):
        return safe_unicode(self._commit.message)

    @LazyProperty
    def committer(self):
        return safe_unicode(getattr(self._commit, self._committer_property))

    @LazyProperty
    def author(self):
        return safe_unicode(getattr(self._commit, self._author_property))

    @LazyProperty
    def date(self):
        return date_fromtimestamp(getattr(self._commit, self._date_property),
                                  getattr(self._commit, self._date_tz_property))

    @LazyProperty
    def _timestamp(self):
        return getattr(self._commit, self._date_property)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current changeset
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        _tags = []
        for tname, tsha in self.repository.tags.iteritems():
            if tsha == self.raw_id:
                _tags.append(tname)
        return _tags

    @LazyProperty
    def branch(self):

        heads = self.repository._heads(reverse=False)

        ref = heads.get(self.raw_id)
        if ref:
            return safe_unicode(ref)

    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        if path.endswith('/'):
            path = path.rstrip('/')
        return path

    def _get_id_for_path(self, path):
        path = safe_str(path)
        # FIXME: Please, spare a couple of minutes and make those codes cleaner;
        if not path in self._paths:
            path = path.strip('/')
            # set root tree
            tree = self.repository._repo[self._tree_id]
            if path == '':
                self._paths[''] = tree.id
                return tree.id
            splitted = path.split('/')
            dirs, name = splitted[:-1], splitted[-1]
            curdir = ''

            # initially extract things from root dir
            for item, stat, id in tree.iteritems():
                if curdir:
                    name = '/'.join((curdir, item))
                else:
                    name = item
                self._paths[name] = id
                self._stat_modes[name] = stat

            for dir in dirs:
                if curdir:
                    curdir = '/'.join((curdir, dir))
                else:
                    curdir = dir
                dir_id = None
                for item, stat, id in tree.iteritems():
                    if dir == item:
                        dir_id = id
                if dir_id:
                    # Update tree
                    tree = self.repository._repo[dir_id]
                    if not isinstance(tree, objects.Tree):
                        raise ChangesetError('%s is not a directory' % curdir)
                else:
                    raise ChangesetError('%s have not been found' % curdir)

            # cache all items from the given traversed tree
            for item, stat, id in tree.iteritems():
                if curdir:
                    name = '/'.join((curdir, item))
                else:
                    name = item
                self._paths[name] = id
                self._stat_modes[name] = stat
        if not path in self._paths:
            raise NodeDoesNotExistError("There is no file nor directory "
                "at the given path '%s' at revision %s"
                % (path, safe_str(self.short_id)))
        return self._paths[path]

    def _get_kind(self, path):
        obj = self.repository._repo[self._get_id_for_path(path)]
        if isinstance(obj, objects.Blob):
            return NodeKind.FILE
        elif isinstance(obj, objects.Tree):
            return NodeKind.DIR

    def _get_filectx(self, path):
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise ChangesetError("File does not exist for revision %s at "
                " '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parents changesets.
        """
        return [self.repository.get_changeset(parent)
                for parent in self._commit.parents]

    @LazyProperty
    def children(self):
        """
        Returns list of children changesets.
        """
        rev_filter = _git_path = settings.GIT_REV_FILTER
        so, se = self.repository.run_git_command(
            "rev-list %s --children | grep '^%s'" % (rev_filter, self.raw_id)
        )

        children = []
        for l in so.splitlines():
            childs = l.split(' ')[1:]
            children.extend(childs)
        return [self.repository.get_changeset(cs) for cs in children]

    def next(self, branch=None):

        if branch and self.branch != branch:
            raise VCSError('Branch option used on changeset not belonging '
                           'to that branch')

        def _next(changeset, branch):
            try:
                next_ = changeset.revision + 1
                next_rev = changeset.repository.revisions[next_]
            except IndexError:
                raise ChangesetDoesNotExistError
            cs = changeset.repository.get_changeset(next_rev)

            if branch and branch != cs.branch:
                return _next(cs, branch)

            return cs

        return _next(self, branch)

    def prev(self, branch=None):
        if branch and self.branch != branch:
            raise VCSError('Branch option used on changeset not belonging '
                           'to that branch')

        def _prev(changeset, branch):
            try:
                prev_ = changeset.revision - 1
                if prev_ < 0:
                    raise IndexError
                prev_rev = changeset.repository.revisions[prev_]
            except IndexError:
                raise ChangesetDoesNotExistError

            cs = changeset.repository.get_changeset(prev_rev)

            if branch and branch != cs.branch:
                return _prev(cs, branch)

            return cs

        return _prev(self, branch)

    def diff(self, ignore_whitespace=True, context=3):
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
        rev2 = self
        return ''.join(self.repository.get_diff(rev1, rev2,
                                                ignore_whitespace=ignore_whitespace,
                                                context=context))

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given ``path``.
        """
        # ensure path is traversed
        path = safe_str(path)
        self._get_id_for_path(path)
        return self._stat_modes[path]

    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        id = self._get_id_for_path(path)
        blob = self.repository._repo[id]
        return blob.as_pretty_string()

    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        id = self._get_id_for_path(path)
        blob = self.repository._repo[id]
        return blob.raw_length()

    def get_file_changeset(self, path):
        """
        Returns last commit of the file at the given ``path``.
        """
        return self.get_file_history(path, limit=1)[0]

    def get_file_history(self, path, limit=None):
        """
        Returns history of file as reversed list of ``Changeset`` objects for
        which file at given ``path`` has been modified.

        TODO: This function now uses os underlying 'git' and 'grep' commands
        which is generally not good. Should be replaced with algorithm
        iterating commits.
        """
        self._get_filectx(path)
        cs_id = safe_str(self.id)
        f_path = safe_str(path)

        if limit:
            cmd = 'log -n %s --pretty="format: %%H" -s %s -- "%s"' % (
                      safe_int(limit, 0), cs_id, f_path)

        else:
            cmd = 'log --pretty="format: %%H" -s %s -- "%s"' % (
                      cs_id, f_path)
        so, se = self.repository.run_git_command(cmd)
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
        return [self.repository.get_changeset(sha) for sha in ids]

    def get_file_history_2(self, path):
        """
        Returns history of file as reversed list of ``Changeset`` objects for
        which file at given ``path`` has been modified.

        """
        self._get_filectx(path)
        from dulwich.walk import Walker
        include = [self.id]
        walker = Walker(self.repository._repo.object_store, include,
                        paths=[path], max_entries=1)
        return [self.repository.get_changeset(sha)
                for sha in (x.commit.id for x in walker)]

    def get_file_annotate(self, path):
        """
        Returns a generator of four element tuples with
            lineno, sha, changeset lazy loader and line

        TODO: This function now uses os underlying 'git' command which is
        generally not good. Should be replaced with algorithm iterating
        commits.
        """
        cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
        # -l     ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for bounderies
        # -r sha ==> blames for the given revision
        so, se = self.repository.run_git_command(cmd)

        for i, blame_line in enumerate(so.split('\n')[:-1]):
            ln_no = i + 1
            sha, line = re.split(r' ', blame_line, 1)
            yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)

    def fill_archive(self, stream=None, kind='tgz', prefix=None,
                     subrepos=False):
        """
        Fills up given stream.

        :param stream: file like object.
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
            Default: ``tgz``.
        :param prefix: name of root directory in archive.
            Default is repository name and changeset's raw_id joined with dash
            (``repo-tip.<KIND>``).
        :param subrepos: include subrepos in this archive.

        :raise ImproperArchiveTypeError: If given kind is wrong.
        :raise VcsError: If given stream is None

        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError('Archive kind not supported use one'
                'of %s', allowed_kinds)

        if prefix is None:
            prefix = '%s-%s' % (self.repository.name, self.short_id)
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")

        if kind == 'zip':
            frmt = 'zip'
        else:
            frmt = 'tar'
        _git_path = settings.GIT_EXECUTABLE_PATH
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
            frmt, prefix, self.raw_id)
        if kind == 'tgz':
            cmd += ' | gzip -9'
        elif kind == 'tbz2':
            cmd += ' | bzip2 -9'

        if stream is None:
            raise VCSError('You need to pass in a valid stream for filling'
                           ' with archival data')
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
                      cwd=self.repository.path)

        buffer_size = 1024 * 8
        chunk = popen.stdout.read(buffer_size)
        while chunk:
            stream.write(chunk)
            chunk = popen.stdout.read(buffer_size)
        # Make sure all descriptors would be read
        popen.communicate()

    def get_nodes(self, path):
        if self._get_kind(path) != NodeKind.DIR:
            raise ChangesetError("Directory does not exist for revision %s at "
                " '%s'" % (self.revision, path))
        path = self._fix_path(path)
        id = self._get_id_for_path(path)
        tree = self.repository._repo[id]
        dirnodes = []
        filenodes = []
        als = self.repository.alias
        for name, stat, id in tree.iteritems():
            if objects.S_ISGITLINK(stat):
                dirnodes.append(SubModuleNode(name, url=None, changeset=id,
                                              alias=als))
                continue

            obj = self.repository._repo.get_object(id)
            if path != '':
                obj_path = '/'.join((path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat
            if isinstance(obj, objects.Tree):
                dirnodes.append(DirNode(obj_path, changeset=self))
            elif isinstance(obj, objects.Blob):
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
            else:
                raise ChangesetError("Requested object should be Tree "
                                     "or Blob, is %r" % type(obj))
        nodes = dirnodes + filenodes
        for node in nodes:
            if not node.path in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path):
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if not path in self.nodes:
            try:
                id_ = self._get_id_for_path(path)
            except ChangesetError:
                raise NodeDoesNotExistError("Cannot find one of parents' "
                                            "directories for a given path: %s" % path)

            _GL = lambda m: m and objects.S_ISGITLINK(m)
            if _GL(self._stat_modes.get(path)):
                node = SubModuleNode(path, url=None, changeset=id_,
                                     alias=self.repository.alias)
            else:
                obj = self.repository._repo.get_object(id_)

                if isinstance(obj, objects.Tree):
                    if path == '':
                        node = RootNode(changeset=self)
                    else:
                        node = DirNode(path, changeset=self)
                    node._tree = obj
                elif isinstance(obj, objects.Blob):
                    node = FileNode(path, changeset=self)
                    node._blob = obj
                else:
                    raise NodeDoesNotExistError("There is no file nor directory "
                        "at the given path '%s' at revision %s"
                        % (path, self.short_id))
            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given changeset
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _diff_name_status(self):
        output = []
        for parent in self.parents:
            cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
                                                                self.raw_id)
            so, se = self.repository.run_git_command(cmd)
            output.append(so.strip())
        return '\n'.join(output)

    @LazyProperty
    def _changes_cache(self):
        added = set()
        modified = set()
        deleted = set()
        _r = self.repository._repo

        parents = self.parents
        if not self.parents:
            parents = [EmptyChangeset()]
        for parent in parents:
            if isinstance(parent, EmptyChangeset):
                oid = None
            else:
                oid = _r[parent.raw_id].tree
            changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator([n for n in
            self._get_paths_for_status('added')], self)

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator([n for n in
            self._get_paths_for_status('modified')], self)

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator([n for n in
            self._get_paths_for_status('deleted')], self)
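
As a rough companion to `_changes_cache` above, a minimal sketch of how dulwich `tree_changes` tuples are classified into added, modified and deleted paths. The sample `changes` list and its modes/shas are made up for illustration; only the classification logic mirrors the method above.

# Illustrative sketch (not part of the file above): classifying
# (oldpath, newpath) pairs the way GitChangeset._changes_cache does.
changes = [
    (('docs/index.rst', 'docs/index.rst'), (0o100644, 0o100644), ('a' * 40, 'b' * 40)),  # modified
    ((None, 'new_file.py'), (None, 0o100644), (None, 'c' * 40)),                         # added
    (('old_file.py', None), (0o100644, None), ('d' * 40, None)),                         # deleted
]

added, modified, deleted = set(), set(), set()
for (oldpath, newpath), _modes, _shas in changes:
    if newpath and oldpath:
        modified.add(newpath)
    elif newpath:
        added.add(newpath)
    elif oldpath:
        deleted.add(oldpath)

assert added == set(['new_file.py'])
assert modified == set(['docs/index.rst'])
assert deleted == set(['old_file.py'])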