Show More
@@ -1,295 +1,295 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.compare |
|
15 | kallithea.controllers.compare | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | compare controller showing differences between two |
|
18 | compare controller showing differences between two | |
19 | repos, branches, bookmarks or tips |
|
19 | repos, branches, bookmarks or tips | |
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: May 6, 2012 |
|
23 | :created_on: May 6, 2012 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | import logging |
|
30 | import logging | |
31 | import re |
|
31 | import re | |
32 |
|
32 | |||
|
33 | import mercurial.unionrepo | |||
33 | from tg import request |
|
34 | from tg import request | |
34 | from tg import tmpl_context as c |
|
35 | from tg import tmpl_context as c | |
35 | from tg.i18n import ugettext as _ |
|
36 | from tg.i18n import ugettext as _ | |
36 | from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound |
|
37 | from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound | |
37 |
|
38 | |||
38 | from kallithea.config.routing import url |
|
39 | from kallithea.config.routing import url | |
39 | from kallithea.controllers.changeset import _context_url, _ignorews_url |
|
40 | from kallithea.controllers.changeset import _context_url, _ignorews_url | |
40 | from kallithea.lib import diffs |
|
41 | from kallithea.lib import diffs | |
41 | from kallithea.lib import helpers as h |
|
42 | from kallithea.lib import helpers as h | |
42 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
43 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired | |
43 | from kallithea.lib.base import BaseRepoController, render |
|
44 | from kallithea.lib.base import BaseRepoController, render | |
44 | from kallithea.lib.graphmod import graph_data |
|
45 | from kallithea.lib.graphmod import graph_data | |
45 | from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_int, safe_str |
|
46 | from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_int, safe_str | |
46 | from kallithea.lib.vcs.utils.hgcompat import unionrepo |
|
|||
47 | from kallithea.model.db import Repository |
|
47 | from kallithea.model.db import Repository | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | log = logging.getLogger(__name__) |
|
50 | log = logging.getLogger(__name__) | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | class CompareController(BaseRepoController): |
|
53 | class CompareController(BaseRepoController): | |
54 |
|
54 | |||
55 | def _before(self, *args, **kwargs): |
|
55 | def _before(self, *args, **kwargs): | |
56 | super(CompareController, self)._before(*args, **kwargs) |
|
56 | super(CompareController, self)._before(*args, **kwargs) | |
57 |
|
57 | |||
58 | # The base repository has already been retrieved. |
|
58 | # The base repository has already been retrieved. | |
59 | c.a_repo = c.db_repo |
|
59 | c.a_repo = c.db_repo | |
60 |
|
60 | |||
61 | # Retrieve the "changeset" repository (default: same as base). |
|
61 | # Retrieve the "changeset" repository (default: same as base). | |
62 | other_repo = request.GET.get('other_repo', None) |
|
62 | other_repo = request.GET.get('other_repo', None) | |
63 | if other_repo is None: |
|
63 | if other_repo is None: | |
64 | c.cs_repo = c.a_repo |
|
64 | c.cs_repo = c.a_repo | |
65 | else: |
|
65 | else: | |
66 | c.cs_repo = Repository.get_by_repo_name(other_repo) |
|
66 | c.cs_repo = Repository.get_by_repo_name(other_repo) | |
67 | if c.cs_repo is None: |
|
67 | if c.cs_repo is None: | |
68 | msg = _('Could not find other repository %s') % other_repo |
|
68 | msg = _('Could not find other repository %s') % other_repo | |
69 | h.flash(msg, category='error') |
|
69 | h.flash(msg, category='error') | |
70 | raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name)) |
|
70 | raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name)) | |
71 |
|
71 | |||
72 | # Verify that it's even possible to compare these two repositories. |
|
72 | # Verify that it's even possible to compare these two repositories. | |
73 | if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias: |
|
73 | if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias: | |
74 | msg = _('Cannot compare repositories of different types') |
|
74 | msg = _('Cannot compare repositories of different types') | |
75 | h.flash(msg, category='error') |
|
75 | h.flash(msg, category='error') | |
76 | raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name)) |
|
76 | raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name)) | |
77 |
|
77 | |||
78 | @staticmethod |
|
78 | @staticmethod | |
79 | def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev): |
|
79 | def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev): | |
80 | """ |
|
80 | """ | |
81 | Returns lists of changesets that can be merged from org_repo@org_rev |
|
81 | Returns lists of changesets that can be merged from org_repo@org_rev | |
82 | to other_repo@other_rev |
|
82 | to other_repo@other_rev | |
83 | ... and the other way |
|
83 | ... and the other way | |
84 | ... and the ancestors that would be used for merge |
|
84 | ... and the ancestors that would be used for merge | |
85 |
|
85 | |||
86 | :param org_repo: repo object, that is most likely the original repo we forked from |
|
86 | :param org_repo: repo object, that is most likely the original repo we forked from | |
87 | :param org_rev: the revision we want our compare to be made |
|
87 | :param org_rev: the revision we want our compare to be made | |
88 | :param other_repo: repo object, most likely the fork of org_repo. It has |
|
88 | :param other_repo: repo object, most likely the fork of org_repo. It has | |
89 | all changesets that we need to obtain |
|
89 | all changesets that we need to obtain | |
90 | :param other_rev: revision we want out compare to be made on other_repo |
|
90 | :param other_rev: revision we want out compare to be made on other_repo | |
91 | """ |
|
91 | """ | |
92 | ancestors = None |
|
92 | ancestors = None | |
93 | if org_rev == other_rev: |
|
93 | if org_rev == other_rev: | |
94 | org_changesets = [] |
|
94 | org_changesets = [] | |
95 | other_changesets = [] |
|
95 | other_changesets = [] | |
96 |
|
96 | |||
97 | elif alias == 'hg': |
|
97 | elif alias == 'hg': | |
98 | # case two independent repos |
|
98 | # case two independent repos | |
99 | if org_repo != other_repo: |
|
99 | if org_repo != other_repo: | |
100 | hgrepo = unionrepo.makeunionrepository(other_repo.baseui, |
|
100 | hgrepo = mercurial.unionrepo.makeunionrepository(other_repo.baseui, | |
101 | other_repo.path, |
|
101 | other_repo.path, | |
102 | org_repo.path) |
|
102 | org_repo.path) | |
103 | # all ancestors of other_rev will be in other_repo and |
|
103 | # all ancestors of other_rev will be in other_repo and | |
104 | # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot |
|
104 | # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot | |
105 |
|
105 | |||
106 | # no remote compare do it on the same repository |
|
106 | # no remote compare do it on the same repository | |
107 | else: |
|
107 | else: | |
108 | hgrepo = other_repo._repo |
|
108 | hgrepo = other_repo._repo | |
109 |
|
109 | |||
110 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in |
|
110 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in | |
111 | hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] |
|
111 | hgrepo.revs(b"id(%s) & ::id(%s)", ascii_bytes(other_rev), ascii_bytes(org_rev))] | |
112 | if ancestors: |
|
112 | if ancestors: | |
113 | log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) |
|
113 | log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev) | |
114 | else: |
|
114 | else: | |
115 | log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) |
|
115 | log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev) | |
116 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in |
|
116 | ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in | |
117 | hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! |
|
117 | hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive! | |
118 |
|
118 | |||
119 | other_changesets = [ |
|
119 | other_changesets = [ | |
120 | other_repo.get_changeset(rev) |
|
120 | other_repo.get_changeset(rev) | |
121 | for rev in hgrepo.revs( |
|
121 | for rev in hgrepo.revs( | |
122 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
122 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", | |
123 | ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) |
|
123 | ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev)) | |
124 | ] |
|
124 | ] | |
125 | org_changesets = [ |
|
125 | org_changesets = [ | |
126 | org_repo.get_changeset(ascii_str(hgrepo[rev].hex())) |
|
126 | org_repo.get_changeset(ascii_str(hgrepo[rev].hex())) | |
127 | for rev in hgrepo.revs( |
|
127 | for rev in hgrepo.revs( | |
128 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
128 | b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", | |
129 | ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) |
|
129 | ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev)) | |
130 | ] |
|
130 | ] | |
131 |
|
131 | |||
132 | elif alias == 'git': |
|
132 | elif alias == 'git': | |
133 | if org_repo != other_repo: |
|
133 | if org_repo != other_repo: | |
134 | from dulwich.repo import Repo |
|
134 | from dulwich.repo import Repo | |
135 | from dulwich.client import SubprocessGitClient |
|
135 | from dulwich.client import SubprocessGitClient | |
136 |
|
136 | |||
137 | gitrepo = Repo(org_repo.path) |
|
137 | gitrepo = Repo(org_repo.path) | |
138 | SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo) |
|
138 | SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo) | |
139 |
|
139 | |||
140 | gitrepo_remote = Repo(other_repo.path) |
|
140 | gitrepo_remote = Repo(other_repo.path) | |
141 | SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote) |
|
141 | SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote) | |
142 |
|
142 | |||
143 | revs = [ |
|
143 | revs = [ | |
144 | ascii_str(x.commit.id) |
|
144 | ascii_str(x.commit.id) | |
145 | for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)], |
|
145 | for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)], | |
146 | exclude=[ascii_bytes(org_rev)]) |
|
146 | exclude=[ascii_bytes(org_rev)]) | |
147 | ] |
|
147 | ] | |
148 | other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)] |
|
148 | other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)] | |
149 | if other_changesets: |
|
149 | if other_changesets: | |
150 | ancestors = [other_changesets[0].parents[0].raw_id] |
|
150 | ancestors = [other_changesets[0].parents[0].raw_id] | |
151 | else: |
|
151 | else: | |
152 | # no changesets from other repo, ancestor is the other_rev |
|
152 | # no changesets from other repo, ancestor is the other_rev | |
153 | ancestors = [other_rev] |
|
153 | ancestors = [other_rev] | |
154 |
|
154 | |||
155 | gitrepo.close() |
|
155 | gitrepo.close() | |
156 | gitrepo_remote.close() |
|
156 | gitrepo_remote.close() | |
157 |
|
157 | |||
158 | else: |
|
158 | else: | |
159 | so = org_repo.run_git_command( |
|
159 | so = org_repo.run_git_command( | |
160 | ['log', '--reverse', '--pretty=format:%H', |
|
160 | ['log', '--reverse', '--pretty=format:%H', | |
161 | '-s', '%s..%s' % (org_rev, other_rev)] |
|
161 | '-s', '%s..%s' % (org_rev, other_rev)] | |
162 | ) |
|
162 | ) | |
163 | other_changesets = [org_repo.get_changeset(cs) |
|
163 | other_changesets = [org_repo.get_changeset(cs) | |
164 | for cs in re.findall(r'[0-9a-fA-F]{40}', so)] |
|
164 | for cs in re.findall(r'[0-9a-fA-F]{40}', so)] | |
165 | so = org_repo.run_git_command( |
|
165 | so = org_repo.run_git_command( | |
166 | ['merge-base', org_rev, other_rev] |
|
166 | ['merge-base', org_rev, other_rev] | |
167 | ) |
|
167 | ) | |
168 | ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]] |
|
168 | ancestors = [re.findall(r'[0-9a-fA-F]{40}', so)[0]] | |
169 | org_changesets = [] |
|
169 | org_changesets = [] | |
170 |
|
170 | |||
171 | else: |
|
171 | else: | |
172 | raise Exception('Bad alias only git and hg is allowed') |
|
172 | raise Exception('Bad alias only git and hg is allowed') | |
173 |
|
173 | |||
174 | return other_changesets, org_changesets, ancestors |
|
174 | return other_changesets, org_changesets, ancestors | |
175 |
|
175 | |||
176 | @LoginRequired(allow_default_user=True) |
|
176 | @LoginRequired(allow_default_user=True) | |
177 | @HasRepoPermissionLevelDecorator('read') |
|
177 | @HasRepoPermissionLevelDecorator('read') | |
178 | def index(self, repo_name): |
|
178 | def index(self, repo_name): | |
179 | c.compare_home = True |
|
179 | c.compare_home = True | |
180 | c.a_ref_name = c.cs_ref_name = None |
|
180 | c.a_ref_name = c.cs_ref_name = None | |
181 | return render('compare/compare_diff.html') |
|
181 | return render('compare/compare_diff.html') | |
182 |
|
182 | |||
183 | @LoginRequired(allow_default_user=True) |
|
183 | @LoginRequired(allow_default_user=True) | |
184 | @HasRepoPermissionLevelDecorator('read') |
|
184 | @HasRepoPermissionLevelDecorator('read') | |
185 | def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name): |
|
185 | def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name): | |
186 | org_ref_name = org_ref_name.strip() |
|
186 | org_ref_name = org_ref_name.strip() | |
187 | other_ref_name = other_ref_name.strip() |
|
187 | other_ref_name = other_ref_name.strip() | |
188 |
|
188 | |||
189 | # If merge is True: |
|
189 | # If merge is True: | |
190 | # Show what org would get if merged with other: |
|
190 | # Show what org would get if merged with other: | |
191 | # List changesets that are ancestors of other but not of org. |
|
191 | # List changesets that are ancestors of other but not of org. | |
192 | # New changesets in org is thus ignored. |
|
192 | # New changesets in org is thus ignored. | |
193 | # Diff will be from common ancestor, and merges of org to other will thus be ignored. |
|
193 | # Diff will be from common ancestor, and merges of org to other will thus be ignored. | |
194 | # If merge is False: |
|
194 | # If merge is False: | |
195 | # Make a raw diff from org to other, no matter if related or not. |
|
195 | # Make a raw diff from org to other, no matter if related or not. | |
196 | # Changesets in one and not in the other will be ignored |
|
196 | # Changesets in one and not in the other will be ignored | |
197 | merge = bool(request.GET.get('merge')) |
|
197 | merge = bool(request.GET.get('merge')) | |
198 | # fulldiff disables cut_off_limit |
|
198 | # fulldiff disables cut_off_limit | |
199 | fulldiff = request.GET.get('fulldiff') |
|
199 | fulldiff = request.GET.get('fulldiff') | |
200 | # partial uses compare_cs.html template directly |
|
200 | # partial uses compare_cs.html template directly | |
201 | partial = request.environ.get('HTTP_X_PARTIAL_XHR') |
|
201 | partial = request.environ.get('HTTP_X_PARTIAL_XHR') | |
202 | # is_ajax_preview puts hidden input field with changeset revisions |
|
202 | # is_ajax_preview puts hidden input field with changeset revisions | |
203 | c.is_ajax_preview = partial and request.GET.get('is_ajax_preview') |
|
203 | c.is_ajax_preview = partial and request.GET.get('is_ajax_preview') | |
204 | # swap url for compare_diff page - never partial and never is_ajax_preview |
|
204 | # swap url for compare_diff page - never partial and never is_ajax_preview | |
205 | c.swap_url = h.url('compare_url', |
|
205 | c.swap_url = h.url('compare_url', | |
206 | repo_name=c.cs_repo.repo_name, |
|
206 | repo_name=c.cs_repo.repo_name, | |
207 | org_ref_type=other_ref_type, org_ref_name=other_ref_name, |
|
207 | org_ref_type=other_ref_type, org_ref_name=other_ref_name, | |
208 | other_repo=c.a_repo.repo_name, |
|
208 | other_repo=c.a_repo.repo_name, | |
209 | other_ref_type=org_ref_type, other_ref_name=org_ref_name, |
|
209 | other_ref_type=org_ref_type, other_ref_name=org_ref_name, | |
210 | merge=merge or '') |
|
210 | merge=merge or '') | |
211 |
|
211 | |||
212 | # set callbacks for generating markup for icons |
|
212 | # set callbacks for generating markup for icons | |
213 | c.ignorews_url = _ignorews_url |
|
213 | c.ignorews_url = _ignorews_url | |
214 | c.context_url = _context_url |
|
214 | c.context_url = _context_url | |
215 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
215 | ignore_whitespace = request.GET.get('ignorews') == '1' | |
216 | line_context = safe_int(request.GET.get('context'), 3) |
|
216 | line_context = safe_int(request.GET.get('context'), 3) | |
217 |
|
217 | |||
218 | c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name, |
|
218 | c.a_rev = self._get_ref_rev(c.a_repo, org_ref_type, org_ref_name, | |
219 | returnempty=True) |
|
219 | returnempty=True) | |
220 | c.cs_rev = self._get_ref_rev(c.cs_repo, other_ref_type, other_ref_name) |
|
220 | c.cs_rev = self._get_ref_rev(c.cs_repo, other_ref_type, other_ref_name) | |
221 |
|
221 | |||
222 | c.compare_home = False |
|
222 | c.compare_home = False | |
223 | c.a_ref_name = org_ref_name |
|
223 | c.a_ref_name = org_ref_name | |
224 | c.a_ref_type = org_ref_type |
|
224 | c.a_ref_type = org_ref_type | |
225 | c.cs_ref_name = other_ref_name |
|
225 | c.cs_ref_name = other_ref_name | |
226 | c.cs_ref_type = other_ref_type |
|
226 | c.cs_ref_type = other_ref_type | |
227 |
|
227 | |||
228 | c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets( |
|
228 | c.cs_ranges, c.cs_ranges_org, c.ancestors = self._get_changesets( | |
229 | c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev, |
|
229 | c.a_repo.scm_instance.alias, c.a_repo.scm_instance, c.a_rev, | |
230 | c.cs_repo.scm_instance, c.cs_rev) |
|
230 | c.cs_repo.scm_instance, c.cs_rev) | |
231 | raw_ids = [x.raw_id for x in c.cs_ranges] |
|
231 | raw_ids = [x.raw_id for x in c.cs_ranges] | |
232 | c.cs_comments = c.cs_repo.get_comments(raw_ids) |
|
232 | c.cs_comments = c.cs_repo.get_comments(raw_ids) | |
233 | c.cs_statuses = c.cs_repo.statuses(raw_ids) |
|
233 | c.cs_statuses = c.cs_repo.statuses(raw_ids) | |
234 |
|
234 | |||
235 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] |
|
235 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] | |
236 | c.jsdata = graph_data(c.cs_repo.scm_instance, revs) |
|
236 | c.jsdata = graph_data(c.cs_repo.scm_instance, revs) | |
237 |
|
237 | |||
238 | if partial: |
|
238 | if partial: | |
239 | return render('compare/compare_cs.html') |
|
239 | return render('compare/compare_cs.html') | |
240 |
|
240 | |||
241 | org_repo = c.a_repo |
|
241 | org_repo = c.a_repo | |
242 | other_repo = c.cs_repo |
|
242 | other_repo = c.cs_repo | |
243 |
|
243 | |||
244 | if merge: |
|
244 | if merge: | |
245 | rev1 = msg = None |
|
245 | rev1 = msg = None | |
246 | if not c.cs_ranges: |
|
246 | if not c.cs_ranges: | |
247 | msg = _('Cannot show empty diff') |
|
247 | msg = _('Cannot show empty diff') | |
248 | elif not c.ancestors: |
|
248 | elif not c.ancestors: | |
249 | msg = _('No ancestor found for merge diff') |
|
249 | msg = _('No ancestor found for merge diff') | |
250 | elif len(c.ancestors) == 1: |
|
250 | elif len(c.ancestors) == 1: | |
251 | rev1 = c.ancestors[0] |
|
251 | rev1 = c.ancestors[0] | |
252 | else: |
|
252 | else: | |
253 | msg = _('Multiple merge ancestors found for merge compare') |
|
253 | msg = _('Multiple merge ancestors found for merge compare') | |
254 | if rev1 is None: |
|
254 | if rev1 is None: | |
255 | h.flash(msg, category='error') |
|
255 | h.flash(msg, category='error') | |
256 | log.error(msg) |
|
256 | log.error(msg) | |
257 | raise HTTPNotFound |
|
257 | raise HTTPNotFound | |
258 |
|
258 | |||
259 | # case we want a simple diff without incoming changesets, |
|
259 | # case we want a simple diff without incoming changesets, | |
260 | # previewing what will be merged. |
|
260 | # previewing what will be merged. | |
261 | # Make the diff on the other repo (which is known to have other_rev) |
|
261 | # Make the diff on the other repo (which is known to have other_rev) | |
262 | log.debug('Using ancestor %s as rev1 instead of %s', |
|
262 | log.debug('Using ancestor %s as rev1 instead of %s', | |
263 | rev1, c.a_rev) |
|
263 | rev1, c.a_rev) | |
264 | org_repo = other_repo |
|
264 | org_repo = other_repo | |
265 | else: # comparing tips, not necessarily linearly related |
|
265 | else: # comparing tips, not necessarily linearly related | |
266 | if org_repo != other_repo: |
|
266 | if org_repo != other_repo: | |
267 | # TODO: we could do this by using hg unionrepo |
|
267 | # TODO: we could do this by using hg unionrepo | |
268 | log.error('cannot compare across repos %s and %s', org_repo, other_repo) |
|
268 | log.error('cannot compare across repos %s and %s', org_repo, other_repo) | |
269 | h.flash(_('Cannot compare repositories without using common ancestor'), category='error') |
|
269 | h.flash(_('Cannot compare repositories without using common ancestor'), category='error') | |
270 | raise HTTPBadRequest |
|
270 | raise HTTPBadRequest | |
271 | rev1 = c.a_rev |
|
271 | rev1 = c.a_rev | |
272 |
|
272 | |||
273 | diff_limit = None if fulldiff else self.cut_off_limit |
|
273 | diff_limit = None if fulldiff else self.cut_off_limit | |
274 |
|
274 | |||
275 | log.debug('running diff between %s and %s in %s', |
|
275 | log.debug('running diff between %s and %s in %s', | |
276 | rev1, c.cs_rev, org_repo.scm_instance.path) |
|
276 | rev1, c.cs_rev, org_repo.scm_instance.path) | |
277 | raw_diff = diffs.get_diff(org_repo.scm_instance, rev1=rev1, rev2=c.cs_rev, |
|
277 | raw_diff = diffs.get_diff(org_repo.scm_instance, rev1=rev1, rev2=c.cs_rev, | |
278 | ignore_whitespace=ignore_whitespace, |
|
278 | ignore_whitespace=ignore_whitespace, | |
279 | context=line_context) |
|
279 | context=line_context) | |
280 |
|
280 | |||
281 | diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) |
|
281 | diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) | |
282 | c.limited_diff = diff_processor.limited_diff |
|
282 | c.limited_diff = diff_processor.limited_diff | |
283 | c.file_diff_data = [] |
|
283 | c.file_diff_data = [] | |
284 | c.lines_added = 0 |
|
284 | c.lines_added = 0 | |
285 | c.lines_deleted = 0 |
|
285 | c.lines_deleted = 0 | |
286 | for f in diff_processor.parsed: |
|
286 | for f in diff_processor.parsed: | |
287 | st = f['stats'] |
|
287 | st = f['stats'] | |
288 | c.lines_added += st['added'] |
|
288 | c.lines_added += st['added'] | |
289 | c.lines_deleted += st['deleted'] |
|
289 | c.lines_deleted += st['deleted'] | |
290 | filename = f['filename'] |
|
290 | filename = f['filename'] | |
291 | fid = h.FID('', filename) |
|
291 | fid = h.FID('', filename) | |
292 | html_diff = diffs.as_html(enable_comments=False, parsed_lines=[f]) |
|
292 | html_diff = diffs.as_html(enable_comments=False, parsed_lines=[f]) | |
293 | c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st)) |
|
293 | c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st)) | |
294 |
|
294 | |||
295 | return render('compare/compare_diff.html') |
|
295 | return render('compare/compare_diff.html') |
@@ -1,643 +1,643 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.pullrequests |
|
15 | kallithea.controllers.pullrequests | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | pull requests controller for Kallithea for initializing pull requests |
|
18 | pull requests controller for Kallithea for initializing pull requests | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: May 7, 2012 |
|
22 | :created_on: May 7, 2012 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
|
32 | import mercurial.unionrepo | |||
32 | from tg import request |
|
33 | from tg import request | |
33 | from tg import tmpl_context as c |
|
34 | from tg import tmpl_context as c | |
34 | from tg.i18n import ugettext as _ |
|
35 | from tg.i18n import ugettext as _ | |
35 | from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound |
|
36 | from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound | |
36 |
|
37 | |||
37 | from kallithea.config.routing import url |
|
38 | from kallithea.config.routing import url | |
38 | from kallithea.controllers.changeset import _context_url, _ignorews_url, create_cs_pr_comment, delete_cs_pr_comment |
|
39 | from kallithea.controllers.changeset import _context_url, _ignorews_url, create_cs_pr_comment, delete_cs_pr_comment | |
39 | from kallithea.lib import diffs |
|
40 | from kallithea.lib import diffs | |
40 | from kallithea.lib import helpers as h |
|
41 | from kallithea.lib import helpers as h | |
41 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
42 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired | |
42 | from kallithea.lib.base import BaseRepoController, jsonify, render |
|
43 | from kallithea.lib.base import BaseRepoController, jsonify, render | |
43 | from kallithea.lib.graphmod import graph_data |
|
44 | from kallithea.lib.graphmod import graph_data | |
44 | from kallithea.lib.page import Page |
|
45 | from kallithea.lib.page import Page | |
45 | from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int, safe_str |
|
46 | from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int, safe_str | |
46 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError |
|
47 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError | |
47 | from kallithea.lib.vcs.utils.hgcompat import unionrepo |
|
|||
48 | from kallithea.model.changeset_status import ChangesetStatusModel |
|
48 | from kallithea.model.changeset_status import ChangesetStatusModel | |
49 | from kallithea.model.comment import ChangesetCommentsModel |
|
49 | from kallithea.model.comment import ChangesetCommentsModel | |
50 | from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User |
|
50 | from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User | |
51 | from kallithea.model.forms import PullRequestForm, PullRequestPostForm |
|
51 | from kallithea.model.forms import PullRequestForm, PullRequestPostForm | |
52 | from kallithea.model.meta import Session |
|
52 | from kallithea.model.meta import Session | |
53 | from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel |
|
53 | from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | log = logging.getLogger(__name__) |
|
56 | log = logging.getLogger(__name__) | |
57 |
|
57 | |||
58 |
|
58 | |||
def _get_reviewer(user_id):
    """Resolve *user_id* to a ``User`` suitable as a pull request reviewer.

    Flashes an error and raises ``HTTPBadRequest`` when the id is not
    numeric, unknown, or refers to the default (anonymous) user.
    """
    user = None
    try:
        numeric_id = int(user_id)
    except ValueError:
        pass  # non-numeric id - treated as unknown user below
    else:
        user = User.get(numeric_id)

    if user is None or user.is_default_user:
        h.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
        raise HTTPBadRequest()

    return user
71 |
|
71 | |||
72 |
|
72 | |||
73 | class PullrequestsController(BaseRepoController): |
|
73 | class PullrequestsController(BaseRepoController): | |
74 |
|
74 | |||
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
        """return a structure with repo's interesting changesets, suitable for
        the selectors in pullrequest.html

        rev: a revision that must be in the list somehow and selected by default
        branch: a branch that must be in the list and selected by default - even if closed
        branch_rev: a revision of which peers should be preferred and available.

        Returns a tuple ``(groups, selected)`` where ``groups`` is a list of
        ``(options, label)`` pairs (only non-empty groups) and each option is
        ``('<type>:<name>:<rev>', description)``; ``selected`` is the option
        key that should be pre-selected in the selector.
        """
        # list named branches that has been merged to this named branch - it should probably merge back
        peers = []

        if rev:
            rev = safe_str(rev)

        if branch:
            branch = safe_str(branch)

        if branch_rev:
            # a revset not restricting to merge() would be better
            # (especially because it would get the branch point)
            # ... but is currently too expensive
            # including branches of children could be nice too
            peerbranches = set()
            for i in repo._repo.revs(
                b"sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
                ascii_bytes(branch_rev), ascii_bytes(branch_rev),
            ):
                for abranch in repo.get_changeset(i).branches:
                    if abranch not in peerbranches:
                        # one entry per peer branch, keyed by its current head
                        n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
                        peers.append((n, abranch))
                        peerbranches.add(abranch)

        selected = None
        tiprev = repo.tags.get('tip')
        tipbranch = None

        branches = []
        for abranch, branchrev in repo.branches.iteritems():
            n = 'branch:%s:%s' % (abranch, branchrev)
            desc = abranch
            if branchrev == tiprev:
                tipbranch = abranch
                desc = '%s (current tip)' % desc
            branches.append((n, desc))
            if rev == branchrev:
                selected = n
            if branch == abranch:
                if not rev:
                    selected = n
                # branch found in the open-branch list - no fallback needed
                branch = None
        if branch:  # branch not in list - it is probably closed
            branchrev = repo.closed_branches.get(branch)
            if branchrev:
                n = 'branch:%s:%s' % (branch, branchrev)
                branches.append((n, _('%s (closed)') % branch))
                selected = n
                branch = None
        if branch:
            log.debug('branch %r not found in %s', branch, repo)

        bookmarks = []
        for bookmark, bookmarkrev in repo.bookmarks.iteritems():
            n = 'book:%s:%s' % (bookmark, bookmarkrev)
            bookmarks.append((n, bookmark))
            if rev == bookmarkrev:
                selected = n

        tags = []
        for tag, tagrev in repo.tags.iteritems():
            if tag == 'tip':
                continue
            n = 'tag:%s:%s' % (tag, tagrev)
            tags.append((n, tag))
            # note: even if rev == tagrev, don't select the static tag - it must be chosen explicitly

        # prio 1: rev was selected as existing entry above

        # prio 2: create special entry for rev; rev _must_ be used
        specials = []
        if rev and selected is None:
            selected = 'rev:%s:%s' % (rev, rev)
            specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))]

        # prio 3: most recent peer branch
        if peers and not selected:
            selected = peers[0][0]

        # prio 4: tip revision
        if not selected:
            if h.is_hg(repo):
                if tipbranch:
                    selected = 'branch:%s:%s' % (tipbranch, tiprev)
                else:
                    # empty hg repo - offer the synthetic null changeset
                    selected = 'tag:null:' + repo.EMPTY_CHANGESET
                    tags.append((selected, 'null'))
            else:
                # git: prefer 'master', otherwise the first branch found
                if 'master' in repo.branches:
                    selected = 'branch:master:%s' % repo.branches['master']
                else:
                    k, v = list(repo.branches.items())[0]
                    selected = 'branch:%s:%s' % (k, v)

        groups = [(specials, _("Special")),
                  (peers, _("Peer branches")),
                  (bookmarks, _("Bookmarks")),
                  (branches, _("Branches")),
                  (tags, _("Tags")),
                  ]
        # drop empty groups so the template only renders populated sections
        return [g for g in groups if g[0]], selected
184 |
|
184 | |||
185 | def _is_allowed_to_change_status(self, pull_request): |
|
185 | def _is_allowed_to_change_status(self, pull_request): | |
186 | if pull_request.is_closed(): |
|
186 | if pull_request.is_closed(): | |
187 | return False |
|
187 | return False | |
188 |
|
188 | |||
189 | owner = request.authuser.user_id == pull_request.owner_id |
|
189 | owner = request.authuser.user_id == pull_request.owner_id | |
190 | reviewer = PullRequestReviewer.query() \ |
|
190 | reviewer = PullRequestReviewer.query() \ | |
191 | .filter(PullRequestReviewer.pull_request == pull_request) \ |
|
191 | .filter(PullRequestReviewer.pull_request == pull_request) \ | |
192 | .filter(PullRequestReviewer.user_id == request.authuser.user_id) \ |
|
192 | .filter(PullRequestReviewer.user_id == request.authuser.user_id) \ | |
193 | .count() != 0 |
|
193 | .count() != 0 | |
194 |
|
194 | |||
195 | return request.authuser.admin or owner or reviewer |
|
195 | return request.authuser.admin or owner or reviewer | |
196 |
|
196 | |||
197 | @LoginRequired(allow_default_user=True) |
|
197 | @LoginRequired(allow_default_user=True) | |
198 | @HasRepoPermissionLevelDecorator('read') |
|
198 | @HasRepoPermissionLevelDecorator('read') | |
199 | def show_all(self, repo_name): |
|
199 | def show_all(self, repo_name): | |
200 | c.from_ = request.GET.get('from_') or '' |
|
200 | c.from_ = request.GET.get('from_') or '' | |
201 | c.closed = request.GET.get('closed') or '' |
|
201 | c.closed = request.GET.get('closed') or '' | |
202 | url_params = {} |
|
202 | url_params = {} | |
203 | if c.from_: |
|
203 | if c.from_: | |
204 | url_params['from_'] = 1 |
|
204 | url_params['from_'] = 1 | |
205 | if c.closed: |
|
205 | if c.closed: | |
206 | url_params['closed'] = 1 |
|
206 | url_params['closed'] = 1 | |
207 | p = safe_int(request.GET.get('page'), 1) |
|
207 | p = safe_int(request.GET.get('page'), 1) | |
208 |
|
208 | |||
209 | q = PullRequest.query(include_closed=c.closed, sorted=True) |
|
209 | q = PullRequest.query(include_closed=c.closed, sorted=True) | |
210 | if c.from_: |
|
210 | if c.from_: | |
211 | q = q.filter_by(org_repo=c.db_repo) |
|
211 | q = q.filter_by(org_repo=c.db_repo) | |
212 | else: |
|
212 | else: | |
213 | q = q.filter_by(other_repo=c.db_repo) |
|
213 | q = q.filter_by(other_repo=c.db_repo) | |
214 | c.pull_requests = q.all() |
|
214 | c.pull_requests = q.all() | |
215 |
|
215 | |||
216 | c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100, **url_params) |
|
216 | c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100, **url_params) | |
217 |
|
217 | |||
218 | return render('/pullrequests/pullrequest_show_all.html') |
|
218 | return render('/pullrequests/pullrequest_show_all.html') | |
219 |
|
219 | |||
220 | @LoginRequired() |
|
220 | @LoginRequired() | |
221 | def show_my(self): |
|
221 | def show_my(self): | |
222 | c.closed = request.GET.get('closed') or '' |
|
222 | c.closed = request.GET.get('closed') or '' | |
223 |
|
223 | |||
224 | c.my_pull_requests = PullRequest.query( |
|
224 | c.my_pull_requests = PullRequest.query( | |
225 | include_closed=c.closed, |
|
225 | include_closed=c.closed, | |
226 | sorted=True, |
|
226 | sorted=True, | |
227 | ).filter_by(owner_id=request.authuser.user_id).all() |
|
227 | ).filter_by(owner_id=request.authuser.user_id).all() | |
228 |
|
228 | |||
229 | c.participate_in_pull_requests = [] |
|
229 | c.participate_in_pull_requests = [] | |
230 | c.participate_in_pull_requests_todo = [] |
|
230 | c.participate_in_pull_requests_todo = [] | |
231 | done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED]) |
|
231 | done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED]) | |
232 | for pr in PullRequest.query( |
|
232 | for pr in PullRequest.query( | |
233 | include_closed=c.closed, |
|
233 | include_closed=c.closed, | |
234 | reviewer_id=request.authuser.user_id, |
|
234 | reviewer_id=request.authuser.user_id, | |
235 | sorted=True, |
|
235 | sorted=True, | |
236 | ): |
|
236 | ): | |
237 | status = pr.user_review_status(request.authuser.user_id) # very inefficient!!! |
|
237 | status = pr.user_review_status(request.authuser.user_id) # very inefficient!!! | |
238 | if status in done_status: |
|
238 | if status in done_status: | |
239 | c.participate_in_pull_requests.append(pr) |
|
239 | c.participate_in_pull_requests.append(pr) | |
240 | else: |
|
240 | else: | |
241 | c.participate_in_pull_requests_todo.append(pr) |
|
241 | c.participate_in_pull_requests_todo.append(pr) | |
242 |
|
242 | |||
243 | return render('/pullrequests/pullrequest_show_my.html') |
|
243 | return render('/pullrequests/pullrequest_show_my.html') | |
244 |
|
244 | |||
    @LoginRequired()
    @HasRepoPermissionLevelDecorator('read')
    def index(self):
        """Render the "create pull request" form for the current repository.

        Pre-populates the source ("cs") and destination ("a") repo/ref
        selectors, optionally seeded from the ``rev_start``/``rev_end``/
        ``branch`` request parameters.
        """
        org_repo = c.db_repo
        org_scm_instance = org_repo.scm_instance
        try:
            # probe for at least one changeset - an empty repo cannot be the
            # source of a pull request
            org_scm_instance.get_changeset()
        except EmptyRepositoryError as e:
            h.flash(_('There are no changesets yet'),
                    category='warning')
            raise HTTPFound(location=url('summary_home', repo_name=org_repo.repo_name))

        org_rev = request.GET.get('rev_end')
        # rev_start is not directly useful - its parent could however be used
        # as default for other and thus give a simple compare view
        rev_start = request.GET.get('rev_start')
        other_rev = None
        if rev_start:
            starters = org_repo.get_changeset(rev_start).parents
            if starters:
                other_rev = starters[0].raw_id
            else:
                # rev_start is a root changeset - compare against the empty repo
                other_rev = org_repo.scm_instance.EMPTY_CHANGESET
        branch = request.GET.get('branch')

        c.cs_repos = [(org_repo.repo_name, org_repo.repo_name)]
        c.default_cs_repo = org_repo.repo_name
        c.cs_refs, c.default_cs_ref = self._get_repo_refs(org_scm_instance, rev=org_rev, branch=branch)

        # default ref is '<type>:<name>:<rev>' - resolve a branch name for it
        default_cs_ref_type, default_cs_branch, default_cs_rev = c.default_cs_ref.split(':')
        if default_cs_ref_type != 'branch':
            default_cs_branch = org_repo.get_changeset(default_cs_rev).branch

        # add org repo to other so we can open pull request against peer branches on itself
        c.a_repos = [(org_repo.repo_name, '%s (self)' % org_repo.repo_name)]

        if org_repo.parent:
            # add parent of this fork also and select it.
            # use the same branch on destination as on source, if available.
            c.a_repos.append((org_repo.parent.repo_name, '%s (parent)' % org_repo.parent.repo_name))
            c.a_repo = org_repo.parent
            c.a_refs, c.default_a_ref = self._get_repo_refs(
                org_repo.parent.scm_instance, branch=default_cs_branch, rev=other_rev)

        else:
            c.a_repo = org_repo
            c.a_refs, c.default_a_ref = self._get_repo_refs(org_scm_instance, rev=other_rev)

        # gather forks and add to this list ... even though it is rare to
        # request forks to pull from their parent
        for fork in org_repo.forks:
            c.a_repos.append((fork.repo_name, fork.repo_name))

        return render('/pullrequests/pullrequest.html')
299 |
|
299 | |||
300 | @LoginRequired() |
|
300 | @LoginRequired() | |
301 | @HasRepoPermissionLevelDecorator('read') |
|
301 | @HasRepoPermissionLevelDecorator('read') | |
302 | @jsonify |
|
302 | @jsonify | |
303 | def repo_info(self, repo_name): |
|
303 | def repo_info(self, repo_name): | |
304 | repo = c.db_repo |
|
304 | repo = c.db_repo | |
305 | refs, selected_ref = self._get_repo_refs(repo.scm_instance) |
|
305 | refs, selected_ref = self._get_repo_refs(repo.scm_instance) | |
306 | return { |
|
306 | return { | |
307 | 'description': repo.description.split('\n', 1)[0], |
|
307 | 'description': repo.description.split('\n', 1)[0], | |
308 | 'selected_ref': selected_ref, |
|
308 | 'selected_ref': selected_ref, | |
309 | 'refs': refs, |
|
309 | 'refs': refs, | |
310 | } |
|
310 | } | |
311 |
|
311 | |||
312 | @LoginRequired() |
|
312 | @LoginRequired() | |
313 | @HasRepoPermissionLevelDecorator('read') |
|
313 | @HasRepoPermissionLevelDecorator('read') | |
314 | def create(self, repo_name): |
|
314 | def create(self, repo_name): | |
315 | repo = c.db_repo |
|
315 | repo = c.db_repo | |
316 | try: |
|
316 | try: | |
317 | _form = PullRequestForm(repo.repo_id)().to_python(request.POST) |
|
317 | _form = PullRequestForm(repo.repo_id)().to_python(request.POST) | |
318 | except formencode.Invalid as errors: |
|
318 | except formencode.Invalid as errors: | |
319 | log.error(traceback.format_exc()) |
|
319 | log.error(traceback.format_exc()) | |
320 | log.error(str(errors)) |
|
320 | log.error(str(errors)) | |
321 | msg = _('Error creating pull request: %s') % errors.msg |
|
321 | msg = _('Error creating pull request: %s') % errors.msg | |
322 | h.flash(msg, 'error') |
|
322 | h.flash(msg, 'error') | |
323 | raise HTTPBadRequest |
|
323 | raise HTTPBadRequest | |
324 |
|
324 | |||
325 | # heads up: org and other might seem backward here ... |
|
325 | # heads up: org and other might seem backward here ... | |
326 | org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name |
|
326 | org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name | |
327 | org_repo = Repository.guess_instance(_form['org_repo']) |
|
327 | org_repo = Repository.guess_instance(_form['org_repo']) | |
328 |
|
328 | |||
329 | other_ref = _form['other_ref'] # will have symbolic name and head revision |
|
329 | other_ref = _form['other_ref'] # will have symbolic name and head revision | |
330 | other_repo = Repository.guess_instance(_form['other_repo']) |
|
330 | other_repo = Repository.guess_instance(_form['other_repo']) | |
331 |
|
331 | |||
332 | reviewers = [] |
|
332 | reviewers = [] | |
333 |
|
333 | |||
334 | title = _form['pullrequest_title'] |
|
334 | title = _form['pullrequest_title'] | |
335 | description = _form['pullrequest_desc'].strip() |
|
335 | description = _form['pullrequest_desc'].strip() | |
336 | owner = User.get(request.authuser.user_id) |
|
336 | owner = User.get(request.authuser.user_id) | |
337 |
|
337 | |||
338 | try: |
|
338 | try: | |
339 | cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers) |
|
339 | cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers) | |
340 | except CreatePullRequestAction.ValidationError as e: |
|
340 | except CreatePullRequestAction.ValidationError as e: | |
341 | h.flash(e, category='error', logf=log.error) |
|
341 | h.flash(e, category='error', logf=log.error) | |
342 | raise HTTPNotFound |
|
342 | raise HTTPNotFound | |
343 |
|
343 | |||
344 | try: |
|
344 | try: | |
345 | pull_request = cmd.execute() |
|
345 | pull_request = cmd.execute() | |
346 | Session().commit() |
|
346 | Session().commit() | |
347 | except Exception: |
|
347 | except Exception: | |
348 | h.flash(_('Error occurred while creating pull request'), |
|
348 | h.flash(_('Error occurred while creating pull request'), | |
349 | category='error') |
|
349 | category='error') | |
350 | log.error(traceback.format_exc()) |
|
350 | log.error(traceback.format_exc()) | |
351 | raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name)) |
|
351 | raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name)) | |
352 |
|
352 | |||
353 | h.flash(_('Successfully opened new pull request'), |
|
353 | h.flash(_('Successfully opened new pull request'), | |
354 | category='success') |
|
354 | category='success') | |
355 | raise HTTPFound(location=pull_request.url()) |
|
355 | raise HTTPFound(location=pull_request.url()) | |
356 |
|
356 | |||
357 | def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers): |
|
357 | def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewers): | |
358 | owner = User.get(request.authuser.user_id) |
|
358 | owner = User.get(request.authuser.user_id) | |
359 | new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev) |
|
359 | new_org_rev = self._get_ref_rev(old_pull_request.org_repo, 'rev', new_rev) | |
360 | new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1]) |
|
360 | new_other_rev = self._get_ref_rev(old_pull_request.other_repo, old_pull_request.other_ref_parts[0], old_pull_request.other_ref_parts[1]) | |
361 | try: |
|
361 | try: | |
362 | cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers) |
|
362 | cmd = CreatePullRequestIterationAction(old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers) | |
363 | except CreatePullRequestAction.ValidationError as e: |
|
363 | except CreatePullRequestAction.ValidationError as e: | |
364 | h.flash(e, category='error', logf=log.error) |
|
364 | h.flash(e, category='error', logf=log.error) | |
365 | raise HTTPNotFound |
|
365 | raise HTTPNotFound | |
366 |
|
366 | |||
367 | try: |
|
367 | try: | |
368 | pull_request = cmd.execute() |
|
368 | pull_request = cmd.execute() | |
369 | Session().commit() |
|
369 | Session().commit() | |
370 | except Exception: |
|
370 | except Exception: | |
371 | h.flash(_('Error occurred while creating pull request'), |
|
371 | h.flash(_('Error occurred while creating pull request'), | |
372 | category='error') |
|
372 | category='error') | |
373 | log.error(traceback.format_exc()) |
|
373 | log.error(traceback.format_exc()) | |
374 | raise HTTPFound(location=old_pull_request.url()) |
|
374 | raise HTTPFound(location=old_pull_request.url()) | |
375 |
|
375 | |||
376 | h.flash(_('New pull request iteration created'), |
|
376 | h.flash(_('New pull request iteration created'), | |
377 | category='success') |
|
377 | category='success') | |
378 | raise HTTPFound(location=pull_request.url()) |
|
378 | raise HTTPFound(location=pull_request.url()) | |
379 |
|
379 | |||
380 | # pullrequest_post for PR editing |
|
380 | # pullrequest_post for PR editing | |
381 | @LoginRequired() |
|
381 | @LoginRequired() | |
382 | @HasRepoPermissionLevelDecorator('read') |
|
382 | @HasRepoPermissionLevelDecorator('read') | |
383 | def post(self, repo_name, pull_request_id): |
|
383 | def post(self, repo_name, pull_request_id): | |
384 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
384 | pull_request = PullRequest.get_or_404(pull_request_id) | |
385 | if pull_request.is_closed(): |
|
385 | if pull_request.is_closed(): | |
386 | raise HTTPForbidden() |
|
386 | raise HTTPForbidden() | |
387 | assert pull_request.other_repo.repo_name == repo_name |
|
387 | assert pull_request.other_repo.repo_name == repo_name | |
388 | # only owner or admin can update it |
|
388 | # only owner or admin can update it | |
389 | owner = pull_request.owner_id == request.authuser.user_id |
|
389 | owner = pull_request.owner_id == request.authuser.user_id | |
390 | repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name) |
|
390 | repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name) | |
391 | if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner): |
|
391 | if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner): | |
392 | raise HTTPForbidden() |
|
392 | raise HTTPForbidden() | |
393 |
|
393 | |||
394 | _form = PullRequestPostForm()().to_python(request.POST) |
|
394 | _form = PullRequestPostForm()().to_python(request.POST) | |
395 |
|
395 | |||
396 | cur_reviewers = set(pull_request.get_reviewer_users()) |
|
396 | cur_reviewers = set(pull_request.get_reviewer_users()) | |
397 | new_reviewers = set(_get_reviewer(s) for s in _form['review_members']) |
|
397 | new_reviewers = set(_get_reviewer(s) for s in _form['review_members']) | |
398 | old_reviewers = set(_get_reviewer(s) for s in _form['org_review_members']) |
|
398 | old_reviewers = set(_get_reviewer(s) for s in _form['org_review_members']) | |
399 |
|
399 | |||
400 | other_added = cur_reviewers - old_reviewers |
|
400 | other_added = cur_reviewers - old_reviewers | |
401 | other_removed = old_reviewers - cur_reviewers |
|
401 | other_removed = old_reviewers - cur_reviewers | |
402 |
|
402 | |||
403 | if other_added: |
|
403 | if other_added: | |
404 | h.flash(_('Meanwhile, the following reviewers have been added: %s') % |
|
404 | h.flash(_('Meanwhile, the following reviewers have been added: %s') % | |
405 | (', '.join(u.username for u in other_added)), |
|
405 | (', '.join(u.username for u in other_added)), | |
406 | category='warning') |
|
406 | category='warning') | |
407 | if other_removed: |
|
407 | if other_removed: | |
408 | h.flash(_('Meanwhile, the following reviewers have been removed: %s') % |
|
408 | h.flash(_('Meanwhile, the following reviewers have been removed: %s') % | |
409 | (', '.join(u.username for u in other_removed)), |
|
409 | (', '.join(u.username for u in other_removed)), | |
410 | category='warning') |
|
410 | category='warning') | |
411 |
|
411 | |||
412 | if _form['updaterev']: |
|
412 | if _form['updaterev']: | |
413 | return self.create_new_iteration(pull_request, |
|
413 | return self.create_new_iteration(pull_request, | |
414 | _form['updaterev'], |
|
414 | _form['updaterev'], | |
415 | _form['pullrequest_title'], |
|
415 | _form['pullrequest_title'], | |
416 | _form['pullrequest_desc'], |
|
416 | _form['pullrequest_desc'], | |
417 | new_reviewers) |
|
417 | new_reviewers) | |
418 |
|
418 | |||
419 | added_reviewers = new_reviewers - old_reviewers - cur_reviewers |
|
419 | added_reviewers = new_reviewers - old_reviewers - cur_reviewers | |
420 | removed_reviewers = (old_reviewers - new_reviewers) & cur_reviewers |
|
420 | removed_reviewers = (old_reviewers - new_reviewers) & cur_reviewers | |
421 |
|
421 | |||
422 | old_description = pull_request.description |
|
422 | old_description = pull_request.description | |
423 | pull_request.title = _form['pullrequest_title'] |
|
423 | pull_request.title = _form['pullrequest_title'] | |
424 | pull_request.description = _form['pullrequest_desc'].strip() or _('No description') |
|
424 | pull_request.description = _form['pullrequest_desc'].strip() or _('No description') | |
425 | pull_request.owner = User.get_by_username(_form['owner']) |
|
425 | pull_request.owner = User.get_by_username(_form['owner']) | |
426 | user = User.get(request.authuser.user_id) |
|
426 | user = User.get(request.authuser.user_id) | |
427 |
|
427 | |||
428 | PullRequestModel().mention_from_description(user, pull_request, old_description) |
|
428 | PullRequestModel().mention_from_description(user, pull_request, old_description) | |
429 | PullRequestModel().add_reviewers(user, pull_request, added_reviewers) |
|
429 | PullRequestModel().add_reviewers(user, pull_request, added_reviewers) | |
430 | PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers) |
|
430 | PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers) | |
431 |
|
431 | |||
432 | Session().commit() |
|
432 | Session().commit() | |
433 | h.flash(_('Pull request updated'), category='success') |
|
433 | h.flash(_('Pull request updated'), category='success') | |
434 |
|
434 | |||
435 | raise HTTPFound(location=pull_request.url()) |
|
435 | raise HTTPFound(location=pull_request.url()) | |
436 |
|
436 | |||
437 | @LoginRequired() |
|
437 | @LoginRequired() | |
438 | @HasRepoPermissionLevelDecorator('read') |
|
438 | @HasRepoPermissionLevelDecorator('read') | |
439 | @jsonify |
|
439 | @jsonify | |
440 | def delete(self, repo_name, pull_request_id): |
|
440 | def delete(self, repo_name, pull_request_id): | |
441 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
441 | pull_request = PullRequest.get_or_404(pull_request_id) | |
442 | # only owner can delete it ! |
|
442 | # only owner can delete it ! | |
443 | if pull_request.owner_id == request.authuser.user_id: |
|
443 | if pull_request.owner_id == request.authuser.user_id: | |
444 | PullRequestModel().delete(pull_request) |
|
444 | PullRequestModel().delete(pull_request) | |
445 | Session().commit() |
|
445 | Session().commit() | |
446 | h.flash(_('Successfully deleted pull request'), |
|
446 | h.flash(_('Successfully deleted pull request'), | |
447 | category='success') |
|
447 | category='success') | |
448 | raise HTTPFound(location=url('my_pullrequests')) |
|
448 | raise HTTPFound(location=url('my_pullrequests')) | |
449 | raise HTTPForbidden() |
|
449 | raise HTTPForbidden() | |
450 |
|
450 | |||
451 | @LoginRequired(allow_default_user=True) |
|
451 | @LoginRequired(allow_default_user=True) | |
452 | @HasRepoPermissionLevelDecorator('read') |
|
452 | @HasRepoPermissionLevelDecorator('read') | |
453 | def show(self, repo_name, pull_request_id, extra=None): |
|
453 | def show(self, repo_name, pull_request_id, extra=None): | |
454 | c.pull_request = PullRequest.get_or_404(pull_request_id) |
|
454 | c.pull_request = PullRequest.get_or_404(pull_request_id) | |
455 | c.allowed_to_change_status = self._is_allowed_to_change_status(c.pull_request) |
|
455 | c.allowed_to_change_status = self._is_allowed_to_change_status(c.pull_request) | |
456 | cc_model = ChangesetCommentsModel() |
|
456 | cc_model = ChangesetCommentsModel() | |
457 | cs_model = ChangesetStatusModel() |
|
457 | cs_model = ChangesetStatusModel() | |
458 |
|
458 | |||
459 | # pull_requests repo_name we opened it against |
|
459 | # pull_requests repo_name we opened it against | |
460 | # ie. other_repo must match |
|
460 | # ie. other_repo must match | |
461 | if repo_name != c.pull_request.other_repo.repo_name: |
|
461 | if repo_name != c.pull_request.other_repo.repo_name: | |
462 | raise HTTPNotFound |
|
462 | raise HTTPNotFound | |
463 |
|
463 | |||
464 | # load compare data into template context |
|
464 | # load compare data into template context | |
465 | c.cs_repo = c.pull_request.org_repo |
|
465 | c.cs_repo = c.pull_request.org_repo | |
466 | (c.cs_ref_type, |
|
466 | (c.cs_ref_type, | |
467 | c.cs_ref_name, |
|
467 | c.cs_ref_name, | |
468 | c.cs_rev) = c.pull_request.org_ref.split(':') |
|
468 | c.cs_rev) = c.pull_request.org_ref.split(':') | |
469 |
|
469 | |||
470 | c.a_repo = c.pull_request.other_repo |
|
470 | c.a_repo = c.pull_request.other_repo | |
471 | (c.a_ref_type, |
|
471 | (c.a_ref_type, | |
472 | c.a_ref_name, |
|
472 | c.a_ref_name, | |
473 | c.a_rev) = c.pull_request.other_ref.split(':') # a_rev is ancestor |
|
473 | c.a_rev) = c.pull_request.other_ref.split(':') # a_rev is ancestor | |
474 |
|
474 | |||
475 | org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!! |
|
475 | org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!! | |
476 | try: |
|
476 | try: | |
477 | c.cs_ranges = [] |
|
477 | c.cs_ranges = [] | |
478 | for x in c.pull_request.revisions: |
|
478 | for x in c.pull_request.revisions: | |
479 | c.cs_ranges.append(org_scm_instance.get_changeset(x)) |
|
479 | c.cs_ranges.append(org_scm_instance.get_changeset(x)) | |
480 | except ChangesetDoesNotExistError: |
|
480 | except ChangesetDoesNotExistError: | |
481 | c.cs_ranges = [] |
|
481 | c.cs_ranges = [] | |
482 | h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name), |
|
482 | h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name), | |
483 | 'error') |
|
483 | 'error') | |
484 | c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ... |
|
484 | c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ... | |
485 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] |
|
485 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] | |
486 | c.jsdata = graph_data(org_scm_instance, revs) |
|
486 | c.jsdata = graph_data(org_scm_instance, revs) | |
487 |
|
487 | |||
488 | c.is_range = False |
|
488 | c.is_range = False | |
489 | try: |
|
489 | try: | |
490 | if c.a_ref_type == 'rev': # this looks like a free range where target is ancestor |
|
490 | if c.a_ref_type == 'rev': # this looks like a free range where target is ancestor | |
491 | cs_a = org_scm_instance.get_changeset(c.a_rev) |
|
491 | cs_a = org_scm_instance.get_changeset(c.a_rev) | |
492 | root_parents = c.cs_ranges[0].parents |
|
492 | root_parents = c.cs_ranges[0].parents | |
493 | c.is_range = cs_a in root_parents |
|
493 | c.is_range = cs_a in root_parents | |
494 | #c.merge_root = len(root_parents) > 1 # a range starting with a merge might deserve a warning |
|
494 | #c.merge_root = len(root_parents) > 1 # a range starting with a merge might deserve a warning | |
495 | except ChangesetDoesNotExistError: # probably because c.a_rev not found |
|
495 | except ChangesetDoesNotExistError: # probably because c.a_rev not found | |
496 | pass |
|
496 | pass | |
497 | except IndexError: # probably because c.cs_ranges is empty, probably because revisions are missing |
|
497 | except IndexError: # probably because c.cs_ranges is empty, probably because revisions are missing | |
498 | pass |
|
498 | pass | |
499 |
|
499 | |||
500 | avail_revs = set() |
|
500 | avail_revs = set() | |
501 | avail_show = [] |
|
501 | avail_show = [] | |
502 | c.cs_branch_name = c.cs_ref_name |
|
502 | c.cs_branch_name = c.cs_ref_name | |
503 | c.a_branch_name = None |
|
503 | c.a_branch_name = None | |
504 | other_scm_instance = c.a_repo.scm_instance |
|
504 | other_scm_instance = c.a_repo.scm_instance | |
505 | c.update_msg = "" |
|
505 | c.update_msg = "" | |
506 | c.update_msg_other = "" |
|
506 | c.update_msg_other = "" | |
507 | try: |
|
507 | try: | |
508 | if not c.cs_ranges: |
|
508 | if not c.cs_ranges: | |
509 | c.update_msg = _('Error: changesets not found when displaying pull request from %s.') % c.cs_rev |
|
509 | c.update_msg = _('Error: changesets not found when displaying pull request from %s.') % c.cs_rev | |
510 | elif org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor': |
|
510 | elif org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor': | |
511 | if c.cs_ref_type != 'branch': |
|
511 | if c.cs_ref_type != 'branch': | |
512 | c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ? |
|
512 | c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ? | |
513 | c.a_branch_name = c.a_ref_name |
|
513 | c.a_branch_name = c.a_ref_name | |
514 | if c.a_ref_type != 'branch': |
|
514 | if c.a_ref_type != 'branch': | |
515 | try: |
|
515 | try: | |
516 | c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ? |
|
516 | c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ? | |
517 | except EmptyRepositoryError: |
|
517 | except EmptyRepositoryError: | |
518 | c.a_branch_name = 'null' # not a branch name ... but close enough |
|
518 | c.a_branch_name = 'null' # not a branch name ... but close enough | |
519 | # candidates: descendants of old head that are on the right branch |
|
519 | # candidates: descendants of old head that are on the right branch | |
520 | # and not are the old head itself ... |
|
520 | # and not are the old head itself ... | |
521 | # and nothing at all if old head is a descendant of target ref name |
|
521 | # and nothing at all if old head is a descendant of target ref name | |
522 | if not c.is_range and other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name): |
|
522 | if not c.is_range and other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name): | |
523 | c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name |
|
523 | c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name | |
524 | elif c.pull_request.is_closed(): |
|
524 | elif c.pull_request.is_closed(): | |
525 | c.update_msg = _('This pull request has been closed and can not be updated.') |
|
525 | c.update_msg = _('This pull request has been closed and can not be updated.') | |
526 | else: # look for descendants of PR head on source branch in org repo |
|
526 | else: # look for descendants of PR head on source branch in org repo | |
527 | avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)', |
|
527 | avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)', | |
528 | revs[0], c.cs_branch_name) |
|
528 | revs[0], c.cs_branch_name) | |
529 | if len(avail_revs) > 1: # more than just revs[0] |
|
529 | if len(avail_revs) > 1: # more than just revs[0] | |
530 | # also show changesets that not are descendants but would be merged in |
|
530 | # also show changesets that not are descendants but would be merged in | |
531 | targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id |
|
531 | targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id | |
532 | if org_scm_instance.path != other_scm_instance.path: |
|
532 | if org_scm_instance.path != other_scm_instance.path: | |
533 | # Note: org_scm_instance.path must come first so all |
|
533 | # Note: org_scm_instance.path must come first so all | |
534 | # valid revision numbers are 100% org_scm compatible |
|
534 | # valid revision numbers are 100% org_scm compatible | |
535 | # - both for avail_revs and for revset results |
|
535 | # - both for avail_revs and for revset results | |
536 | hgrepo = unionrepo.makeunionrepository(org_scm_instance.baseui, |
|
536 | hgrepo = mercurial.unionrepo.makeunionrepository(org_scm_instance.baseui, | |
537 | org_scm_instance.path, |
|
537 | org_scm_instance.path, | |
538 | other_scm_instance.path) |
|
538 | other_scm_instance.path) | |
539 | else: |
|
539 | else: | |
540 | hgrepo = org_scm_instance._repo |
|
540 | hgrepo = org_scm_instance._repo | |
541 | show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s', |
|
541 | show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s', | |
542 | avail_revs, revs[0], targethead)) |
|
542 | avail_revs, revs[0], targethead)) | |
543 | if show: |
|
543 | if show: | |
544 | c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name |
|
544 | c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name | |
545 | else: |
|
545 | else: | |
546 | c.update_msg = _('No additional changesets found for iterating on this pull request.') |
|
546 | c.update_msg = _('No additional changesets found for iterating on this pull request.') | |
547 | else: |
|
547 | else: | |
548 | show = set() |
|
548 | show = set() | |
549 | avail_revs = set() # drop revs[0] |
|
549 | avail_revs = set() # drop revs[0] | |
550 | c.update_msg = _('No additional changesets found for iterating on this pull request.') |
|
550 | c.update_msg = _('No additional changesets found for iterating on this pull request.') | |
551 |
|
551 | |||
552 | # TODO: handle branch heads that not are tip-most |
|
552 | # TODO: handle branch heads that not are tip-most | |
553 | brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0]) |
|
553 | brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0]) | |
554 | if brevs: |
|
554 | if brevs: | |
555 | # also show changesets that are on branch but neither ancestors nor descendants |
|
555 | # also show changesets that are on branch but neither ancestors nor descendants | |
556 | show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name)) |
|
556 | show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name)) | |
557 | show.add(revs[0]) # make sure graph shows this so we can see how they relate |
|
557 | show.add(revs[0]) # make sure graph shows this so we can see how they relate | |
558 | c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name, |
|
558 | c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name, | |
559 | h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id)) |
|
559 | h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id)) | |
560 |
|
560 | |||
561 | avail_show = sorted(show, reverse=True) |
|
561 | avail_show = sorted(show, reverse=True) | |
562 |
|
562 | |||
563 | elif org_scm_instance.alias == 'git': |
|
563 | elif org_scm_instance.alias == 'git': | |
564 | c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not |
|
564 | c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not | |
565 | c.update_msg = _("Git pull requests don't support iterating yet.") |
|
565 | c.update_msg = _("Git pull requests don't support iterating yet.") | |
566 | except ChangesetDoesNotExistError: |
|
566 | except ChangesetDoesNotExistError: | |
567 | c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev |
|
567 | c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev | |
568 |
|
568 | |||
569 | c.avail_revs = avail_revs |
|
569 | c.avail_revs = avail_revs | |
570 | c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show] |
|
570 | c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show] | |
571 | c.avail_jsdata = graph_data(org_scm_instance, avail_show) |
|
571 | c.avail_jsdata = graph_data(org_scm_instance, avail_show) | |
572 |
|
572 | |||
573 | raw_ids = [x.raw_id for x in c.cs_ranges] |
|
573 | raw_ids = [x.raw_id for x in c.cs_ranges] | |
574 | c.cs_comments = c.cs_repo.get_comments(raw_ids) |
|
574 | c.cs_comments = c.cs_repo.get_comments(raw_ids) | |
575 | c.cs_statuses = c.cs_repo.statuses(raw_ids) |
|
575 | c.cs_statuses = c.cs_repo.statuses(raw_ids) | |
576 |
|
576 | |||
577 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
577 | ignore_whitespace = request.GET.get('ignorews') == '1' | |
578 | line_context = safe_int(request.GET.get('context'), 3) |
|
578 | line_context = safe_int(request.GET.get('context'), 3) | |
579 | c.ignorews_url = _ignorews_url |
|
579 | c.ignorews_url = _ignorews_url | |
580 | c.context_url = _context_url |
|
580 | c.context_url = _context_url | |
581 | fulldiff = request.GET.get('fulldiff') |
|
581 | fulldiff = request.GET.get('fulldiff') | |
582 | diff_limit = None if fulldiff else self.cut_off_limit |
|
582 | diff_limit = None if fulldiff else self.cut_off_limit | |
583 |
|
583 | |||
584 | # we swap org/other ref since we run a simple diff on one repo |
|
584 | # we swap org/other ref since we run a simple diff on one repo | |
585 | log.debug('running diff between %s and %s in %s', |
|
585 | log.debug('running diff between %s and %s in %s', | |
586 | c.a_rev, c.cs_rev, org_scm_instance.path) |
|
586 | c.a_rev, c.cs_rev, org_scm_instance.path) | |
587 | try: |
|
587 | try: | |
588 | raw_diff = diffs.get_diff(org_scm_instance, rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev), |
|
588 | raw_diff = diffs.get_diff(org_scm_instance, rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev), | |
589 | ignore_whitespace=ignore_whitespace, context=line_context) |
|
589 | ignore_whitespace=ignore_whitespace, context=line_context) | |
590 | except ChangesetDoesNotExistError: |
|
590 | except ChangesetDoesNotExistError: | |
591 | raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found.")) |
|
591 | raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found.")) | |
592 | diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) |
|
592 | diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit) | |
593 | c.limited_diff = diff_processor.limited_diff |
|
593 | c.limited_diff = diff_processor.limited_diff | |
594 | c.file_diff_data = [] |
|
594 | c.file_diff_data = [] | |
595 | c.lines_added = 0 |
|
595 | c.lines_added = 0 | |
596 | c.lines_deleted = 0 |
|
596 | c.lines_deleted = 0 | |
597 |
|
597 | |||
598 | for f in diff_processor.parsed: |
|
598 | for f in diff_processor.parsed: | |
599 | st = f['stats'] |
|
599 | st = f['stats'] | |
600 | c.lines_added += st['added'] |
|
600 | c.lines_added += st['added'] | |
601 | c.lines_deleted += st['deleted'] |
|
601 | c.lines_deleted += st['deleted'] | |
602 | filename = f['filename'] |
|
602 | filename = f['filename'] | |
603 | fid = h.FID('', filename) |
|
603 | fid = h.FID('', filename) | |
604 | html_diff = diffs.as_html(enable_comments=True, parsed_lines=[f]) |
|
604 | html_diff = diffs.as_html(enable_comments=True, parsed_lines=[f]) | |
605 | c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st)) |
|
605 | c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st)) | |
606 |
|
606 | |||
607 | # inline comments |
|
607 | # inline comments | |
608 | c.inline_cnt = 0 |
|
608 | c.inline_cnt = 0 | |
609 | c.inline_comments = cc_model.get_inline_comments( |
|
609 | c.inline_comments = cc_model.get_inline_comments( | |
610 | c.db_repo.repo_id, |
|
610 | c.db_repo.repo_id, | |
611 | pull_request=pull_request_id) |
|
611 | pull_request=pull_request_id) | |
612 | # count inline comments |
|
612 | # count inline comments | |
613 | for __, lines in c.inline_comments: |
|
613 | for __, lines in c.inline_comments: | |
614 | for comments in lines.values(): |
|
614 | for comments in lines.values(): | |
615 | c.inline_cnt += len(comments) |
|
615 | c.inline_cnt += len(comments) | |
616 | # comments |
|
616 | # comments | |
617 | c.comments = cc_model.get_comments(c.db_repo.repo_id, pull_request=pull_request_id) |
|
617 | c.comments = cc_model.get_comments(c.db_repo.repo_id, pull_request=pull_request_id) | |
618 |
|
618 | |||
619 | # (badly named) pull-request status calculation based on reviewer votes |
|
619 | # (badly named) pull-request status calculation based on reviewer votes | |
620 | (c.pull_request_reviewers, |
|
620 | (c.pull_request_reviewers, | |
621 | c.pull_request_pending_reviewers, |
|
621 | c.pull_request_pending_reviewers, | |
622 | c.current_voting_result, |
|
622 | c.current_voting_result, | |
623 | ) = cs_model.calculate_pull_request_result(c.pull_request) |
|
623 | ) = cs_model.calculate_pull_request_result(c.pull_request) | |
624 | c.changeset_statuses = ChangesetStatus.STATUSES |
|
624 | c.changeset_statuses = ChangesetStatus.STATUSES | |
625 |
|
625 | |||
626 | c.is_ajax_preview = False |
|
626 | c.is_ajax_preview = False | |
627 | c.ancestors = None # [c.a_rev] ... but that is shown in an other way |
|
627 | c.ancestors = None # [c.a_rev] ... but that is shown in an other way | |
628 | return render('/pullrequests/pullrequest_show.html') |
|
628 | return render('/pullrequests/pullrequest_show.html') | |
629 |
|
629 | |||
630 | @LoginRequired() |
|
630 | @LoginRequired() | |
631 | @HasRepoPermissionLevelDecorator('read') |
|
631 | @HasRepoPermissionLevelDecorator('read') | |
632 | @jsonify |
|
632 | @jsonify | |
633 | def comment(self, repo_name, pull_request_id): |
|
633 | def comment(self, repo_name, pull_request_id): | |
634 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
634 | pull_request = PullRequest.get_or_404(pull_request_id) | |
635 | allowed_to_change_status = self._is_allowed_to_change_status(pull_request) |
|
635 | allowed_to_change_status = self._is_allowed_to_change_status(pull_request) | |
636 | return create_cs_pr_comment(repo_name, pull_request=pull_request, |
|
636 | return create_cs_pr_comment(repo_name, pull_request=pull_request, | |
637 | allowed_to_change_status=allowed_to_change_status) |
|
637 | allowed_to_change_status=allowed_to_change_status) | |
638 |
|
638 | |||
639 | @LoginRequired() |
|
639 | @LoginRequired() | |
640 | @HasRepoPermissionLevelDecorator('read') |
|
640 | @HasRepoPermissionLevelDecorator('read') | |
641 | @jsonify |
|
641 | @jsonify | |
642 | def delete_comment(self, repo_name, comment_id): |
|
642 | def delete_comment(self, repo_name, comment_id): | |
643 | return delete_cs_pr_comment(repo_name, comment_id) |
|
643 | return delete_cs_pr_comment(repo_name, comment_id) |
@@ -1,402 +1,403 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.hooks |
|
15 | kallithea.lib.hooks | |
16 | ~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Hooks run by Kallithea |
|
18 | Hooks run by Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Aug 6, 2010 |
|
22 | :created_on: Aug 6, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import os |
|
28 | import os | |
29 | import time |
|
29 | import time | |
30 |
|
30 | |||
|
31 | import mercurial.scmutil | |||
|
32 | ||||
31 | from kallithea.lib import helpers as h |
|
33 | from kallithea.lib import helpers as h | |
32 | from kallithea.lib.exceptions import UserCreationError |
|
34 | from kallithea.lib.exceptions import UserCreationError | |
33 | from kallithea.lib.utils import action_logger, make_ui, setup_cache_regions |
|
35 | from kallithea.lib.utils import action_logger, make_ui, setup_cache_regions | |
34 | from kallithea.lib.utils2 import ascii_str, get_hook_environment, safe_str, safe_unicode |
|
36 | from kallithea.lib.utils2 import ascii_str, get_hook_environment, safe_str, safe_unicode | |
35 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
37 | from kallithea.lib.vcs.backends.base import EmptyChangeset | |
36 | from kallithea.lib.vcs.utils.hgcompat import revrange |
|
|||
37 | from kallithea.model.db import Repository, User |
|
38 | from kallithea.model.db import Repository, User | |
38 |
|
39 | |||
39 |
|
40 | |||
40 | def _get_scm_size(alias, root_path): |
|
41 | def _get_scm_size(alias, root_path): | |
41 | if not alias.startswith('.'): |
|
42 | if not alias.startswith('.'): | |
42 | alias += '.' |
|
43 | alias += '.' | |
43 |
|
44 | |||
44 | size_scm, size_root = 0, 0 |
|
45 | size_scm, size_root = 0, 0 | |
45 | for path, dirs, files in os.walk(safe_str(root_path)): |
|
46 | for path, dirs, files in os.walk(safe_str(root_path)): | |
46 | if path.find(alias) != -1: |
|
47 | if path.find(alias) != -1: | |
47 | for f in files: |
|
48 | for f in files: | |
48 | try: |
|
49 | try: | |
49 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
50 | size_scm += os.path.getsize(os.path.join(path, f)) | |
50 | except OSError: |
|
51 | except OSError: | |
51 | pass |
|
52 | pass | |
52 | else: |
|
53 | else: | |
53 | for f in files: |
|
54 | for f in files: | |
54 | try: |
|
55 | try: | |
55 | size_root += os.path.getsize(os.path.join(path, f)) |
|
56 | size_root += os.path.getsize(os.path.join(path, f)) | |
56 | except OSError: |
|
57 | except OSError: | |
57 | pass |
|
58 | pass | |
58 |
|
59 | |||
59 | size_scm_f = h.format_byte_size(size_scm) |
|
60 | size_scm_f = h.format_byte_size(size_scm) | |
60 | size_root_f = h.format_byte_size(size_root) |
|
61 | size_root_f = h.format_byte_size(size_root) | |
61 | size_total_f = h.format_byte_size(size_root + size_scm) |
|
62 | size_total_f = h.format_byte_size(size_root + size_scm) | |
62 |
|
63 | |||
63 | return size_scm_f, size_root_f, size_total_f |
|
64 | return size_scm_f, size_root_f, size_total_f | |
64 |
|
65 | |||
65 |
|
66 | |||
66 | def repo_size(ui, repo, hooktype=None, **kwargs): |
|
67 | def repo_size(ui, repo, hooktype=None, **kwargs): | |
67 | """Presents size of repository after push""" |
|
68 | """Presents size of repository after push""" | |
68 | size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root) |
|
69 | size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root) | |
69 |
|
70 | |||
70 | last_cs = repo[len(repo) - 1] |
|
71 | last_cs = repo[len(repo) - 1] | |
71 |
|
72 | |||
72 | msg = ('Repository size .hg: %s Checkout: %s Total: %s\n' |
|
73 | msg = ('Repository size .hg: %s Checkout: %s Total: %s\n' | |
73 | 'Last revision is now r%s:%s\n') % ( |
|
74 | 'Last revision is now r%s:%s\n') % ( | |
74 | size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12] |
|
75 | size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12] | |
75 | ) |
|
76 | ) | |
76 | ui.status(msg) |
|
77 | ui.status(msg) | |
77 |
|
78 | |||
78 |
|
79 | |||
79 | def log_pull_action(ui, repo, **kwargs): |
|
80 | def log_pull_action(ui, repo, **kwargs): | |
80 | """Logs user last pull action |
|
81 | """Logs user last pull action | |
81 |
|
82 | |||
82 | Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git. |
|
83 | Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git. | |
83 |
|
84 | |||
84 | Does *not* use the action from the hook environment but is always 'pull'. |
|
85 | Does *not* use the action from the hook environment but is always 'pull'. | |
85 | """ |
|
86 | """ | |
86 | ex = get_hook_environment() |
|
87 | ex = get_hook_environment() | |
87 |
|
88 | |||
88 | user = User.get_by_username(ex.username) |
|
89 | user = User.get_by_username(ex.username) | |
89 | action = 'pull' |
|
90 | action = 'pull' | |
90 | action_logger(user, action, ex.repository, ex.ip, commit=True) |
|
91 | action_logger(user, action, ex.repository, ex.ip, commit=True) | |
91 | # extension hook call |
|
92 | # extension hook call | |
92 | from kallithea import EXTENSIONS |
|
93 | from kallithea import EXTENSIONS | |
93 | callback = getattr(EXTENSIONS, 'PULL_HOOK', None) |
|
94 | callback = getattr(EXTENSIONS, 'PULL_HOOK', None) | |
94 | if callable(callback): |
|
95 | if callable(callback): | |
95 | kw = {} |
|
96 | kw = {} | |
96 | kw.update(ex) |
|
97 | kw.update(ex) | |
97 | callback(**kw) |
|
98 | callback(**kw) | |
98 |
|
99 | |||
99 | return 0 |
|
100 | return 0 | |
100 |
|
101 | |||
101 |
|
102 | |||
102 | def log_push_action(ui, repo, node, node_last, **kwargs): |
|
103 | def log_push_action(ui, repo, node, node_last, **kwargs): | |
103 | """ |
|
104 | """ | |
104 | Entry point for Mercurial hook changegroup.push_logger. |
|
105 | Entry point for Mercurial hook changegroup.push_logger. | |
105 |
|
106 | |||
106 | The pushed changesets is given by the revset 'node:node_last'. |
|
107 | The pushed changesets is given by the revset 'node:node_last'. | |
107 |
|
108 | |||
108 | Note: This hook is not only logging, but also the side effect invalidating |
|
109 | Note: This hook is not only logging, but also the side effect invalidating | |
109 | cahes! The function should perhaps be renamed. |
|
110 | cahes! The function should perhaps be renamed. | |
110 | """ |
|
111 | """ | |
111 | revs = [ascii_str(repo[r].hex()) for r in revrange(repo, [b'%s:%s' % (node, node_last)])] |
|
112 | revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])] | |
112 | process_pushed_raw_ids(revs) |
|
113 | process_pushed_raw_ids(revs) | |
113 | return 0 |
|
114 | return 0 | |
114 |
|
115 | |||
115 |
|
116 | |||
116 | def process_pushed_raw_ids(revs): |
|
117 | def process_pushed_raw_ids(revs): | |
117 | """ |
|
118 | """ | |
118 | Register that changes have been added to the repo - log the action *and* invalidate caches. |
|
119 | Register that changes have been added to the repo - log the action *and* invalidate caches. | |
119 |
|
120 | |||
120 | Called from Mercurial changegroup.push_logger calling hook log_push_action, |
|
121 | Called from Mercurial changegroup.push_logger calling hook log_push_action, | |
121 | or from the Git post-receive hook calling handle_git_post_receive ... |
|
122 | or from the Git post-receive hook calling handle_git_post_receive ... | |
122 | or from scm _handle_push. |
|
123 | or from scm _handle_push. | |
123 | """ |
|
124 | """ | |
124 | ex = get_hook_environment() |
|
125 | ex = get_hook_environment() | |
125 |
|
126 | |||
126 | action = '%s:%s' % (ex.action, ','.join(revs)) |
|
127 | action = '%s:%s' % (ex.action, ','.join(revs)) | |
127 | action_logger(ex.username, action, ex.repository, ex.ip, commit=True) |
|
128 | action_logger(ex.username, action, ex.repository, ex.ip, commit=True) | |
128 |
|
129 | |||
129 | from kallithea.model.scm import ScmModel |
|
130 | from kallithea.model.scm import ScmModel | |
130 | ScmModel().mark_for_invalidation(ex.repository) |
|
131 | ScmModel().mark_for_invalidation(ex.repository) | |
131 |
|
132 | |||
132 | # extension hook call |
|
133 | # extension hook call | |
133 | from kallithea import EXTENSIONS |
|
134 | from kallithea import EXTENSIONS | |
134 | callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) |
|
135 | callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) | |
135 | if callable(callback): |
|
136 | if callable(callback): | |
136 | kw = {'pushed_revs': revs} |
|
137 | kw = {'pushed_revs': revs} | |
137 | kw.update(ex) |
|
138 | kw.update(ex) | |
138 | callback(**kw) |
|
139 | callback(**kw) | |
139 |
|
140 | |||
140 |
|
141 | |||
141 | def log_create_repository(repository_dict, created_by, **kwargs): |
|
142 | def log_create_repository(repository_dict, created_by, **kwargs): | |
142 | """ |
|
143 | """ | |
143 | Post create repository Hook. |
|
144 | Post create repository Hook. | |
144 |
|
145 | |||
145 | :param repository: dict dump of repository object |
|
146 | :param repository: dict dump of repository object | |
146 | :param created_by: username who created repository |
|
147 | :param created_by: username who created repository | |
147 |
|
148 | |||
148 | available keys of repository_dict: |
|
149 | available keys of repository_dict: | |
149 |
|
150 | |||
150 | 'repo_type', |
|
151 | 'repo_type', | |
151 | 'description', |
|
152 | 'description', | |
152 | 'private', |
|
153 | 'private', | |
153 | 'created_on', |
|
154 | 'created_on', | |
154 | 'enable_downloads', |
|
155 | 'enable_downloads', | |
155 | 'repo_id', |
|
156 | 'repo_id', | |
156 | 'owner_id', |
|
157 | 'owner_id', | |
157 | 'enable_statistics', |
|
158 | 'enable_statistics', | |
158 | 'clone_uri', |
|
159 | 'clone_uri', | |
159 | 'fork_id', |
|
160 | 'fork_id', | |
160 | 'group_id', |
|
161 | 'group_id', | |
161 | 'repo_name' |
|
162 | 'repo_name' | |
162 |
|
163 | |||
163 | """ |
|
164 | """ | |
164 | from kallithea import EXTENSIONS |
|
165 | from kallithea import EXTENSIONS | |
165 | callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) |
|
166 | callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) | |
166 | if callable(callback): |
|
167 | if callable(callback): | |
167 | kw = {} |
|
168 | kw = {} | |
168 | kw.update(repository_dict) |
|
169 | kw.update(repository_dict) | |
169 | kw.update({'created_by': created_by}) |
|
170 | kw.update({'created_by': created_by}) | |
170 | kw.update(kwargs) |
|
171 | kw.update(kwargs) | |
171 | return callback(**kw) |
|
172 | return callback(**kw) | |
172 |
|
173 | |||
173 | return 0 |
|
174 | return 0 | |
174 |
|
175 | |||
175 |
|
176 | |||
176 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
177 | def check_allowed_create_user(user_dict, created_by, **kwargs): | |
177 | # pre create hooks |
|
178 | # pre create hooks | |
178 | from kallithea import EXTENSIONS |
|
179 | from kallithea import EXTENSIONS | |
179 | callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) |
|
180 | callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) | |
180 | if callable(callback): |
|
181 | if callable(callback): | |
181 | allowed, reason = callback(created_by=created_by, **user_dict) |
|
182 | allowed, reason = callback(created_by=created_by, **user_dict) | |
182 | if not allowed: |
|
183 | if not allowed: | |
183 | raise UserCreationError(reason) |
|
184 | raise UserCreationError(reason) | |
184 |
|
185 | |||
185 |
|
186 | |||
186 | def log_create_user(user_dict, created_by, **kwargs): |
|
187 | def log_create_user(user_dict, created_by, **kwargs): | |
187 | """ |
|
188 | """ | |
188 | Post create user Hook. |
|
189 | Post create user Hook. | |
189 |
|
190 | |||
190 | :param user_dict: dict dump of user object |
|
191 | :param user_dict: dict dump of user object | |
191 |
|
192 | |||
192 | available keys for user_dict: |
|
193 | available keys for user_dict: | |
193 |
|
194 | |||
194 | 'username', |
|
195 | 'username', | |
195 | 'full_name_or_username', |
|
196 | 'full_name_or_username', | |
196 | 'full_contact', |
|
197 | 'full_contact', | |
197 | 'user_id', |
|
198 | 'user_id', | |
198 | 'name', |
|
199 | 'name', | |
199 | 'firstname', |
|
200 | 'firstname', | |
200 | 'short_contact', |
|
201 | 'short_contact', | |
201 | 'admin', |
|
202 | 'admin', | |
202 | 'lastname', |
|
203 | 'lastname', | |
203 | 'ip_addresses', |
|
204 | 'ip_addresses', | |
204 | 'ldap_dn', |
|
205 | 'ldap_dn', | |
205 | 'email', |
|
206 | 'email', | |
206 | 'api_key', |
|
207 | 'api_key', | |
207 | 'last_login', |
|
208 | 'last_login', | |
208 | 'full_name', |
|
209 | 'full_name', | |
209 | 'active', |
|
210 | 'active', | |
210 | 'password', |
|
211 | 'password', | |
211 | 'emails', |
|
212 | 'emails', | |
212 |
|
213 | |||
213 | """ |
|
214 | """ | |
214 | from kallithea import EXTENSIONS |
|
215 | from kallithea import EXTENSIONS | |
215 | callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None) |
|
216 | callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None) | |
216 | if callable(callback): |
|
217 | if callable(callback): | |
217 | return callback(created_by=created_by, **user_dict) |
|
218 | return callback(created_by=created_by, **user_dict) | |
218 |
|
219 | |||
219 | return 0 |
|
220 | return 0 | |
220 |
|
221 | |||
221 |
|
222 | |||
222 | def log_delete_repository(repository_dict, deleted_by, **kwargs): |
|
223 | def log_delete_repository(repository_dict, deleted_by, **kwargs): | |
223 | """ |
|
224 | """ | |
224 | Post delete repository Hook. |
|
225 | Post delete repository Hook. | |
225 |
|
226 | |||
226 | :param repository: dict dump of repository object |
|
227 | :param repository: dict dump of repository object | |
227 | :param deleted_by: username who deleted the repository |
|
228 | :param deleted_by: username who deleted the repository | |
228 |
|
229 | |||
229 | available keys of repository_dict: |
|
230 | available keys of repository_dict: | |
230 |
|
231 | |||
231 | 'repo_type', |
|
232 | 'repo_type', | |
232 | 'description', |
|
233 | 'description', | |
233 | 'private', |
|
234 | 'private', | |
234 | 'created_on', |
|
235 | 'created_on', | |
235 | 'enable_downloads', |
|
236 | 'enable_downloads', | |
236 | 'repo_id', |
|
237 | 'repo_id', | |
237 | 'owner_id', |
|
238 | 'owner_id', | |
238 | 'enable_statistics', |
|
239 | 'enable_statistics', | |
239 | 'clone_uri', |
|
240 | 'clone_uri', | |
240 | 'fork_id', |
|
241 | 'fork_id', | |
241 | 'group_id', |
|
242 | 'group_id', | |
242 | 'repo_name' |
|
243 | 'repo_name' | |
243 |
|
244 | |||
244 | """ |
|
245 | """ | |
245 | from kallithea import EXTENSIONS |
|
246 | from kallithea import EXTENSIONS | |
246 | callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None) |
|
247 | callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None) | |
247 | if callable(callback): |
|
248 | if callable(callback): | |
248 | kw = {} |
|
249 | kw = {} | |
249 | kw.update(repository_dict) |
|
250 | kw.update(repository_dict) | |
250 | kw.update({'deleted_by': deleted_by, |
|
251 | kw.update({'deleted_by': deleted_by, | |
251 | 'deleted_on': time.time()}) |
|
252 | 'deleted_on': time.time()}) | |
252 | kw.update(kwargs) |
|
253 | kw.update(kwargs) | |
253 | return callback(**kw) |
|
254 | return callback(**kw) | |
254 |
|
255 | |||
255 | return 0 |
|
256 | return 0 | |
256 |
|
257 | |||
257 |
|
258 | |||
258 | def log_delete_user(user_dict, deleted_by, **kwargs): |
|
259 | def log_delete_user(user_dict, deleted_by, **kwargs): | |
259 | """ |
|
260 | """ | |
260 | Post delete user Hook. |
|
261 | Post delete user Hook. | |
261 |
|
262 | |||
262 | :param user_dict: dict dump of user object |
|
263 | :param user_dict: dict dump of user object | |
263 |
|
264 | |||
264 | available keys for user_dict: |
|
265 | available keys for user_dict: | |
265 |
|
266 | |||
266 | 'username', |
|
267 | 'username', | |
267 | 'full_name_or_username', |
|
268 | 'full_name_or_username', | |
268 | 'full_contact', |
|
269 | 'full_contact', | |
269 | 'user_id', |
|
270 | 'user_id', | |
270 | 'name', |
|
271 | 'name', | |
271 | 'firstname', |
|
272 | 'firstname', | |
272 | 'short_contact', |
|
273 | 'short_contact', | |
273 | 'admin', |
|
274 | 'admin', | |
274 | 'lastname', |
|
275 | 'lastname', | |
275 | 'ip_addresses', |
|
276 | 'ip_addresses', | |
276 | 'ldap_dn', |
|
277 | 'ldap_dn', | |
277 | 'email', |
|
278 | 'email', | |
278 | 'api_key', |
|
279 | 'api_key', | |
279 | 'last_login', |
|
280 | 'last_login', | |
280 | 'full_name', |
|
281 | 'full_name', | |
281 | 'active', |
|
282 | 'active', | |
282 | 'password', |
|
283 | 'password', | |
283 | 'emails', |
|
284 | 'emails', | |
284 |
|
285 | |||
285 | """ |
|
286 | """ | |
286 | from kallithea import EXTENSIONS |
|
287 | from kallithea import EXTENSIONS | |
287 | callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None) |
|
288 | callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None) | |
288 | if callable(callback): |
|
289 | if callable(callback): | |
289 | return callback(deleted_by=deleted_by, **user_dict) |
|
290 | return callback(deleted_by=deleted_by, **user_dict) | |
290 |
|
291 | |||
291 | return 0 |
|
292 | return 0 | |
292 |
|
293 | |||
293 |
|
294 | |||
294 | def _hook_environment(repo_path): |
|
295 | def _hook_environment(repo_path): | |
295 | """ |
|
296 | """ | |
296 | Create a light-weight environment for stand-alone scripts and return an UI and the |
|
297 | Create a light-weight environment for stand-alone scripts and return an UI and the | |
297 | db repository. |
|
298 | db repository. | |
298 |
|
299 | |||
299 | Git hooks are executed as subprocess of Git while Kallithea is waiting, and |
|
300 | Git hooks are executed as subprocess of Git while Kallithea is waiting, and | |
300 | they thus need enough info to be able to create an app environment and |
|
301 | they thus need enough info to be able to create an app environment and | |
301 | connect to the database. |
|
302 | connect to the database. | |
302 | """ |
|
303 | """ | |
303 | from paste.deploy import appconfig |
|
304 | from paste.deploy import appconfig | |
304 | from sqlalchemy import engine_from_config |
|
305 | from sqlalchemy import engine_from_config | |
305 | from kallithea.config.environment import load_environment |
|
306 | from kallithea.config.environment import load_environment | |
306 | from kallithea.model.base import init_model |
|
307 | from kallithea.model.base import init_model | |
307 |
|
308 | |||
308 | extras = get_hook_environment() |
|
309 | extras = get_hook_environment() | |
309 | ini_file_path = extras['config'] |
|
310 | ini_file_path = extras['config'] | |
310 | #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging |
|
311 | #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging | |
311 | app_conf = appconfig('config:%s' % ini_file_path) |
|
312 | app_conf = appconfig('config:%s' % ini_file_path) | |
312 | conf = load_environment(app_conf.global_conf, app_conf.local_conf) |
|
313 | conf = load_environment(app_conf.global_conf, app_conf.local_conf) | |
313 |
|
314 | |||
314 | setup_cache_regions(conf) |
|
315 | setup_cache_regions(conf) | |
315 |
|
316 | |||
316 | engine = engine_from_config(conf, 'sqlalchemy.') |
|
317 | engine = engine_from_config(conf, 'sqlalchemy.') | |
317 | init_model(engine) |
|
318 | init_model(engine) | |
318 |
|
319 | |||
319 | repo_path = safe_unicode(repo_path) |
|
320 | repo_path = safe_unicode(repo_path) | |
320 | # fix if it's not a bare repo |
|
321 | # fix if it's not a bare repo | |
321 | if repo_path.endswith(os.sep + '.git'): |
|
322 | if repo_path.endswith(os.sep + '.git'): | |
322 | repo_path = repo_path[:-5] |
|
323 | repo_path = repo_path[:-5] | |
323 |
|
324 | |||
324 | repo = Repository.get_by_full_path(repo_path) |
|
325 | repo = Repository.get_by_full_path(repo_path) | |
325 | if not repo: |
|
326 | if not repo: | |
326 | raise OSError('Repository %s not found in database' |
|
327 | raise OSError('Repository %s not found in database' | |
327 | % (safe_str(repo_path))) |
|
328 | % (safe_str(repo_path))) | |
328 |
|
329 | |||
329 | baseui = make_ui() |
|
330 | baseui = make_ui() | |
330 | return baseui, repo |
|
331 | return baseui, repo | |
331 |
|
332 | |||
332 |
|
333 | |||
333 | def handle_git_pre_receive(repo_path, git_stdin_lines): |
|
334 | def handle_git_pre_receive(repo_path, git_stdin_lines): | |
334 | """Called from Git pre-receive hook""" |
|
335 | """Called from Git pre-receive hook""" | |
335 | # Currently unused. TODO: remove? |
|
336 | # Currently unused. TODO: remove? | |
336 | return 0 |
|
337 | return 0 | |
337 |
|
338 | |||
338 |
|
339 | |||
339 | def handle_git_post_receive(repo_path, git_stdin_lines): |
|
340 | def handle_git_post_receive(repo_path, git_stdin_lines): | |
340 | """Called from Git post-receive hook""" |
|
341 | """Called from Git post-receive hook""" | |
341 | baseui, repo = _hook_environment(repo_path) |
|
342 | baseui, repo = _hook_environment(repo_path) | |
342 |
|
343 | |||
343 | # the post push hook should never use the cached instance |
|
344 | # the post push hook should never use the cached instance | |
344 | scm_repo = repo.scm_instance_no_cache() |
|
345 | scm_repo = repo.scm_instance_no_cache() | |
345 |
|
346 | |||
346 | rev_data = [] |
|
347 | rev_data = [] | |
347 | for l in git_stdin_lines: |
|
348 | for l in git_stdin_lines: | |
348 | old_rev, new_rev, ref = l.strip().split(' ') |
|
349 | old_rev, new_rev, ref = l.strip().split(' ') | |
349 | _ref_data = ref.split('/') |
|
350 | _ref_data = ref.split('/') | |
350 | if _ref_data[1] in ['tags', 'heads']: |
|
351 | if _ref_data[1] in ['tags', 'heads']: | |
351 | rev_data.append({'old_rev': old_rev, |
|
352 | rev_data.append({'old_rev': old_rev, | |
352 | 'new_rev': new_rev, |
|
353 | 'new_rev': new_rev, | |
353 | 'ref': ref, |
|
354 | 'ref': ref, | |
354 | 'type': _ref_data[1], |
|
355 | 'type': _ref_data[1], | |
355 | 'name': '/'.join(_ref_data[2:])}) |
|
356 | 'name': '/'.join(_ref_data[2:])}) | |
356 |
|
357 | |||
357 | git_revs = [] |
|
358 | git_revs = [] | |
358 | for push_ref in rev_data: |
|
359 | for push_ref in rev_data: | |
359 | _type = push_ref['type'] |
|
360 | _type = push_ref['type'] | |
360 | if _type == 'heads': |
|
361 | if _type == 'heads': | |
361 | if push_ref['old_rev'] == EmptyChangeset().raw_id: |
|
362 | if push_ref['old_rev'] == EmptyChangeset().raw_id: | |
362 | # update the symbolic ref if we push new repo |
|
363 | # update the symbolic ref if we push new repo | |
363 | if scm_repo.is_empty(): |
|
364 | if scm_repo.is_empty(): | |
364 | scm_repo._repo.refs.set_symbolic_ref( |
|
365 | scm_repo._repo.refs.set_symbolic_ref( | |
365 | b'HEAD', |
|
366 | b'HEAD', | |
366 | b'refs/heads/%s' % push_ref['name']) |
|
367 | b'refs/heads/%s' % push_ref['name']) | |
367 |
|
368 | |||
368 | # build exclude list without the ref |
|
369 | # build exclude list without the ref | |
369 | cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*'] |
|
370 | cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*'] | |
370 | stdout = scm_repo.run_git_command(cmd) |
|
371 | stdout = scm_repo.run_git_command(cmd) | |
371 | ref = push_ref['ref'] |
|
372 | ref = push_ref['ref'] | |
372 | heads = [head for head in stdout.splitlines() if head != ref] |
|
373 | heads = [head for head in stdout.splitlines() if head != ref] | |
373 | # now list the git revs while excluding from the list |
|
374 | # now list the git revs while excluding from the list | |
374 | cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H'] |
|
375 | cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H'] | |
375 | cmd.append('--not') |
|
376 | cmd.append('--not') | |
376 | cmd.extend(heads) # empty list is ok |
|
377 | cmd.extend(heads) # empty list is ok | |
377 | stdout = scm_repo.run_git_command(cmd) |
|
378 | stdout = scm_repo.run_git_command(cmd) | |
378 | git_revs += stdout.splitlines() |
|
379 | git_revs += stdout.splitlines() | |
379 |
|
380 | |||
380 | elif push_ref['new_rev'] == EmptyChangeset().raw_id: |
|
381 | elif push_ref['new_rev'] == EmptyChangeset().raw_id: | |
381 | # delete branch case |
|
382 | # delete branch case | |
382 | git_revs += ['delete_branch=>%s' % push_ref['name']] |
|
383 | git_revs += ['delete_branch=>%s' % push_ref['name']] | |
383 | else: |
|
384 | else: | |
384 | cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref, |
|
385 | cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref, | |
385 | '--reverse', '--pretty=format:%H'] |
|
386 | '--reverse', '--pretty=format:%H'] | |
386 | stdout = scm_repo.run_git_command(cmd) |
|
387 | stdout = scm_repo.run_git_command(cmd) | |
387 | git_revs += stdout.splitlines() |
|
388 | git_revs += stdout.splitlines() | |
388 |
|
389 | |||
389 | elif _type == 'tags': |
|
390 | elif _type == 'tags': | |
390 | git_revs += ['tag=>%s' % push_ref['name']] |
|
391 | git_revs += ['tag=>%s' % push_ref['name']] | |
391 |
|
392 | |||
392 | process_pushed_raw_ids(git_revs) |
|
393 | process_pushed_raw_ids(git_revs) | |
393 |
|
394 | |||
394 | return 0 |
|
395 | return 0 | |
395 |
|
396 | |||
396 |
|
397 | |||
397 | # Almost exactly like Mercurial contrib/hg-ssh: |
|
398 | # Almost exactly like Mercurial contrib/hg-ssh: | |
398 | def rejectpush(ui, **kwargs): |
|
399 | def rejectpush(ui, **kwargs): | |
399 | """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos""" |
|
400 | """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos""" | |
400 | ex = get_hook_environment() |
|
401 | ex = get_hook_environment() | |
401 | ui.warn((b"Push access to %r denied\n") % safe_str(ex.repository)) |
|
402 | ui.warn((b"Push access to %r denied\n") % safe_str(ex.repository)) | |
402 | return 1 |
|
403 | return 1 |
@@ -1,148 +1,149 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.middleware.simplehg |
|
15 | kallithea.lib.middleware.simplehg | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.). |
|
18 | SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.). | |
19 | It's implemented with basic auth function |
|
19 | It's implemented with basic auth function | |
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Apr 28, 2010 |
|
23 | :created_on: Apr 28, 2010 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 |
|
27 | |||
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | import logging |
|
31 | import logging | |
32 | import os |
|
32 | import os | |
33 | import urllib |
|
33 | import urllib | |
34 |
|
34 | |||
|
35 | import mercurial.hgweb | |||
|
36 | ||||
35 | from kallithea.lib.base import BaseVCSController, get_path_info |
|
37 | from kallithea.lib.base import BaseVCSController, get_path_info | |
36 | from kallithea.lib.utils import make_ui |
|
38 | from kallithea.lib.utils import make_ui | |
37 | from kallithea.lib.utils2 import safe_str, safe_unicode |
|
39 | from kallithea.lib.utils2 import safe_str, safe_unicode | |
38 | from kallithea.lib.vcs.utils.hgcompat import hgweb_mod |
|
|||
39 |
|
40 | |||
40 |
|
41 | |||
41 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
42 |
|
43 | |||
43 |
|
44 | |||
44 | def get_header_hgarg(environ): |
|
45 | def get_header_hgarg(environ): | |
45 | """Decode the special Mercurial encoding of big requests over multiple headers. |
|
46 | """Decode the special Mercurial encoding of big requests over multiple headers. | |
46 | >>> get_header_hgarg({}) |
|
47 | >>> get_header_hgarg({}) | |
47 | '' |
|
48 | '' | |
48 | >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'}) |
|
49 | >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'}) | |
49 | 'ab+c %20' |
|
50 | 'ab+c %20' | |
50 | """ |
|
51 | """ | |
51 | chunks = [] |
|
52 | chunks = [] | |
52 | i = 1 |
|
53 | i = 1 | |
53 | while True: |
|
54 | while True: | |
54 | v = environ.get('HTTP_X_HGARG_%d' % i) |
|
55 | v = environ.get('HTTP_X_HGARG_%d' % i) | |
55 | if v is None: |
|
56 | if v is None: | |
56 | break |
|
57 | break | |
57 | chunks.append(v) |
|
58 | chunks.append(v) | |
58 | i += 1 |
|
59 | i += 1 | |
59 | return ''.join(chunks) |
|
60 | return ''.join(chunks) | |
60 |
|
61 | |||
61 |
|
62 | |||
62 | cmd_mapping = { |
|
63 | cmd_mapping = { | |
63 | # 'batch' is not in this list - it is handled explicitly |
|
64 | # 'batch' is not in this list - it is handled explicitly | |
64 | 'between': 'pull', |
|
65 | 'between': 'pull', | |
65 | 'branches': 'pull', |
|
66 | 'branches': 'pull', | |
66 | 'branchmap': 'pull', |
|
67 | 'branchmap': 'pull', | |
67 | 'capabilities': 'pull', |
|
68 | 'capabilities': 'pull', | |
68 | 'changegroup': 'pull', |
|
69 | 'changegroup': 'pull', | |
69 | 'changegroupsubset': 'pull', |
|
70 | 'changegroupsubset': 'pull', | |
70 | 'changesetdata': 'pull', |
|
71 | 'changesetdata': 'pull', | |
71 | 'clonebundles': 'pull', |
|
72 | 'clonebundles': 'pull', | |
72 | 'debugwireargs': 'pull', |
|
73 | 'debugwireargs': 'pull', | |
73 | 'filedata': 'pull', |
|
74 | 'filedata': 'pull', | |
74 | 'getbundle': 'pull', |
|
75 | 'getbundle': 'pull', | |
75 | 'getlfile': 'pull', |
|
76 | 'getlfile': 'pull', | |
76 | 'heads': 'pull', |
|
77 | 'heads': 'pull', | |
77 | 'hello': 'pull', |
|
78 | 'hello': 'pull', | |
78 | 'known': 'pull', |
|
79 | 'known': 'pull', | |
79 | 'lheads': 'pull', |
|
80 | 'lheads': 'pull', | |
80 | 'listkeys': 'pull', |
|
81 | 'listkeys': 'pull', | |
81 | 'lookup': 'pull', |
|
82 | 'lookup': 'pull', | |
82 | 'manifestdata': 'pull', |
|
83 | 'manifestdata': 'pull', | |
83 | 'narrow_widen': 'pull', |
|
84 | 'narrow_widen': 'pull', | |
84 | 'protocaps': 'pull', |
|
85 | 'protocaps': 'pull', | |
85 | 'statlfile': 'pull', |
|
86 | 'statlfile': 'pull', | |
86 | 'stream_out': 'pull', |
|
87 | 'stream_out': 'pull', | |
87 | 'pushkey': 'push', |
|
88 | 'pushkey': 'push', | |
88 | 'putlfile': 'push', |
|
89 | 'putlfile': 'push', | |
89 | 'unbundle': 'push', |
|
90 | 'unbundle': 'push', | |
90 | } |
|
91 | } | |
91 |
|
92 | |||
92 |
|
93 | |||
93 | class SimpleHg(BaseVCSController): |
|
94 | class SimpleHg(BaseVCSController): | |
94 |
|
95 | |||
95 | scm_alias = 'hg' |
|
96 | scm_alias = 'hg' | |
96 |
|
97 | |||
97 | @classmethod |
|
98 | @classmethod | |
98 | def parse_request(cls, environ): |
|
99 | def parse_request(cls, environ): | |
99 | http_accept = environ.get('HTTP_ACCEPT', '') |
|
100 | http_accept = environ.get('HTTP_ACCEPT', '') | |
100 | if not http_accept.startswith('application/mercurial'): |
|
101 | if not http_accept.startswith('application/mercurial'): | |
101 | return None |
|
102 | return None | |
102 | path_info = get_path_info(environ) |
|
103 | path_info = get_path_info(environ) | |
103 | if not path_info.startswith('/'): # it must! |
|
104 | if not path_info.startswith('/'): # it must! | |
104 | return None |
|
105 | return None | |
105 |
|
106 | |||
106 | class parsed_request(object): |
|
107 | class parsed_request(object): | |
107 | repo_name = safe_unicode(path_info[1:].rstrip('/')) |
|
108 | repo_name = safe_unicode(path_info[1:].rstrip('/')) | |
108 |
|
109 | |||
109 | query_string = environ['QUERY_STRING'] |
|
110 | query_string = environ['QUERY_STRING'] | |
110 |
|
111 | |||
111 | action = None |
|
112 | action = None | |
112 | for qry in query_string.split('&'): |
|
113 | for qry in query_string.split('&'): | |
113 | parts = qry.split('=', 1) |
|
114 | parts = qry.split('=', 1) | |
114 | if len(parts) == 2 and parts[0] == 'cmd': |
|
115 | if len(parts) == 2 and parts[0] == 'cmd': | |
115 | cmd = parts[1] |
|
116 | cmd = parts[1] | |
116 | if cmd == 'batch': |
|
117 | if cmd == 'batch': | |
117 | hgarg = get_header_hgarg(environ) |
|
118 | hgarg = get_header_hgarg(environ) | |
118 | if not hgarg.startswith('cmds='): |
|
119 | if not hgarg.startswith('cmds='): | |
119 | action = 'push' # paranoid and safe |
|
120 | action = 'push' # paranoid and safe | |
120 | break |
|
121 | break | |
121 | action = 'pull' |
|
122 | action = 'pull' | |
122 | for cmd_arg in hgarg[5:].split(';'): |
|
123 | for cmd_arg in hgarg[5:].split(';'): | |
123 | cmd, _args = urllib.unquote_plus(cmd_arg).split(' ', 1) |
|
124 | cmd, _args = urllib.unquote_plus(cmd_arg).split(' ', 1) | |
124 | op = cmd_mapping.get(cmd, 'push') |
|
125 | op = cmd_mapping.get(cmd, 'push') | |
125 | if op != 'pull': |
|
126 | if op != 'pull': | |
126 | assert op == 'push' |
|
127 | assert op == 'push' | |
127 | action = 'push' |
|
128 | action = 'push' | |
128 | break |
|
129 | break | |
129 | else: |
|
130 | else: | |
130 | action = cmd_mapping.get(cmd, 'push') |
|
131 | action = cmd_mapping.get(cmd, 'push') | |
131 | break # only process one cmd |
|
132 | break # only process one cmd | |
132 |
|
133 | |||
133 | return parsed_request |
|
134 | return parsed_request | |
134 |
|
135 | |||
135 | def _make_app(self, parsed_request): |
|
136 | def _make_app(self, parsed_request): | |
136 | """ |
|
137 | """ | |
137 | Make an hgweb wsgi application. |
|
138 | Make an hgweb wsgi application. | |
138 | """ |
|
139 | """ | |
139 | str_repo_name = safe_str(parsed_request.repo_name) |
|
140 | str_repo_name = safe_str(parsed_request.repo_name) | |
140 | repo_path = os.path.join(safe_str(self.basepath), str_repo_name) |
|
141 | repo_path = os.path.join(safe_str(self.basepath), str_repo_name) | |
141 | baseui = make_ui(repo_path=repo_path) |
|
142 | baseui = make_ui(repo_path=repo_path) | |
142 |
hgweb_app = hgweb |
|
143 | hgweb_app = mercurial.hgweb.hgweb(repo_path, name=str_repo_name, baseui=baseui) | |
143 |
|
144 | |||
144 | def wrapper_app(environ, start_response): |
|
145 | def wrapper_app(environ, start_response): | |
145 |
environ['REPO_NAME'] = str_repo_name # used by hgweb |
|
146 | environ['REPO_NAME'] = str_repo_name # used by mercurial.hgweb.hgweb | |
146 | return hgweb_app(environ, start_response) |
|
147 | return hgweb_app(environ, start_response) | |
147 |
|
148 | |||
148 | return wrapper_app |
|
149 | return wrapper_app |
@@ -1,678 +1,679 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.utils |
|
15 | kallithea.lib.utils | |
16 | ~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Utilities library for Kallithea |
|
18 | Utilities library for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 18, 2010 |
|
22 | :created_on: Apr 18, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import datetime |
|
28 | import datetime | |
29 | import logging |
|
29 | import logging | |
30 | import os |
|
30 | import os | |
31 | import re |
|
31 | import re | |
32 | import sys |
|
32 | import sys | |
33 | import traceback |
|
33 | import traceback | |
34 | from distutils.version import StrictVersion |
|
34 | from distutils.version import StrictVersion | |
35 |
|
35 | |||
36 | import beaker |
|
36 | import beaker | |
|
37 | import mercurial.config | |||
|
38 | import mercurial.ui | |||
37 | from beaker.cache import _cache_decorate |
|
39 | from beaker.cache import _cache_decorate | |
38 | from tg.i18n import ugettext as _ |
|
40 | from tg.i18n import ugettext as _ | |
39 |
|
41 | |||
40 | import kallithea.config.conf |
|
42 | import kallithea.config.conf | |
41 | from kallithea.lib.exceptions import HgsubversionImportError |
|
43 | from kallithea.lib.exceptions import HgsubversionImportError | |
42 | from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str, safe_unicode |
|
44 | from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str, safe_unicode | |
43 | from kallithea.lib.vcs.backends.git.repository import GitRepository |
|
45 | from kallithea.lib.vcs.backends.git.repository import GitRepository | |
44 | from kallithea.lib.vcs.backends.hg.repository import MercurialRepository |
|
46 | from kallithea.lib.vcs.backends.hg.repository import MercurialRepository | |
45 | from kallithea.lib.vcs.conf import settings |
|
47 | from kallithea.lib.vcs.conf import settings | |
46 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError |
|
48 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError | |
47 | from kallithea.lib.vcs.utils.fakemod import create_module |
|
49 | from kallithea.lib.vcs.utils.fakemod import create_module | |
48 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
50 | from kallithea.lib.vcs.utils.helpers import get_scm | |
49 | from kallithea.lib.vcs.utils.hgcompat import config, ui |
|
|||
50 | from kallithea.model import meta |
|
51 | from kallithea.model import meta | |
51 | from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog |
|
52 | from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog | |
52 |
|
53 | |||
53 |
|
54 | |||
54 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
55 |
|
56 | |||
56 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*') |
|
57 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*') | |
57 |
|
58 | |||
58 |
|
59 | |||
59 | #============================================================================== |
|
60 | #============================================================================== | |
60 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
61 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS | |
61 | #============================================================================== |
|
62 | #============================================================================== | |
62 | def get_repo_slug(request): |
|
63 | def get_repo_slug(request): | |
63 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
64 | _repo = request.environ['pylons.routes_dict'].get('repo_name') | |
64 | if _repo: |
|
65 | if _repo: | |
65 | _repo = _repo.rstrip('/') |
|
66 | _repo = _repo.rstrip('/') | |
66 | return _repo |
|
67 | return _repo | |
67 |
|
68 | |||
68 |
|
69 | |||
69 | def get_repo_group_slug(request): |
|
70 | def get_repo_group_slug(request): | |
70 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
71 | _group = request.environ['pylons.routes_dict'].get('group_name') | |
71 | if _group: |
|
72 | if _group: | |
72 | _group = _group.rstrip('/') |
|
73 | _group = _group.rstrip('/') | |
73 | return _group |
|
74 | return _group | |
74 |
|
75 | |||
75 |
|
76 | |||
76 | def get_user_group_slug(request): |
|
77 | def get_user_group_slug(request): | |
77 | _group = request.environ['pylons.routes_dict'].get('id') |
|
78 | _group = request.environ['pylons.routes_dict'].get('id') | |
78 | _group = UserGroup.get(_group) |
|
79 | _group = UserGroup.get(_group) | |
79 | if _group: |
|
80 | if _group: | |
80 | return _group.users_group_name |
|
81 | return _group.users_group_name | |
81 | return None |
|
82 | return None | |
82 |
|
83 | |||
83 |
|
84 | |||
84 | def _get_permanent_id(s): |
|
85 | def _get_permanent_id(s): | |
85 | """Helper for decoding stable URLs with repo ID. For a string like '_123' |
|
86 | """Helper for decoding stable URLs with repo ID. For a string like '_123' | |
86 | return 123. |
|
87 | return 123. | |
87 | """ |
|
88 | """ | |
88 | by_id_match = re.match(r'^_(\d+)$', s) |
|
89 | by_id_match = re.match(r'^_(\d+)$', s) | |
89 | if by_id_match is None: |
|
90 | if by_id_match is None: | |
90 | return None |
|
91 | return None | |
91 | return int(by_id_match.group(1)) |
|
92 | return int(by_id_match.group(1)) | |
92 |
|
93 | |||
93 |
|
94 | |||
94 | def fix_repo_id_name(path): |
|
95 | def fix_repo_id_name(path): | |
95 | """ |
|
96 | """ | |
96 | Rewrite repo_name for _<ID> permanent URLs. |
|
97 | Rewrite repo_name for _<ID> permanent URLs. | |
97 |
|
98 | |||
98 | Given a path, if the first path element is like _<ID>, return the path with |
|
99 | Given a path, if the first path element is like _<ID>, return the path with | |
99 | this part expanded to the corresponding full repo name, else return the |
|
100 | this part expanded to the corresponding full repo name, else return the | |
100 | provided path. |
|
101 | provided path. | |
101 | """ |
|
102 | """ | |
102 | first, rest = path, '' |
|
103 | first, rest = path, '' | |
103 | if '/' in path: |
|
104 | if '/' in path: | |
104 | first, rest_ = path.split('/', 1) |
|
105 | first, rest_ = path.split('/', 1) | |
105 | rest = '/' + rest_ |
|
106 | rest = '/' + rest_ | |
106 | repo_id = _get_permanent_id(first) |
|
107 | repo_id = _get_permanent_id(first) | |
107 | if repo_id is not None: |
|
108 | if repo_id is not None: | |
108 | repo = Repository.get(repo_id) |
|
109 | repo = Repository.get(repo_id) | |
109 | if repo is not None: |
|
110 | if repo is not None: | |
110 | return repo.repo_name + rest |
|
111 | return repo.repo_name + rest | |
111 | return path |
|
112 | return path | |
112 |
|
113 | |||
113 |
|
114 | |||
114 | def action_logger(user, action, repo, ipaddr='', commit=False): |
|
115 | def action_logger(user, action, repo, ipaddr='', commit=False): | |
115 | """ |
|
116 | """ | |
116 | Action logger for various actions made by users |
|
117 | Action logger for various actions made by users | |
117 |
|
118 | |||
118 | :param user: user that made this action, can be a unique username string or |
|
119 | :param user: user that made this action, can be a unique username string or | |
119 | object containing user_id attribute |
|
120 | object containing user_id attribute | |
120 | :param action: action to log, should be on of predefined unique actions for |
|
121 | :param action: action to log, should be on of predefined unique actions for | |
121 | easy translations |
|
122 | easy translations | |
122 | :param repo: string name of repository or object containing repo_id, |
|
123 | :param repo: string name of repository or object containing repo_id, | |
123 | that action was made on |
|
124 | that action was made on | |
124 | :param ipaddr: optional IP address from what the action was made |
|
125 | :param ipaddr: optional IP address from what the action was made | |
125 |
|
126 | |||
126 | """ |
|
127 | """ | |
127 |
|
128 | |||
128 | # if we don't get explicit IP address try to get one from registered user |
|
129 | # if we don't get explicit IP address try to get one from registered user | |
129 | # in tmpl context var |
|
130 | # in tmpl context var | |
130 | if not ipaddr: |
|
131 | if not ipaddr: | |
131 | ipaddr = getattr(get_current_authuser(), 'ip_addr', '') |
|
132 | ipaddr = getattr(get_current_authuser(), 'ip_addr', '') | |
132 |
|
133 | |||
133 | if getattr(user, 'user_id', None): |
|
134 | if getattr(user, 'user_id', None): | |
134 | user_obj = User.get(user.user_id) |
|
135 | user_obj = User.get(user.user_id) | |
135 | elif isinstance(user, basestring): |
|
136 | elif isinstance(user, basestring): | |
136 | user_obj = User.get_by_username(user) |
|
137 | user_obj = User.get_by_username(user) | |
137 | else: |
|
138 | else: | |
138 | raise Exception('You have to provide a user object or a username') |
|
139 | raise Exception('You have to provide a user object or a username') | |
139 |
|
140 | |||
140 | if getattr(repo, 'repo_id', None): |
|
141 | if getattr(repo, 'repo_id', None): | |
141 | repo_obj = Repository.get(repo.repo_id) |
|
142 | repo_obj = Repository.get(repo.repo_id) | |
142 | repo_name = repo_obj.repo_name |
|
143 | repo_name = repo_obj.repo_name | |
143 | elif isinstance(repo, basestring): |
|
144 | elif isinstance(repo, basestring): | |
144 | repo_name = repo.lstrip('/') |
|
145 | repo_name = repo.lstrip('/') | |
145 | repo_obj = Repository.get_by_repo_name(repo_name) |
|
146 | repo_obj = Repository.get_by_repo_name(repo_name) | |
146 | else: |
|
147 | else: | |
147 | repo_obj = None |
|
148 | repo_obj = None | |
148 | repo_name = u'' |
|
149 | repo_name = u'' | |
149 |
|
150 | |||
150 | user_log = UserLog() |
|
151 | user_log = UserLog() | |
151 | user_log.user_id = user_obj.user_id |
|
152 | user_log.user_id = user_obj.user_id | |
152 | user_log.username = user_obj.username |
|
153 | user_log.username = user_obj.username | |
153 | user_log.action = safe_unicode(action) |
|
154 | user_log.action = safe_unicode(action) | |
154 |
|
155 | |||
155 | user_log.repository = repo_obj |
|
156 | user_log.repository = repo_obj | |
156 | user_log.repository_name = repo_name |
|
157 | user_log.repository_name = repo_name | |
157 |
|
158 | |||
158 | user_log.action_date = datetime.datetime.now() |
|
159 | user_log.action_date = datetime.datetime.now() | |
159 | user_log.user_ip = ipaddr |
|
160 | user_log.user_ip = ipaddr | |
160 | meta.Session().add(user_log) |
|
161 | meta.Session().add(user_log) | |
161 |
|
162 | |||
162 | log.info('Logging action:%s on %s by user:%s ip:%s', |
|
163 | log.info('Logging action:%s on %s by user:%s ip:%s', | |
163 | action, safe_unicode(repo), user_obj, ipaddr) |
|
164 | action, safe_unicode(repo), user_obj, ipaddr) | |
164 | if commit: |
|
165 | if commit: | |
165 | meta.Session().commit() |
|
166 | meta.Session().commit() | |
166 |
|
167 | |||
167 |
|
168 | |||
168 | def get_filesystem_repos(path): |
|
169 | def get_filesystem_repos(path): | |
169 | """ |
|
170 | """ | |
170 | Scans given path for repos and return (name,(type,path)) tuple |
|
171 | Scans given path for repos and return (name,(type,path)) tuple | |
171 |
|
172 | |||
172 | :param path: path to scan for repositories |
|
173 | :param path: path to scan for repositories | |
173 | :param recursive: recursive search and return names with subdirs in front |
|
174 | :param recursive: recursive search and return names with subdirs in front | |
174 | """ |
|
175 | """ | |
175 |
|
176 | |||
176 | # remove ending slash for better results |
|
177 | # remove ending slash for better results | |
177 | path = safe_str(path.rstrip(os.sep)) |
|
178 | path = safe_str(path.rstrip(os.sep)) | |
178 | log.debug('now scanning in %s', path) |
|
179 | log.debug('now scanning in %s', path) | |
179 |
|
180 | |||
180 | def isdir(*n): |
|
181 | def isdir(*n): | |
181 | return os.path.isdir(os.path.join(*n)) |
|
182 | return os.path.isdir(os.path.join(*n)) | |
182 |
|
183 | |||
183 | for root, dirs, _files in os.walk(path): |
|
184 | for root, dirs, _files in os.walk(path): | |
184 | recurse_dirs = [] |
|
185 | recurse_dirs = [] | |
185 | for subdir in dirs: |
|
186 | for subdir in dirs: | |
186 | # skip removed repos |
|
187 | # skip removed repos | |
187 | if REMOVED_REPO_PAT.match(subdir): |
|
188 | if REMOVED_REPO_PAT.match(subdir): | |
188 | continue |
|
189 | continue | |
189 |
|
190 | |||
190 | # skip .<something> dirs TODO: rly? then we should prevent creating them ... |
|
191 | # skip .<something> dirs TODO: rly? then we should prevent creating them ... | |
191 | if subdir.startswith('.'): |
|
192 | if subdir.startswith('.'): | |
192 | continue |
|
193 | continue | |
193 |
|
194 | |||
194 | cur_path = os.path.join(root, subdir) |
|
195 | cur_path = os.path.join(root, subdir) | |
195 | if isdir(cur_path, '.git'): |
|
196 | if isdir(cur_path, '.git'): | |
196 | log.warning('ignoring non-bare Git repo: %s', cur_path) |
|
197 | log.warning('ignoring non-bare Git repo: %s', cur_path) | |
197 | continue |
|
198 | continue | |
198 |
|
199 | |||
199 | if (isdir(cur_path, '.hg') or |
|
200 | if (isdir(cur_path, '.hg') or | |
200 | isdir(cur_path, '.svn') or |
|
201 | isdir(cur_path, '.svn') or | |
201 | isdir(cur_path, 'objects') and (isdir(cur_path, 'refs') or |
|
202 | isdir(cur_path, 'objects') and (isdir(cur_path, 'refs') or | |
202 | os.path.isfile(os.path.join(cur_path, 'packed-refs')))): |
|
203 | os.path.isfile(os.path.join(cur_path, 'packed-refs')))): | |
203 |
|
204 | |||
204 | if not os.access(cur_path, os.R_OK) or not os.access(cur_path, os.X_OK): |
|
205 | if not os.access(cur_path, os.R_OK) or not os.access(cur_path, os.X_OK): | |
205 | log.warning('ignoring repo path without access: %s', cur_path) |
|
206 | log.warning('ignoring repo path without access: %s', cur_path) | |
206 | continue |
|
207 | continue | |
207 |
|
208 | |||
208 | if not os.access(cur_path, os.W_OK): |
|
209 | if not os.access(cur_path, os.W_OK): | |
209 | log.warning('repo path without write access: %s', cur_path) |
|
210 | log.warning('repo path without write access: %s', cur_path) | |
210 |
|
211 | |||
211 | try: |
|
212 | try: | |
212 | scm_info = get_scm(cur_path) |
|
213 | scm_info = get_scm(cur_path) | |
213 | assert cur_path.startswith(path) |
|
214 | assert cur_path.startswith(path) | |
214 | repo_path = cur_path[len(path) + 1:] |
|
215 | repo_path = cur_path[len(path) + 1:] | |
215 | yield repo_path, scm_info |
|
216 | yield repo_path, scm_info | |
216 | continue # no recursion |
|
217 | continue # no recursion | |
217 | except VCSError: |
|
218 | except VCSError: | |
218 | # We should perhaps ignore such broken repos, but especially |
|
219 | # We should perhaps ignore such broken repos, but especially | |
219 | # the bare git detection is unreliable so we dive into it |
|
220 | # the bare git detection is unreliable so we dive into it | |
220 | pass |
|
221 | pass | |
221 |
|
222 | |||
222 | recurse_dirs.append(subdir) |
|
223 | recurse_dirs.append(subdir) | |
223 |
|
224 | |||
224 | dirs[:] = recurse_dirs |
|
225 | dirs[:] = recurse_dirs | |
225 |
|
226 | |||
226 |
|
227 | |||
227 | def is_valid_repo_uri(repo_type, url, ui): |
|
228 | def is_valid_repo_uri(repo_type, url, ui): | |
228 | """Check if the url seems like a valid remote repo location - raise an Exception if any problems""" |
|
229 | """Check if the url seems like a valid remote repo location - raise an Exception if any problems""" | |
229 | if repo_type == 'hg': |
|
230 | if repo_type == 'hg': | |
230 | if url.startswith('http') or url.startswith('ssh'): |
|
231 | if url.startswith('http') or url.startswith('ssh'): | |
231 | # initially check if it's at least the proper URL |
|
232 | # initially check if it's at least the proper URL | |
232 | # or does it pass basic auth |
|
233 | # or does it pass basic auth | |
233 | MercurialRepository._check_url(url, ui) |
|
234 | MercurialRepository._check_url(url, ui) | |
234 | elif url.startswith('svn+http'): |
|
235 | elif url.startswith('svn+http'): | |
235 | try: |
|
236 | try: | |
236 | from hgsubversion.svnrepo import svnremoterepo |
|
237 | from hgsubversion.svnrepo import svnremoterepo | |
237 | except ImportError: |
|
238 | except ImportError: | |
238 | raise HgsubversionImportError(_('Unable to activate hgsubversion support. ' |
|
239 | raise HgsubversionImportError(_('Unable to activate hgsubversion support. ' | |
239 | 'The "hgsubversion" library is missing')) |
|
240 | 'The "hgsubversion" library is missing')) | |
240 | svnremoterepo(ui, url).svn.uuid |
|
241 | svnremoterepo(ui, url).svn.uuid | |
241 | elif url.startswith('git+http'): |
|
242 | elif url.startswith('git+http'): | |
242 | raise NotImplementedError() |
|
243 | raise NotImplementedError() | |
243 | else: |
|
244 | else: | |
244 | raise Exception('URI %s not allowed' % (url,)) |
|
245 | raise Exception('URI %s not allowed' % (url,)) | |
245 |
|
246 | |||
246 | elif repo_type == 'git': |
|
247 | elif repo_type == 'git': | |
247 | if url.startswith('http') or url.startswith('git'): |
|
248 | if url.startswith('http') or url.startswith('git'): | |
248 | # initially check if it's at least the proper URL |
|
249 | # initially check if it's at least the proper URL | |
249 | # or does it pass basic auth |
|
250 | # or does it pass basic auth | |
250 | GitRepository._check_url(url) |
|
251 | GitRepository._check_url(url) | |
251 | elif url.startswith('svn+http'): |
|
252 | elif url.startswith('svn+http'): | |
252 | raise NotImplementedError() |
|
253 | raise NotImplementedError() | |
253 | elif url.startswith('hg+http'): |
|
254 | elif url.startswith('hg+http'): | |
254 | raise NotImplementedError() |
|
255 | raise NotImplementedError() | |
255 | else: |
|
256 | else: | |
256 | raise Exception('URI %s not allowed' % (url)) |
|
257 | raise Exception('URI %s not allowed' % (url)) | |
257 |
|
258 | |||
258 |
|
259 | |||
259 | def is_valid_repo(repo_name, base_path, scm=None): |
|
260 | def is_valid_repo(repo_name, base_path, scm=None): | |
260 | """ |
|
261 | """ | |
261 | Returns True if given path is a valid repository False otherwise. |
|
262 | Returns True if given path is a valid repository False otherwise. | |
262 | If scm param is given also compare if given scm is the same as expected |
|
263 | If scm param is given also compare if given scm is the same as expected | |
263 | from scm parameter |
|
264 | from scm parameter | |
264 |
|
265 | |||
265 | :param repo_name: |
|
266 | :param repo_name: | |
266 | :param base_path: |
|
267 | :param base_path: | |
267 | :param scm: |
|
268 | :param scm: | |
268 |
|
269 | |||
269 | :return True: if given path is a valid repository |
|
270 | :return True: if given path is a valid repository | |
270 | """ |
|
271 | """ | |
271 | # TODO: paranoid security checks? |
|
272 | # TODO: paranoid security checks? | |
272 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
273 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) | |
273 |
|
274 | |||
274 | try: |
|
275 | try: | |
275 | scm_ = get_scm(full_path) |
|
276 | scm_ = get_scm(full_path) | |
276 | if scm: |
|
277 | if scm: | |
277 | return scm_[0] == scm |
|
278 | return scm_[0] == scm | |
278 | return True |
|
279 | return True | |
279 | except VCSError: |
|
280 | except VCSError: | |
280 | return False |
|
281 | return False | |
281 |
|
282 | |||
282 |
|
283 | |||
283 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
284 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): | |
284 | """ |
|
285 | """ | |
285 | Returns True if given path is a repository group False otherwise |
|
286 | Returns True if given path is a repository group False otherwise | |
286 |
|
287 | |||
287 | :param repo_name: |
|
288 | :param repo_name: | |
288 | :param base_path: |
|
289 | :param base_path: | |
289 | """ |
|
290 | """ | |
290 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
291 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) | |
291 |
|
292 | |||
292 | # check if it's not a repo |
|
293 | # check if it's not a repo | |
293 | if is_valid_repo(repo_group_name, base_path): |
|
294 | if is_valid_repo(repo_group_name, base_path): | |
294 | return False |
|
295 | return False | |
295 |
|
296 | |||
296 | try: |
|
297 | try: | |
297 | # we need to check bare git repos at higher level |
|
298 | # we need to check bare git repos at higher level | |
298 | # since we might match branches/hooks/info/objects or possible |
|
299 | # since we might match branches/hooks/info/objects or possible | |
299 | # other things inside bare git repo |
|
300 | # other things inside bare git repo | |
300 | get_scm(os.path.dirname(full_path)) |
|
301 | get_scm(os.path.dirname(full_path)) | |
301 | return False |
|
302 | return False | |
302 | except VCSError: |
|
303 | except VCSError: | |
303 | pass |
|
304 | pass | |
304 |
|
305 | |||
305 | # check if it's a valid path |
|
306 | # check if it's a valid path | |
306 | if skip_path_check or os.path.isdir(full_path): |
|
307 | if skip_path_check or os.path.isdir(full_path): | |
307 | return True |
|
308 | return True | |
308 |
|
309 | |||
309 | return False |
|
310 | return False | |
310 |
|
311 | |||
311 |
|
312 | |||
312 | # propagated from mercurial documentation |
|
313 | # propagated from mercurial documentation | |
313 | ui_sections = ['alias', 'auth', |
|
314 | ui_sections = ['alias', 'auth', | |
314 | 'decode/encode', 'defaults', |
|
315 | 'decode/encode', 'defaults', | |
315 | 'diff', 'email', |
|
316 | 'diff', 'email', | |
316 | 'extensions', 'format', |
|
317 | 'extensions', 'format', | |
317 | 'merge-patterns', 'merge-tools', |
|
318 | 'merge-patterns', 'merge-tools', | |
318 | 'hooks', 'http_proxy', |
|
319 | 'hooks', 'http_proxy', | |
319 | 'smtp', 'patch', |
|
320 | 'smtp', 'patch', | |
320 | 'paths', 'profiling', |
|
321 | 'paths', 'profiling', | |
321 | 'server', 'trusted', |
|
322 | 'server', 'trusted', | |
322 | 'ui', 'web', ] |
|
323 | 'ui', 'web', ] | |
323 |
|
324 | |||
324 |
|
325 | |||
325 | def make_ui(repo_path=None): |
|
326 | def make_ui(repo_path=None): | |
326 | """ |
|
327 | """ | |
327 | Create an Mercurial 'ui' object based on database Ui settings, possibly |
|
328 | Create an Mercurial 'ui' object based on database Ui settings, possibly | |
328 | augmenting with content from a hgrc file. |
|
329 | augmenting with content from a hgrc file. | |
329 | """ |
|
330 | """ | |
330 | baseui = ui.ui() |
|
331 | baseui = mercurial.ui.ui() | |
331 |
|
332 | |||
332 | # clean the baseui object |
|
333 | # clean the baseui object | |
333 | baseui._ocfg = config.config() |
|
334 | baseui._ocfg = mercurial.config.config() | |
334 | baseui._ucfg = config.config() |
|
335 | baseui._ucfg = mercurial.config.config() | |
335 | baseui._tcfg = config.config() |
|
336 | baseui._tcfg = mercurial.config.config() | |
336 |
|
337 | |||
337 | sa = meta.Session() |
|
338 | sa = meta.Session() | |
338 | for ui_ in sa.query(Ui).all(): |
|
339 | for ui_ in sa.query(Ui).all(): | |
339 | if ui_.ui_active: |
|
340 | if ui_.ui_active: | |
340 | log.debug('config from db: [%s] %s=%r', ui_.ui_section, |
|
341 | log.debug('config from db: [%s] %s=%r', ui_.ui_section, | |
341 | ui_.ui_key, ui_.ui_value) |
|
342 | ui_.ui_key, ui_.ui_value) | |
342 | baseui.setconfig(ascii_bytes(ui_.ui_section), ascii_bytes(ui_.ui_key), |
|
343 | baseui.setconfig(ascii_bytes(ui_.ui_section), ascii_bytes(ui_.ui_key), | |
343 | b'' if ui_.ui_value is None else safe_bytes(ui_.ui_value)) |
|
344 | b'' if ui_.ui_value is None else safe_bytes(ui_.ui_value)) | |
344 |
|
345 | |||
345 | # force set push_ssl requirement to False, Kallithea handles that |
|
346 | # force set push_ssl requirement to False, Kallithea handles that | |
346 | baseui.setconfig(b'web', b'push_ssl', False) |
|
347 | baseui.setconfig(b'web', b'push_ssl', False) | |
347 | baseui.setconfig(b'web', b'allow_push', b'*') |
|
348 | baseui.setconfig(b'web', b'allow_push', b'*') | |
348 | # prevent interactive questions for ssh password / passphrase |
|
349 | # prevent interactive questions for ssh password / passphrase | |
349 | ssh = baseui.config(b'ui', b'ssh', default=b'ssh') |
|
350 | ssh = baseui.config(b'ui', b'ssh', default=b'ssh') | |
350 | baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh) |
|
351 | baseui.setconfig(b'ui', b'ssh', b'%s -oBatchMode=yes -oIdentitiesOnly=yes' % ssh) | |
351 | # push / pull hooks |
|
352 | # push / pull hooks | |
352 | baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action', b'python:kallithea.lib.hooks.log_push_action') |
|
353 | baseui.setconfig(b'hooks', b'changegroup.kallithea_log_push_action', b'python:kallithea.lib.hooks.log_push_action') | |
353 | baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action', b'python:kallithea.lib.hooks.log_pull_action') |
|
354 | baseui.setconfig(b'hooks', b'outgoing.kallithea_log_pull_action', b'python:kallithea.lib.hooks.log_pull_action') | |
354 |
|
355 | |||
355 | if repo_path is not None: |
|
356 | if repo_path is not None: | |
356 | hgrc_path = os.path.join(repo_path, '.hg', 'hgrc') |
|
357 | hgrc_path = os.path.join(repo_path, '.hg', 'hgrc') | |
357 | if os.path.isfile(hgrc_path): |
|
358 | if os.path.isfile(hgrc_path): | |
358 | log.debug('reading hgrc from %s', hgrc_path) |
|
359 | log.debug('reading hgrc from %s', hgrc_path) | |
359 | cfg = config.config() |
|
360 | cfg = mercurial.config.config() | |
360 | cfg.read(hgrc_path) |
|
361 | cfg.read(hgrc_path) | |
361 | for section in ui_sections: |
|
362 | for section in ui_sections: | |
362 | for k, v in cfg.items(section): |
|
363 | for k, v in cfg.items(section): | |
363 | log.debug('config from file: [%s] %s=%s', section, k, v) |
|
364 | log.debug('config from file: [%s] %s=%s', section, k, v) | |
364 | baseui.setconfig(ascii_bytes(section), ascii_bytes(k), safe_bytes(v)) |
|
365 | baseui.setconfig(ascii_bytes(section), ascii_bytes(k), safe_bytes(v)) | |
365 | else: |
|
366 | else: | |
366 | log.debug('hgrc file is not present at %s, skipping...', hgrc_path) |
|
367 | log.debug('hgrc file is not present at %s, skipping...', hgrc_path) | |
367 |
|
368 | |||
368 | return baseui |
|
369 | return baseui | |
369 |
|
370 | |||
370 |
|
371 | |||
371 | def set_app_settings(config): |
|
372 | def set_app_settings(config): | |
372 | """ |
|
373 | """ | |
373 | Updates app config with new settings from database |
|
374 | Updates app config with new settings from database | |
374 |
|
375 | |||
375 | :param config: |
|
376 | :param config: | |
376 | """ |
|
377 | """ | |
377 | hgsettings = Setting.get_app_settings() |
|
378 | hgsettings = Setting.get_app_settings() | |
378 | for k, v in hgsettings.items(): |
|
379 | for k, v in hgsettings.items(): | |
379 | config[k] = v |
|
380 | config[k] = v | |
380 |
|
381 | |||
381 |
|
382 | |||
382 | def set_vcs_config(config): |
|
383 | def set_vcs_config(config): | |
383 | """ |
|
384 | """ | |
384 | Patch VCS config with some Kallithea specific stuff |
|
385 | Patch VCS config with some Kallithea specific stuff | |
385 |
|
386 | |||
386 | :param config: kallithea.CONFIG |
|
387 | :param config: kallithea.CONFIG | |
387 | """ |
|
388 | """ | |
388 | settings.BACKENDS = { |
|
389 | settings.BACKENDS = { | |
389 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', |
|
390 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', | |
390 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', |
|
391 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', | |
391 | } |
|
392 | } | |
392 |
|
393 | |||
393 | settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git') |
|
394 | settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git') | |
394 | settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip() |
|
395 | settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip() | |
395 | settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', |
|
396 | settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding', | |
396 | 'utf-8'), sep=',') |
|
397 | 'utf-8'), sep=',') | |
397 |
|
398 | |||
398 |
|
399 | |||
399 | def set_indexer_config(config): |
|
400 | def set_indexer_config(config): | |
400 | """ |
|
401 | """ | |
401 | Update Whoosh index mapping |
|
402 | Update Whoosh index mapping | |
402 |
|
403 | |||
403 | :param config: kallithea.CONFIG |
|
404 | :param config: kallithea.CONFIG | |
404 | """ |
|
405 | """ | |
405 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
406 | log.debug('adding extra into INDEX_EXTENSIONS') | |
406 | kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) |
|
407 | kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', ''))) | |
407 |
|
408 | |||
408 | log.debug('adding extra into INDEX_FILENAMES') |
|
409 | log.debug('adding extra into INDEX_FILENAMES') | |
409 | kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) |
|
410 | kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', ''))) | |
410 |
|
411 | |||
411 |
|
412 | |||
412 | def map_groups(path): |
|
413 | def map_groups(path): | |
413 | """ |
|
414 | """ | |
414 | Given a full path to a repository, create all nested groups that this |
|
415 | Given a full path to a repository, create all nested groups that this | |
415 | repo is inside. This function creates parent-child relationships between |
|
416 | repo is inside. This function creates parent-child relationships between | |
416 | groups and creates default perms for all new groups. |
|
417 | groups and creates default perms for all new groups. | |
417 |
|
418 | |||
418 | :param paths: full path to repository |
|
419 | :param paths: full path to repository | |
419 | """ |
|
420 | """ | |
420 | from kallithea.model.repo_group import RepoGroupModel |
|
421 | from kallithea.model.repo_group import RepoGroupModel | |
421 | sa = meta.Session() |
|
422 | sa = meta.Session() | |
422 | groups = path.split(Repository.url_sep()) |
|
423 | groups = path.split(Repository.url_sep()) | |
423 | parent = None |
|
424 | parent = None | |
424 | group = None |
|
425 | group = None | |
425 |
|
426 | |||
426 | # last element is repo in nested groups structure |
|
427 | # last element is repo in nested groups structure | |
427 | groups = groups[:-1] |
|
428 | groups = groups[:-1] | |
428 | rgm = RepoGroupModel() |
|
429 | rgm = RepoGroupModel() | |
429 | owner = User.get_first_admin() |
|
430 | owner = User.get_first_admin() | |
430 | for lvl, group_name in enumerate(groups): |
|
431 | for lvl, group_name in enumerate(groups): | |
431 | group_name = u'/'.join(groups[:lvl] + [group_name]) |
|
432 | group_name = u'/'.join(groups[:lvl] + [group_name]) | |
432 | group = RepoGroup.get_by_group_name(group_name) |
|
433 | group = RepoGroup.get_by_group_name(group_name) | |
433 | desc = '%s group' % group_name |
|
434 | desc = '%s group' % group_name | |
434 |
|
435 | |||
435 | # skip folders that are now removed repos |
|
436 | # skip folders that are now removed repos | |
436 | if REMOVED_REPO_PAT.match(group_name): |
|
437 | if REMOVED_REPO_PAT.match(group_name): | |
437 | break |
|
438 | break | |
438 |
|
439 | |||
439 | if group is None: |
|
440 | if group is None: | |
440 | log.debug('creating group level: %s group_name: %s', |
|
441 | log.debug('creating group level: %s group_name: %s', | |
441 | lvl, group_name) |
|
442 | lvl, group_name) | |
442 | group = RepoGroup(group_name, parent) |
|
443 | group = RepoGroup(group_name, parent) | |
443 | group.group_description = desc |
|
444 | group.group_description = desc | |
444 | group.owner = owner |
|
445 | group.owner = owner | |
445 | sa.add(group) |
|
446 | sa.add(group) | |
446 | rgm._create_default_perms(group) |
|
447 | rgm._create_default_perms(group) | |
447 | sa.flush() |
|
448 | sa.flush() | |
448 |
|
449 | |||
449 | parent = group |
|
450 | parent = group | |
450 | return group |
|
451 | return group | |
451 |
|
452 | |||
452 |
|
453 | |||
453 | def repo2db_mapper(initial_repo_dict, remove_obsolete=False, |
|
454 | def repo2db_mapper(initial_repo_dict, remove_obsolete=False, | |
454 | install_git_hooks=False, user=None, overwrite_git_hooks=False): |
|
455 | install_git_hooks=False, user=None, overwrite_git_hooks=False): | |
455 | """ |
|
456 | """ | |
456 | maps all repos given in initial_repo_dict, non existing repositories |
|
457 | maps all repos given in initial_repo_dict, non existing repositories | |
457 | are created, if remove_obsolete is True it also check for db entries |
|
458 | are created, if remove_obsolete is True it also check for db entries | |
458 | that are not in initial_repo_dict and removes them. |
|
459 | that are not in initial_repo_dict and removes them. | |
459 |
|
460 | |||
460 | :param initial_repo_dict: mapping with repositories found by scanning methods |
|
461 | :param initial_repo_dict: mapping with repositories found by scanning methods | |
461 | :param remove_obsolete: check for obsolete entries in database |
|
462 | :param remove_obsolete: check for obsolete entries in database | |
462 | :param install_git_hooks: if this is True, also check and install git hook |
|
463 | :param install_git_hooks: if this is True, also check and install git hook | |
463 | for a repo if missing |
|
464 | for a repo if missing | |
464 | :param overwrite_git_hooks: if this is True, overwrite any existing git hooks |
|
465 | :param overwrite_git_hooks: if this is True, overwrite any existing git hooks | |
465 | that may be encountered (even if user-deployed) |
|
466 | that may be encountered (even if user-deployed) | |
466 | """ |
|
467 | """ | |
467 | from kallithea.model.repo import RepoModel |
|
468 | from kallithea.model.repo import RepoModel | |
468 | from kallithea.model.scm import ScmModel |
|
469 | from kallithea.model.scm import ScmModel | |
469 | sa = meta.Session() |
|
470 | sa = meta.Session() | |
470 | repo_model = RepoModel() |
|
471 | repo_model = RepoModel() | |
471 | if user is None: |
|
472 | if user is None: | |
472 | user = User.get_first_admin() |
|
473 | user = User.get_first_admin() | |
473 | added = [] |
|
474 | added = [] | |
474 |
|
475 | |||
475 | # creation defaults |
|
476 | # creation defaults | |
476 | defs = Setting.get_default_repo_settings(strip_prefix=True) |
|
477 | defs = Setting.get_default_repo_settings(strip_prefix=True) | |
477 | enable_statistics = defs.get('repo_enable_statistics') |
|
478 | enable_statistics = defs.get('repo_enable_statistics') | |
478 | enable_downloads = defs.get('repo_enable_downloads') |
|
479 | enable_downloads = defs.get('repo_enable_downloads') | |
479 | private = defs.get('repo_private') |
|
480 | private = defs.get('repo_private') | |
480 |
|
481 | |||
481 | for name, repo in initial_repo_dict.items(): |
|
482 | for name, repo in initial_repo_dict.items(): | |
482 | group = map_groups(name) |
|
483 | group = map_groups(name) | |
483 | unicode_name = safe_unicode(name) |
|
484 | unicode_name = safe_unicode(name) | |
484 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
485 | db_repo = repo_model.get_by_repo_name(unicode_name) | |
485 | # found repo that is on filesystem not in Kallithea database |
|
486 | # found repo that is on filesystem not in Kallithea database | |
486 | if not db_repo: |
|
487 | if not db_repo: | |
487 | log.info('repository %s not found, creating now', name) |
|
488 | log.info('repository %s not found, creating now', name) | |
488 | added.append(name) |
|
489 | added.append(name) | |
489 | desc = (repo.description |
|
490 | desc = (repo.description | |
490 | if repo.description != 'unknown' |
|
491 | if repo.description != 'unknown' | |
491 | else '%s repository' % name) |
|
492 | else '%s repository' % name) | |
492 |
|
493 | |||
493 | new_repo = repo_model._create_repo( |
|
494 | new_repo = repo_model._create_repo( | |
494 | repo_name=name, |
|
495 | repo_name=name, | |
495 | repo_type=repo.alias, |
|
496 | repo_type=repo.alias, | |
496 | description=desc, |
|
497 | description=desc, | |
497 | repo_group=getattr(group, 'group_id', None), |
|
498 | repo_group=getattr(group, 'group_id', None), | |
498 | owner=user, |
|
499 | owner=user, | |
499 | enable_downloads=enable_downloads, |
|
500 | enable_downloads=enable_downloads, | |
500 | enable_statistics=enable_statistics, |
|
501 | enable_statistics=enable_statistics, | |
501 | private=private, |
|
502 | private=private, | |
502 | state=Repository.STATE_CREATED |
|
503 | state=Repository.STATE_CREATED | |
503 | ) |
|
504 | ) | |
504 | sa.commit() |
|
505 | sa.commit() | |
505 | # we added that repo just now, and make sure it has githook |
|
506 | # we added that repo just now, and make sure it has githook | |
506 | # installed, and updated server info |
|
507 | # installed, and updated server info | |
507 | if new_repo.repo_type == 'git': |
|
508 | if new_repo.repo_type == 'git': | |
508 | git_repo = new_repo.scm_instance |
|
509 | git_repo = new_repo.scm_instance | |
509 | ScmModel().install_git_hooks(git_repo) |
|
510 | ScmModel().install_git_hooks(git_repo) | |
510 | # update repository server-info |
|
511 | # update repository server-info | |
511 | log.debug('Running update server info') |
|
512 | log.debug('Running update server info') | |
512 | git_repo._update_server_info() |
|
513 | git_repo._update_server_info() | |
513 | new_repo.update_changeset_cache() |
|
514 | new_repo.update_changeset_cache() | |
514 | elif install_git_hooks: |
|
515 | elif install_git_hooks: | |
515 | if db_repo.repo_type == 'git': |
|
516 | if db_repo.repo_type == 'git': | |
516 | ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) |
|
517 | ScmModel().install_git_hooks(db_repo.scm_instance, force_create=overwrite_git_hooks) | |
517 |
|
518 | |||
518 | removed = [] |
|
519 | removed = [] | |
519 | # remove from database those repositories that are not in the filesystem |
|
520 | # remove from database those repositories that are not in the filesystem | |
520 | unicode_initial_repo_names = set(safe_unicode(name) for name in initial_repo_dict) |
|
521 | unicode_initial_repo_names = set(safe_unicode(name) for name in initial_repo_dict) | |
521 | for repo in sa.query(Repository).all(): |
|
522 | for repo in sa.query(Repository).all(): | |
522 | if repo.repo_name not in unicode_initial_repo_names: |
|
523 | if repo.repo_name not in unicode_initial_repo_names: | |
523 | if remove_obsolete: |
|
524 | if remove_obsolete: | |
524 | log.debug("Removing non-existing repository found in db `%s`", |
|
525 | log.debug("Removing non-existing repository found in db `%s`", | |
525 | repo.repo_name) |
|
526 | repo.repo_name) | |
526 | try: |
|
527 | try: | |
527 | RepoModel().delete(repo, forks='detach', fs_remove=False) |
|
528 | RepoModel().delete(repo, forks='detach', fs_remove=False) | |
528 | sa.commit() |
|
529 | sa.commit() | |
529 | except Exception: |
|
530 | except Exception: | |
530 | #don't hold further removals on error |
|
531 | #don't hold further removals on error | |
531 | log.error(traceback.format_exc()) |
|
532 | log.error(traceback.format_exc()) | |
532 | sa.rollback() |
|
533 | sa.rollback() | |
533 | removed.append(repo.repo_name) |
|
534 | removed.append(repo.repo_name) | |
534 | return added, removed |
|
535 | return added, removed | |
535 |
|
536 | |||
536 |
|
537 | |||
537 | def load_rcextensions(root_path): |
|
538 | def load_rcextensions(root_path): | |
538 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
539 | path = os.path.join(root_path, 'rcextensions', '__init__.py') | |
539 | if os.path.isfile(path): |
|
540 | if os.path.isfile(path): | |
540 | rcext = create_module('rc', path) |
|
541 | rcext = create_module('rc', path) | |
541 | EXT = kallithea.EXTENSIONS = rcext |
|
542 | EXT = kallithea.EXTENSIONS = rcext | |
542 | log.debug('Found rcextensions now loading %s...', rcext) |
|
543 | log.debug('Found rcextensions now loading %s...', rcext) | |
543 |
|
544 | |||
544 | # Additional mappings that are not present in the pygments lexers |
|
545 | # Additional mappings that are not present in the pygments lexers | |
545 | kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
546 | kallithea.config.conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) | |
546 |
|
547 | |||
547 | # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) |
|
548 | # OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present) | |
548 |
|
549 | |||
549 | if getattr(EXT, 'INDEX_EXTENSIONS', []): |
|
550 | if getattr(EXT, 'INDEX_EXTENSIONS', []): | |
550 | log.debug('settings custom INDEX_EXTENSIONS') |
|
551 | log.debug('settings custom INDEX_EXTENSIONS') | |
551 | kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) |
|
552 | kallithea.config.conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', []) | |
552 |
|
553 | |||
553 | # ADDITIONAL MAPPINGS |
|
554 | # ADDITIONAL MAPPINGS | |
554 | log.debug('adding extra into INDEX_EXTENSIONS') |
|
555 | log.debug('adding extra into INDEX_EXTENSIONS') | |
555 | kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) |
|
556 | kallithea.config.conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', [])) | |
556 |
|
557 | |||
557 | # auto check if the module is not missing any data, set to default if is |
|
558 | # auto check if the module is not missing any data, set to default if is | |
558 | # this will help autoupdate new feature of rcext module |
|
559 | # this will help autoupdate new feature of rcext module | |
559 | #from kallithea.config import rcextensions |
|
560 | #from kallithea.config import rcextensions | |
560 | #for k in dir(rcextensions): |
|
561 | #for k in dir(rcextensions): | |
561 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
562 | # if not k.startswith('_') and not hasattr(EXT, k): | |
562 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
563 | # setattr(EXT, k, getattr(rcextensions, k)) | |
563 |
|
564 | |||
564 |
|
565 | |||
565 | #============================================================================== |
|
566 | #============================================================================== | |
566 | # MISC |
|
567 | # MISC | |
567 | #============================================================================== |
|
568 | #============================================================================== | |
568 |
|
569 | |||
569 | git_req_ver = StrictVersion('1.7.4') |
|
570 | git_req_ver = StrictVersion('1.7.4') | |
570 |
|
571 | |||
571 | def check_git_version(): |
|
572 | def check_git_version(): | |
572 | """ |
|
573 | """ | |
573 | Checks what version of git is installed on the system, and raise a system exit |
|
574 | Checks what version of git is installed on the system, and raise a system exit | |
574 | if it's too old for Kallithea to work properly. |
|
575 | if it's too old for Kallithea to work properly. | |
575 | """ |
|
576 | """ | |
576 | if 'git' not in kallithea.BACKENDS: |
|
577 | if 'git' not in kallithea.BACKENDS: | |
577 | return None |
|
578 | return None | |
578 |
|
579 | |||
579 | if not settings.GIT_EXECUTABLE_PATH: |
|
580 | if not settings.GIT_EXECUTABLE_PATH: | |
580 | log.warning('No git executable configured - check "git_path" in the ini file.') |
|
581 | log.warning('No git executable configured - check "git_path" in the ini file.') | |
581 | return None |
|
582 | return None | |
582 |
|
583 | |||
583 | try: |
|
584 | try: | |
584 | stdout, stderr = GitRepository._run_git_command(['--version']) |
|
585 | stdout, stderr = GitRepository._run_git_command(['--version']) | |
585 | except RepositoryError as e: |
|
586 | except RepositoryError as e: | |
586 | # message will already have been logged as error |
|
587 | # message will already have been logged as error | |
587 | log.warning('No working git executable found - check "git_path" in the ini file.') |
|
588 | log.warning('No working git executable found - check "git_path" in the ini file.') | |
588 | return None |
|
589 | return None | |
589 |
|
590 | |||
590 | if stderr: |
|
591 | if stderr: | |
591 | log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, stderr) |
|
592 | log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, stderr) | |
592 |
|
593 | |||
593 | if not stdout: |
|
594 | if not stdout: | |
594 | log.warning('No working git executable found - check "git_path" in the ini file.') |
|
595 | log.warning('No working git executable found - check "git_path" in the ini file.') | |
595 | return None |
|
596 | return None | |
596 |
|
597 | |||
597 | output = stdout.strip() |
|
598 | output = stdout.strip() | |
598 | m = re.search(r"\d+.\d+.\d+", output) |
|
599 | m = re.search(r"\d+.\d+.\d+", output) | |
599 | if m: |
|
600 | if m: | |
600 | ver = StrictVersion(m.group(0)) |
|
601 | ver = StrictVersion(m.group(0)) | |
601 | log.debug('Git executable: "%s", version %s (parsed from: "%s")', |
|
602 | log.debug('Git executable: "%s", version %s (parsed from: "%s")', | |
602 | settings.GIT_EXECUTABLE_PATH, ver, output) |
|
603 | settings.GIT_EXECUTABLE_PATH, ver, output) | |
603 | if ver < git_req_ver: |
|
604 | if ver < git_req_ver: | |
604 | log.error('Kallithea detected %s version %s, which is too old ' |
|
605 | log.error('Kallithea detected %s version %s, which is too old ' | |
605 | 'for the system to function properly. ' |
|
606 | 'for the system to function properly. ' | |
606 | 'Please upgrade to version %s or later. ' |
|
607 | 'Please upgrade to version %s or later. ' | |
607 | 'If you strictly need Mercurial repositories, you can ' |
|
608 | 'If you strictly need Mercurial repositories, you can ' | |
608 | 'clear the "git_path" setting in the ini file.', |
|
609 | 'clear the "git_path" setting in the ini file.', | |
609 | settings.GIT_EXECUTABLE_PATH, ver, git_req_ver) |
|
610 | settings.GIT_EXECUTABLE_PATH, ver, git_req_ver) | |
610 | log.error("Terminating ...") |
|
611 | log.error("Terminating ...") | |
611 | sys.exit(1) |
|
612 | sys.exit(1) | |
612 | else: |
|
613 | else: | |
613 | ver = StrictVersion('0.0.0') |
|
614 | ver = StrictVersion('0.0.0') | |
614 | log.warning('Error finding version number in "%s --version" stdout:\n%s', |
|
615 | log.warning('Error finding version number in "%s --version" stdout:\n%s', | |
615 | settings.GIT_EXECUTABLE_PATH, output) |
|
616 | settings.GIT_EXECUTABLE_PATH, output) | |
616 |
|
617 | |||
617 | return ver |
|
618 | return ver | |
618 |
|
619 | |||
619 |
|
620 | |||
620 | #=============================================================================== |
|
621 | #=============================================================================== | |
621 | # CACHE RELATED METHODS |
|
622 | # CACHE RELATED METHODS | |
622 | #=============================================================================== |
|
623 | #=============================================================================== | |
623 |
|
624 | |||
624 | # set cache regions for beaker so celery can utilise it |
|
625 | # set cache regions for beaker so celery can utilise it | |
625 | def setup_cache_regions(settings): |
|
626 | def setup_cache_regions(settings): | |
626 | # Create dict with just beaker cache configs with prefix stripped |
|
627 | # Create dict with just beaker cache configs with prefix stripped | |
627 | cache_settings = {'regions': None} |
|
628 | cache_settings = {'regions': None} | |
628 | prefix = 'beaker.cache.' |
|
629 | prefix = 'beaker.cache.' | |
629 | for key in settings: |
|
630 | for key in settings: | |
630 | if key.startswith(prefix): |
|
631 | if key.startswith(prefix): | |
631 | name = key[len(prefix):] |
|
632 | name = key[len(prefix):] | |
632 | cache_settings[name] = settings[key] |
|
633 | cache_settings[name] = settings[key] | |
633 | # Find all regions, apply defaults, and apply to beaker |
|
634 | # Find all regions, apply defaults, and apply to beaker | |
634 | if cache_settings['regions']: |
|
635 | if cache_settings['regions']: | |
635 | for region in cache_settings['regions'].split(','): |
|
636 | for region in cache_settings['regions'].split(','): | |
636 | region = region.strip() |
|
637 | region = region.strip() | |
637 | prefix = region + '.' |
|
638 | prefix = region + '.' | |
638 | region_settings = {} |
|
639 | region_settings = {} | |
639 | for key in cache_settings: |
|
640 | for key in cache_settings: | |
640 | if key.startswith(prefix): |
|
641 | if key.startswith(prefix): | |
641 | name = key[len(prefix):] |
|
642 | name = key[len(prefix):] | |
642 | region_settings[name] = cache_settings[key] |
|
643 | region_settings[name] = cache_settings[key] | |
643 | region_settings.setdefault('expire', |
|
644 | region_settings.setdefault('expire', | |
644 | cache_settings.get('expire', '60')) |
|
645 | cache_settings.get('expire', '60')) | |
645 | region_settings.setdefault('lock_dir', |
|
646 | region_settings.setdefault('lock_dir', | |
646 | cache_settings.get('lock_dir')) |
|
647 | cache_settings.get('lock_dir')) | |
647 | region_settings.setdefault('data_dir', |
|
648 | region_settings.setdefault('data_dir', | |
648 | cache_settings.get('data_dir')) |
|
649 | cache_settings.get('data_dir')) | |
649 | region_settings.setdefault('type', |
|
650 | region_settings.setdefault('type', | |
650 | cache_settings.get('type', 'memory')) |
|
651 | cache_settings.get('type', 'memory')) | |
651 | beaker.cache.cache_regions[region] = region_settings |
|
652 | beaker.cache.cache_regions[region] = region_settings | |
652 |
|
653 | |||
653 |
|
654 | |||
654 | def conditional_cache(region, prefix, condition, func): |
|
655 | def conditional_cache(region, prefix, condition, func): | |
655 | """ |
|
656 | """ | |
656 |
|
657 | |||
657 | Conditional caching function use like:: |
|
658 | Conditional caching function use like:: | |
658 | def _c(arg): |
|
659 | def _c(arg): | |
659 | #heavy computation function |
|
660 | #heavy computation function | |
660 | return data |
|
661 | return data | |
661 |
|
662 | |||
662 | # depending from condition the compute is wrapped in cache or not |
|
663 | # depending from condition the compute is wrapped in cache or not | |
663 | compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func) |
|
664 | compute = conditional_cache('short_term', 'cache_desc', condition=True, func=func) | |
664 | return compute(arg) |
|
665 | return compute(arg) | |
665 |
|
666 | |||
666 | :param region: name of cache region |
|
667 | :param region: name of cache region | |
667 | :param prefix: cache region prefix |
|
668 | :param prefix: cache region prefix | |
668 | :param condition: condition for cache to be triggered, and return data cached |
|
669 | :param condition: condition for cache to be triggered, and return data cached | |
669 | :param func: wrapped heavy function to compute |
|
670 | :param func: wrapped heavy function to compute | |
670 |
|
671 | |||
671 | """ |
|
672 | """ | |
672 | wrapped = func |
|
673 | wrapped = func | |
673 | if condition: |
|
674 | if condition: | |
674 | log.debug('conditional_cache: True, wrapping call of ' |
|
675 | log.debug('conditional_cache: True, wrapping call of ' | |
675 | 'func: %s into %s region cache' % (region, func)) |
|
676 | 'func: %s into %s region cache' % (region, func)) | |
676 | wrapped = _cache_decorate((prefix,), None, None, region)(func) |
|
677 | wrapped = _cache_decorate((prefix,), None, None, region)(func) | |
677 |
|
678 | |||
678 | return wrapped |
|
679 | return wrapped |
@@ -1,744 +1,745 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.git.repository |
|
3 | vcs.backends.git.repository | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Git repository implementation. |
|
6 | Git repository implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import errno |
|
12 | import errno | |
13 | import logging |
|
13 | import logging | |
14 | import os |
|
14 | import os | |
15 | import re |
|
15 | import re | |
16 | import time |
|
16 | import time | |
17 | import urllib |
|
17 | import urllib | |
18 | import urllib2 |
|
18 | import urllib2 | |
19 | from collections import OrderedDict |
|
19 | from collections import OrderedDict | |
20 |
|
20 | |||
|
21 | import mercurial.url # import httpbasicauthhandler, httpdigestauthhandler | |||
|
22 | import mercurial.util # import url as hg_url | |||
21 | from dulwich.config import ConfigFile |
|
23 | from dulwich.config import ConfigFile | |
22 | from dulwich.objects import Tag |
|
24 | from dulwich.objects import Tag | |
23 | from dulwich.repo import NotGitRepository, Repo |
|
25 | from dulwich.repo import NotGitRepository, Repo | |
24 |
|
26 | |||
25 | from kallithea.lib.vcs import subprocessio |
|
27 | from kallithea.lib.vcs import subprocessio | |
26 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator |
|
28 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator | |
27 | from kallithea.lib.vcs.conf import settings |
|
29 | from kallithea.lib.vcs.conf import settings | |
28 | from kallithea.lib.vcs.exceptions import ( |
|
30 | from kallithea.lib.vcs.exceptions import ( | |
29 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError) |
|
31 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError) | |
30 | from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_str, safe_unicode |
|
32 | from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_str, safe_unicode | |
31 | from kallithea.lib.vcs.utils.hgcompat import hg_url, httpbasicauthhandler, httpdigestauthhandler |
|
|||
32 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
33 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
33 | from kallithea.lib.vcs.utils.paths import abspath, get_user_home |
|
34 | from kallithea.lib.vcs.utils.paths import abspath, get_user_home | |
34 |
|
35 | |||
35 | from .changeset import GitChangeset |
|
36 | from .changeset import GitChangeset | |
36 | from .inmemory import GitInMemoryChangeset |
|
37 | from .inmemory import GitInMemoryChangeset | |
37 | from .workdir import GitWorkdir |
|
38 | from .workdir import GitWorkdir | |
38 |
|
39 | |||
39 |
|
40 | |||
40 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$') |
|
41 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$') | |
41 |
|
42 | |||
42 | log = logging.getLogger(__name__) |
|
43 | log = logging.getLogger(__name__) | |
43 |
|
44 | |||
44 |
|
45 | |||
45 | class GitRepository(BaseRepository): |
|
46 | class GitRepository(BaseRepository): | |
46 | """ |
|
47 | """ | |
47 | Git repository backend. |
|
48 | Git repository backend. | |
48 | """ |
|
49 | """ | |
49 | DEFAULT_BRANCH_NAME = 'master' |
|
50 | DEFAULT_BRANCH_NAME = 'master' | |
50 | scm = 'git' |
|
51 | scm = 'git' | |
51 |
|
52 | |||
52 | def __init__(self, repo_path, create=False, src_url=None, |
|
53 | def __init__(self, repo_path, create=False, src_url=None, | |
53 | update_after_clone=False, bare=False): |
|
54 | update_after_clone=False, bare=False): | |
54 |
|
55 | |||
55 | self.path = safe_unicode(abspath(repo_path)) |
|
56 | self.path = safe_unicode(abspath(repo_path)) | |
56 | self.repo = self._get_repo(create, src_url, update_after_clone, bare) |
|
57 | self.repo = self._get_repo(create, src_url, update_after_clone, bare) | |
57 | self.bare = self.repo.bare |
|
58 | self.bare = self.repo.bare | |
58 |
|
59 | |||
59 | @property |
|
60 | @property | |
60 | def _config_files(self): |
|
61 | def _config_files(self): | |
61 | return [ |
|
62 | return [ | |
62 | self.bare and abspath(self.path, 'config') |
|
63 | self.bare and abspath(self.path, 'config') | |
63 | or abspath(self.path, '.git', 'config'), |
|
64 | or abspath(self.path, '.git', 'config'), | |
64 | abspath(get_user_home(), '.gitconfig'), |
|
65 | abspath(get_user_home(), '.gitconfig'), | |
65 | ] |
|
66 | ] | |
66 |
|
67 | |||
67 | @property |
|
68 | @property | |
68 | def _repo(self): |
|
69 | def _repo(self): | |
69 | return self.repo |
|
70 | return self.repo | |
70 |
|
71 | |||
71 | @property |
|
72 | @property | |
72 | def head(self): |
|
73 | def head(self): | |
73 | try: |
|
74 | try: | |
74 | return self._repo.head() |
|
75 | return self._repo.head() | |
75 | except KeyError: |
|
76 | except KeyError: | |
76 | return None |
|
77 | return None | |
77 |
|
78 | |||
78 | @property |
|
79 | @property | |
79 | def _empty(self): |
|
80 | def _empty(self): | |
80 | """ |
|
81 | """ | |
81 | Checks if repository is empty ie. without any changesets |
|
82 | Checks if repository is empty ie. without any changesets | |
82 | """ |
|
83 | """ | |
83 |
|
84 | |||
84 | try: |
|
85 | try: | |
85 | self.revisions[0] |
|
86 | self.revisions[0] | |
86 | except (KeyError, IndexError): |
|
87 | except (KeyError, IndexError): | |
87 | return True |
|
88 | return True | |
88 | return False |
|
89 | return False | |
89 |
|
90 | |||
90 | @LazyProperty |
|
91 | @LazyProperty | |
91 | def revisions(self): |
|
92 | def revisions(self): | |
92 | """ |
|
93 | """ | |
93 | Returns list of revisions' ids, in ascending order. Being lazy |
|
94 | Returns list of revisions' ids, in ascending order. Being lazy | |
94 | attribute allows external tools to inject shas from cache. |
|
95 | attribute allows external tools to inject shas from cache. | |
95 | """ |
|
96 | """ | |
96 | return self._get_all_revisions() |
|
97 | return self._get_all_revisions() | |
97 |
|
98 | |||
98 | @classmethod |
|
99 | @classmethod | |
99 | def _run_git_command(cls, cmd, cwd=None): |
|
100 | def _run_git_command(cls, cmd, cwd=None): | |
100 | """ |
|
101 | """ | |
101 | Runs given ``cmd`` as git command and returns output bytes in a tuple |
|
102 | Runs given ``cmd`` as git command and returns output bytes in a tuple | |
102 | (stdout, stderr) ... or raise RepositoryError. |
|
103 | (stdout, stderr) ... or raise RepositoryError. | |
103 |
|
104 | |||
104 | :param cmd: git command to be executed |
|
105 | :param cmd: git command to be executed | |
105 | :param cwd: passed directly to subprocess |
|
106 | :param cwd: passed directly to subprocess | |
106 | """ |
|
107 | """ | |
107 | # need to clean fix GIT_DIR ! |
|
108 | # need to clean fix GIT_DIR ! | |
108 | gitenv = dict(os.environ) |
|
109 | gitenv = dict(os.environ) | |
109 | gitenv.pop('GIT_DIR', None) |
|
110 | gitenv.pop('GIT_DIR', None) | |
110 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' |
|
111 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' | |
111 |
|
112 | |||
112 | assert isinstance(cmd, list), cmd |
|
113 | assert isinstance(cmd, list), cmd | |
113 | cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd |
|
114 | cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd | |
114 | try: |
|
115 | try: | |
115 | p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False) |
|
116 | p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False) | |
116 | except (EnvironmentError, OSError) as err: |
|
117 | except (EnvironmentError, OSError) as err: | |
117 | # output from the failing process is in str(EnvironmentError) |
|
118 | # output from the failing process is in str(EnvironmentError) | |
118 | msg = ("Couldn't run git command %s.\n" |
|
119 | msg = ("Couldn't run git command %s.\n" | |
119 | "Subprocess failed with '%s': %s\n" % |
|
120 | "Subprocess failed with '%s': %s\n" % | |
120 | (cmd, type(err).__name__, err) |
|
121 | (cmd, type(err).__name__, err) | |
121 | ).strip() |
|
122 | ).strip() | |
122 | log.error(msg) |
|
123 | log.error(msg) | |
123 | raise RepositoryError(msg) |
|
124 | raise RepositoryError(msg) | |
124 |
|
125 | |||
125 | try: |
|
126 | try: | |
126 | stdout = b''.join(p.output) |
|
127 | stdout = b''.join(p.output) | |
127 | stderr = b''.join(p.error) |
|
128 | stderr = b''.join(p.error) | |
128 | finally: |
|
129 | finally: | |
129 | p.close() |
|
130 | p.close() | |
130 | # TODO: introduce option to make commands fail if they have any stderr output? |
|
131 | # TODO: introduce option to make commands fail if they have any stderr output? | |
131 | if stderr: |
|
132 | if stderr: | |
132 | log.debug('stderr from %s:\n%s', cmd, stderr) |
|
133 | log.debug('stderr from %s:\n%s', cmd, stderr) | |
133 | else: |
|
134 | else: | |
134 | log.debug('stderr from %s: None', cmd) |
|
135 | log.debug('stderr from %s: None', cmd) | |
135 | return stdout, stderr |
|
136 | return stdout, stderr | |
136 |
|
137 | |||
137 | def run_git_command(self, cmd): |
|
138 | def run_git_command(self, cmd): | |
138 | """ |
|
139 | """ | |
139 | Runs given ``cmd`` as git command with cwd set to current repo. |
|
140 | Runs given ``cmd`` as git command with cwd set to current repo. | |
140 | Returns stdout as unicode str ... or raise RepositoryError. |
|
141 | Returns stdout as unicode str ... or raise RepositoryError. | |
141 | """ |
|
142 | """ | |
142 | cwd = None |
|
143 | cwd = None | |
143 | if os.path.isdir(self.path): |
|
144 | if os.path.isdir(self.path): | |
144 | cwd = self.path |
|
145 | cwd = self.path | |
145 | stdout, _stderr = self._run_git_command(cmd, cwd=cwd) |
|
146 | stdout, _stderr = self._run_git_command(cmd, cwd=cwd) | |
146 | return safe_unicode(stdout) |
|
147 | return safe_unicode(stdout) | |
147 |
|
148 | |||
148 | @classmethod |
|
149 | @classmethod | |
149 | def _check_url(cls, url): |
|
150 | def _check_url(cls, url): | |
150 | """ |
|
151 | """ | |
151 | Function will check given url and try to verify if it's a valid |
|
152 | Function will check given url and try to verify if it's a valid | |
152 | link. Sometimes it may happened that git will issue basic |
|
153 | link. Sometimes it may happened that git will issue basic | |
153 | auth request that can cause whole API to hang when used from python |
|
154 | auth request that can cause whole API to hang when used from python | |
154 | or other external calls. |
|
155 | or other external calls. | |
155 |
|
156 | |||
156 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
157 | On failures it'll raise urllib2.HTTPError, exception is also thrown | |
157 | when the return code is non 200 |
|
158 | when the return code is non 200 | |
158 | """ |
|
159 | """ | |
159 |
|
160 | |||
160 | # check first if it's not an local url |
|
161 | # check first if it's not an local url | |
161 | if os.path.isdir(url) or url.startswith('file:'): |
|
162 | if os.path.isdir(url) or url.startswith('file:'): | |
162 | return True |
|
163 | return True | |
163 |
|
164 | |||
164 | if url.startswith('git://'): |
|
165 | if url.startswith('git://'): | |
165 | return True |
|
166 | return True | |
166 |
|
167 | |||
167 | if '+' in url[:url.find('://')]: |
|
168 | if '+' in url[:url.find('://')]: | |
168 | url = url[url.find('+') + 1:] |
|
169 | url = url[url.find('+') + 1:] | |
169 |
|
170 | |||
170 | handlers = [] |
|
171 | handlers = [] | |
171 |
url_obj = |
|
172 | url_obj = mercurial.util.url(url) | |
172 | test_uri, authinfo = url_obj.authinfo() |
|
173 | test_uri, authinfo = url_obj.authinfo() | |
173 | if not test_uri.endswith('info/refs'): |
|
174 | if not test_uri.endswith('info/refs'): | |
174 | test_uri = test_uri.rstrip('/') + '/info/refs' |
|
175 | test_uri = test_uri.rstrip('/') + '/info/refs' | |
175 |
|
176 | |||
176 | url_obj.passwd = b'*****' |
|
177 | url_obj.passwd = b'*****' | |
177 | cleaned_uri = str(url_obj) |
|
178 | cleaned_uri = str(url_obj) | |
178 |
|
179 | |||
179 | if authinfo: |
|
180 | if authinfo: | |
180 | # create a password manager |
|
181 | # create a password manager | |
181 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
182 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() | |
182 | passmgr.add_password(*authinfo) |
|
183 | passmgr.add_password(*authinfo) | |
183 |
|
184 | |||
184 | handlers.extend((httpbasicauthhandler(passmgr), |
|
185 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), | |
185 | httpdigestauthhandler(passmgr))) |
|
186 | mercurial.url.httpdigestauthhandler(passmgr))) | |
186 |
|
187 | |||
187 | o = urllib2.build_opener(*handlers) |
|
188 | o = urllib2.build_opener(*handlers) | |
188 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git |
|
189 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git | |
189 |
|
190 | |||
190 | q = {"service": 'git-upload-pack'} |
|
191 | q = {"service": 'git-upload-pack'} | |
191 | qs = '?%s' % urllib.urlencode(q) |
|
192 | qs = '?%s' % urllib.urlencode(q) | |
192 | cu = "%s%s" % (test_uri, qs) |
|
193 | cu = "%s%s" % (test_uri, qs) | |
193 | req = urllib2.Request(cu, None, {}) |
|
194 | req = urllib2.Request(cu, None, {}) | |
194 |
|
195 | |||
195 | try: |
|
196 | try: | |
196 | resp = o.open(req) |
|
197 | resp = o.open(req) | |
197 | if resp.code != 200: |
|
198 | if resp.code != 200: | |
198 | raise Exception('Return Code is not 200') |
|
199 | raise Exception('Return Code is not 200') | |
199 | except Exception as e: |
|
200 | except Exception as e: | |
200 | # means it cannot be cloned |
|
201 | # means it cannot be cloned | |
201 | raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
202 | raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) | |
202 |
|
203 | |||
203 | # now detect if it's proper git repo |
|
204 | # now detect if it's proper git repo | |
204 | gitdata = resp.read() |
|
205 | gitdata = resp.read() | |
205 | if 'service=git-upload-pack' not in gitdata: |
|
206 | if 'service=git-upload-pack' not in gitdata: | |
206 | raise urllib2.URLError( |
|
207 | raise urllib2.URLError( | |
207 | "url [%s] does not look like an git" % cleaned_uri) |
|
208 | "url [%s] does not look like an git" % cleaned_uri) | |
208 |
|
209 | |||
209 | return True |
|
210 | return True | |
210 |
|
211 | |||
211 | def _get_repo(self, create, src_url=None, update_after_clone=False, |
|
212 | def _get_repo(self, create, src_url=None, update_after_clone=False, | |
212 | bare=False): |
|
213 | bare=False): | |
213 | if create and os.path.exists(self.path): |
|
214 | if create and os.path.exists(self.path): | |
214 | raise RepositoryError("Location already exist") |
|
215 | raise RepositoryError("Location already exist") | |
215 | if src_url and not create: |
|
216 | if src_url and not create: | |
216 | raise RepositoryError("Create should be set to True if src_url is " |
|
217 | raise RepositoryError("Create should be set to True if src_url is " | |
217 | "given (clone operation creates repository)") |
|
218 | "given (clone operation creates repository)") | |
218 | try: |
|
219 | try: | |
219 | if create and src_url: |
|
220 | if create and src_url: | |
220 | GitRepository._check_url(src_url) |
|
221 | GitRepository._check_url(src_url) | |
221 | self.clone(src_url, update_after_clone, bare) |
|
222 | self.clone(src_url, update_after_clone, bare) | |
222 | return Repo(self.path) |
|
223 | return Repo(self.path) | |
223 | elif create: |
|
224 | elif create: | |
224 | os.makedirs(self.path) |
|
225 | os.makedirs(self.path) | |
225 | if bare: |
|
226 | if bare: | |
226 | return Repo.init_bare(self.path) |
|
227 | return Repo.init_bare(self.path) | |
227 | else: |
|
228 | else: | |
228 | return Repo.init(self.path) |
|
229 | return Repo.init(self.path) | |
229 | else: |
|
230 | else: | |
230 | return Repo(self.path) |
|
231 | return Repo(self.path) | |
231 | except (NotGitRepository, OSError) as err: |
|
232 | except (NotGitRepository, OSError) as err: | |
232 | raise RepositoryError(err) |
|
233 | raise RepositoryError(err) | |
233 |
|
234 | |||
234 | def _get_all_revisions(self): |
|
235 | def _get_all_revisions(self): | |
235 | # we must check if this repo is not empty, since later command |
|
236 | # we must check if this repo is not empty, since later command | |
236 | # fails if it is. And it's cheaper to ask than throw the subprocess |
|
237 | # fails if it is. And it's cheaper to ask than throw the subprocess | |
237 | # errors |
|
238 | # errors | |
238 | try: |
|
239 | try: | |
239 | self._repo.head() |
|
240 | self._repo.head() | |
240 | except KeyError: |
|
241 | except KeyError: | |
241 | return [] |
|
242 | return [] | |
242 |
|
243 | |||
243 | rev_filter = settings.GIT_REV_FILTER |
|
244 | rev_filter = settings.GIT_REV_FILTER | |
244 | cmd = ['rev-list', rev_filter, '--reverse', '--date-order'] |
|
245 | cmd = ['rev-list', rev_filter, '--reverse', '--date-order'] | |
245 | try: |
|
246 | try: | |
246 | so = self.run_git_command(cmd) |
|
247 | so = self.run_git_command(cmd) | |
247 | except RepositoryError: |
|
248 | except RepositoryError: | |
248 | # Can be raised for empty repositories |
|
249 | # Can be raised for empty repositories | |
249 | return [] |
|
250 | return [] | |
250 | return so.splitlines() |
|
251 | return so.splitlines() | |
251 |
|
252 | |||
252 | def _get_all_revisions2(self): |
|
253 | def _get_all_revisions2(self): | |
253 | # alternate implementation using dulwich |
|
254 | # alternate implementation using dulwich | |
254 | includes = [ascii_str(x[1][0]) for x in self._parsed_refs.iteritems() |
|
255 | includes = [ascii_str(x[1][0]) for x in self._parsed_refs.iteritems() | |
255 | if x[1][1] != b'T'] |
|
256 | if x[1][1] != b'T'] | |
256 | return [c.commit.id for c in self._repo.get_walker(include=includes)] |
|
257 | return [c.commit.id for c in self._repo.get_walker(include=includes)] | |
257 |
|
258 | |||
258 | def _get_revision(self, revision): |
|
259 | def _get_revision(self, revision): | |
259 | """ |
|
260 | """ | |
260 | Given any revision identifier, returns a 40 char string with revision hash. |
|
261 | Given any revision identifier, returns a 40 char string with revision hash. | |
261 | """ |
|
262 | """ | |
262 | if self._empty: |
|
263 | if self._empty: | |
263 | raise EmptyRepositoryError("There are no changesets yet") |
|
264 | raise EmptyRepositoryError("There are no changesets yet") | |
264 |
|
265 | |||
265 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): |
|
266 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): | |
266 | revision = -1 |
|
267 | revision = -1 | |
267 |
|
268 | |||
268 | if isinstance(revision, int): |
|
269 | if isinstance(revision, int): | |
269 | try: |
|
270 | try: | |
270 | return self.revisions[revision] |
|
271 | return self.revisions[revision] | |
271 | except IndexError: |
|
272 | except IndexError: | |
272 | msg = ("Revision %s does not exist for %s" % (revision, self)) |
|
273 | msg = ("Revision %s does not exist for %s" % (revision, self)) | |
273 | raise ChangesetDoesNotExistError(msg) |
|
274 | raise ChangesetDoesNotExistError(msg) | |
274 |
|
275 | |||
275 | if isinstance(revision, (str, unicode)): |
|
276 | if isinstance(revision, (str, unicode)): | |
276 | if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')): |
|
277 | if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')): | |
277 | try: |
|
278 | try: | |
278 | return self.revisions[int(revision)] |
|
279 | return self.revisions[int(revision)] | |
279 | except IndexError: |
|
280 | except IndexError: | |
280 | msg = "Revision %r does not exist for %s" % (revision, self) |
|
281 | msg = "Revision %r does not exist for %s" % (revision, self) | |
281 | raise ChangesetDoesNotExistError(msg) |
|
282 | raise ChangesetDoesNotExistError(msg) | |
282 |
|
283 | |||
283 | # get by branch/tag name |
|
284 | # get by branch/tag name | |
284 | _ref_revision = self._parsed_refs.get(revision) |
|
285 | _ref_revision = self._parsed_refs.get(revision) | |
285 | if _ref_revision: # and _ref_revision[1] in [b'H', b'RH', b'T']: |
|
286 | if _ref_revision: # and _ref_revision[1] in [b'H', b'RH', b'T']: | |
286 | return ascii_str(_ref_revision[0]) |
|
287 | return ascii_str(_ref_revision[0]) | |
287 |
|
288 | |||
288 | if revision in self.revisions: |
|
289 | if revision in self.revisions: | |
289 | return revision |
|
290 | return revision | |
290 |
|
291 | |||
291 | # maybe it's a tag ? we don't have them in self.revisions |
|
292 | # maybe it's a tag ? we don't have them in self.revisions | |
292 | if revision in self.tags.values(): |
|
293 | if revision in self.tags.values(): | |
293 | return revision |
|
294 | return revision | |
294 |
|
295 | |||
295 | if SHA_PATTERN.match(revision): |
|
296 | if SHA_PATTERN.match(revision): | |
296 | msg = ("Revision %s does not exist for %s" % (revision, self)) |
|
297 | msg = ("Revision %s does not exist for %s" % (revision, self)) | |
297 | raise ChangesetDoesNotExistError(msg) |
|
298 | raise ChangesetDoesNotExistError(msg) | |
298 |
|
299 | |||
299 | raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision) |
|
300 | raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision) | |
300 |
|
301 | |||
301 | def get_ref_revision(self, ref_type, ref_name): |
|
302 | def get_ref_revision(self, ref_type, ref_name): | |
302 | """ |
|
303 | """ | |
303 | Returns ``GitChangeset`` object representing repository's |
|
304 | Returns ``GitChangeset`` object representing repository's | |
304 | changeset at the given ``revision``. |
|
305 | changeset at the given ``revision``. | |
305 | """ |
|
306 | """ | |
306 | return self._get_revision(ref_name) |
|
307 | return self._get_revision(ref_name) | |
307 |
|
308 | |||
308 | def _get_archives(self, archive_name='tip'): |
|
309 | def _get_archives(self, archive_name='tip'): | |
309 |
|
310 | |||
310 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
311 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: | |
311 | yield {"type": i[0], "extension": i[1], "node": archive_name} |
|
312 | yield {"type": i[0], "extension": i[1], "node": archive_name} | |
312 |
|
313 | |||
313 | def _get_url(self, url): |
|
314 | def _get_url(self, url): | |
314 | """ |
|
315 | """ | |
315 | Returns normalized url. If schema is not given, would fall to |
|
316 | Returns normalized url. If schema is not given, would fall to | |
316 | filesystem (``file:///``) schema. |
|
317 | filesystem (``file:///``) schema. | |
317 | """ |
|
318 | """ | |
318 | url = safe_str(url) |
|
319 | url = safe_str(url) | |
319 | if url != 'default' and '://' not in url: |
|
320 | if url != 'default' and '://' not in url: | |
320 | url = ':///'.join(('file', url)) |
|
321 | url = ':///'.join(('file', url)) | |
321 | return url |
|
322 | return url | |
322 |
|
323 | |||
323 | def get_hook_location(self): |
|
324 | def get_hook_location(self): | |
324 | """ |
|
325 | """ | |
325 | returns absolute path to location where hooks are stored |
|
326 | returns absolute path to location where hooks are stored | |
326 | """ |
|
327 | """ | |
327 | loc = os.path.join(self.path, 'hooks') |
|
328 | loc = os.path.join(self.path, 'hooks') | |
328 | if not self.bare: |
|
329 | if not self.bare: | |
329 | loc = os.path.join(self.path, '.git', 'hooks') |
|
330 | loc = os.path.join(self.path, '.git', 'hooks') | |
330 | return loc |
|
331 | return loc | |
331 |
|
332 | |||
332 | @LazyProperty |
|
333 | @LazyProperty | |
333 | def name(self): |
|
334 | def name(self): | |
334 | return os.path.basename(self.path) |
|
335 | return os.path.basename(self.path) | |
335 |
|
336 | |||
336 | @LazyProperty |
|
337 | @LazyProperty | |
337 | def last_change(self): |
|
338 | def last_change(self): | |
338 | """ |
|
339 | """ | |
339 | Returns last change made on this repository as datetime object |
|
340 | Returns last change made on this repository as datetime object | |
340 | """ |
|
341 | """ | |
341 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
342 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
342 |
|
343 | |||
343 | def _get_mtime(self): |
|
344 | def _get_mtime(self): | |
344 | try: |
|
345 | try: | |
345 | return time.mktime(self.get_changeset().date.timetuple()) |
|
346 | return time.mktime(self.get_changeset().date.timetuple()) | |
346 | except RepositoryError: |
|
347 | except RepositoryError: | |
347 | idx_loc = '' if self.bare else '.git' |
|
348 | idx_loc = '' if self.bare else '.git' | |
348 | # fallback to filesystem |
|
349 | # fallback to filesystem | |
349 | in_path = os.path.join(self.path, idx_loc, "index") |
|
350 | in_path = os.path.join(self.path, idx_loc, "index") | |
350 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
351 | he_path = os.path.join(self.path, idx_loc, "HEAD") | |
351 | if os.path.exists(in_path): |
|
352 | if os.path.exists(in_path): | |
352 | return os.stat(in_path).st_mtime |
|
353 | return os.stat(in_path).st_mtime | |
353 | else: |
|
354 | else: | |
354 | return os.stat(he_path).st_mtime |
|
355 | return os.stat(he_path).st_mtime | |
355 |
|
356 | |||
356 | @LazyProperty |
|
357 | @LazyProperty | |
357 | def description(self): |
|
358 | def description(self): | |
358 | return safe_unicode(self._repo.get_description() or b'unknown') |
|
359 | return safe_unicode(self._repo.get_description() or b'unknown') | |
359 |
|
360 | |||
360 | @LazyProperty |
|
361 | @LazyProperty | |
361 | def contact(self): |
|
362 | def contact(self): | |
362 | undefined_contact = u'Unknown' |
|
363 | undefined_contact = u'Unknown' | |
363 | return undefined_contact |
|
364 | return undefined_contact | |
364 |
|
365 | |||
365 | @property |
|
366 | @property | |
366 | def branches(self): |
|
367 | def branches(self): | |
367 | if not self.revisions: |
|
368 | if not self.revisions: | |
368 | return {} |
|
369 | return {} | |
369 | sortkey = lambda ctx: ctx[0] |
|
370 | sortkey = lambda ctx: ctx[0] | |
370 | _branches = [(x[0], ascii_str(x[1][0])) |
|
371 | _branches = [(x[0], ascii_str(x[1][0])) | |
371 | for x in self._parsed_refs.iteritems() if x[1][1] == b'H'] |
|
372 | for x in self._parsed_refs.iteritems() if x[1][1] == b'H'] | |
372 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) |
|
373 | return OrderedDict(sorted(_branches, key=sortkey, reverse=False)) | |
373 |
|
374 | |||
374 | @LazyProperty |
|
375 | @LazyProperty | |
375 | def closed_branches(self): |
|
376 | def closed_branches(self): | |
376 | return {} |
|
377 | return {} | |
377 |
|
378 | |||
378 | @LazyProperty |
|
379 | @LazyProperty | |
379 | def tags(self): |
|
380 | def tags(self): | |
380 | return self._get_tags() |
|
381 | return self._get_tags() | |
381 |
|
382 | |||
382 | def _get_tags(self): |
|
383 | def _get_tags(self): | |
383 | if not self.revisions: |
|
384 | if not self.revisions: | |
384 | return {} |
|
385 | return {} | |
385 |
|
386 | |||
386 | sortkey = lambda ctx: ctx[0] |
|
387 | sortkey = lambda ctx: ctx[0] | |
387 | _tags = [(x[0], ascii_str(x[1][0])) |
|
388 | _tags = [(x[0], ascii_str(x[1][0])) | |
388 | for x in self._parsed_refs.iteritems() if x[1][1] == b'T'] |
|
389 | for x in self._parsed_refs.iteritems() if x[1][1] == b'T'] | |
389 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) |
|
390 | return OrderedDict(sorted(_tags, key=sortkey, reverse=True)) | |
390 |
|
391 | |||
391 | def tag(self, name, user, revision=None, message=None, date=None, |
|
392 | def tag(self, name, user, revision=None, message=None, date=None, | |
392 | **kwargs): |
|
393 | **kwargs): | |
393 | """ |
|
394 | """ | |
394 | Creates and returns a tag for the given ``revision``. |
|
395 | Creates and returns a tag for the given ``revision``. | |
395 |
|
396 | |||
396 | :param name: name for new tag |
|
397 | :param name: name for new tag | |
397 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
398 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
398 | :param revision: changeset id for which new tag would be created |
|
399 | :param revision: changeset id for which new tag would be created | |
399 | :param message: message of the tag's commit |
|
400 | :param message: message of the tag's commit | |
400 | :param date: date of tag's commit |
|
401 | :param date: date of tag's commit | |
401 |
|
402 | |||
402 | :raises TagAlreadyExistError: if tag with same name already exists |
|
403 | :raises TagAlreadyExistError: if tag with same name already exists | |
403 | """ |
|
404 | """ | |
404 | if name in self.tags: |
|
405 | if name in self.tags: | |
405 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
406 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
406 | changeset = self.get_changeset(revision) |
|
407 | changeset = self.get_changeset(revision) | |
407 | message = message or "Added tag %s for commit %s" % (name, |
|
408 | message = message or "Added tag %s for commit %s" % (name, | |
408 | changeset.raw_id) |
|
409 | changeset.raw_id) | |
409 | self._repo.refs[b"refs/tags/%s" % name] = changeset._commit.id |
|
410 | self._repo.refs[b"refs/tags/%s" % name] = changeset._commit.id | |
410 |
|
411 | |||
411 | self._parsed_refs = self._get_parsed_refs() |
|
412 | self._parsed_refs = self._get_parsed_refs() | |
412 | self.tags = self._get_tags() |
|
413 | self.tags = self._get_tags() | |
413 | return changeset |
|
414 | return changeset | |
414 |
|
415 | |||
415 | def remove_tag(self, name, user, message=None, date=None): |
|
416 | def remove_tag(self, name, user, message=None, date=None): | |
416 | """ |
|
417 | """ | |
417 | Removes tag with the given ``name``. |
|
418 | Removes tag with the given ``name``. | |
418 |
|
419 | |||
419 | :param name: name of the tag to be removed |
|
420 | :param name: name of the tag to be removed | |
420 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
421 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
421 | :param message: message of the tag's removal commit |
|
422 | :param message: message of the tag's removal commit | |
422 | :param date: date of tag's removal commit |
|
423 | :param date: date of tag's removal commit | |
423 |
|
424 | |||
424 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
425 | :raises TagDoesNotExistError: if tag with given name does not exists | |
425 | """ |
|
426 | """ | |
426 | if name not in self.tags: |
|
427 | if name not in self.tags: | |
427 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
428 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
428 | # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git' |
|
429 | # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git' | |
429 | tagpath = os.path.join(self._repo.refs.path, 'refs', 'tags', name) |
|
430 | tagpath = os.path.join(self._repo.refs.path, 'refs', 'tags', name) | |
430 | try: |
|
431 | try: | |
431 | os.remove(tagpath) |
|
432 | os.remove(tagpath) | |
432 | self._parsed_refs = self._get_parsed_refs() |
|
433 | self._parsed_refs = self._get_parsed_refs() | |
433 | self.tags = self._get_tags() |
|
434 | self.tags = self._get_tags() | |
434 | except OSError as e: |
|
435 | except OSError as e: | |
435 | raise RepositoryError(e.strerror) |
|
436 | raise RepositoryError(e.strerror) | |
436 |
|
437 | |||
437 | @LazyProperty |
|
438 | @LazyProperty | |
438 | def bookmarks(self): |
|
439 | def bookmarks(self): | |
439 | """ |
|
440 | """ | |
440 | Gets bookmarks for this repository |
|
441 | Gets bookmarks for this repository | |
441 | """ |
|
442 | """ | |
442 | return {} |
|
443 | return {} | |
443 |
|
444 | |||
444 | @LazyProperty |
|
445 | @LazyProperty | |
445 | def _parsed_refs(self): |
|
446 | def _parsed_refs(self): | |
446 | return self._get_parsed_refs() |
|
447 | return self._get_parsed_refs() | |
447 |
|
448 | |||
448 | def _get_parsed_refs(self): |
|
449 | def _get_parsed_refs(self): | |
449 | # cache the property |
|
450 | # cache the property | |
450 | _repo = self._repo |
|
451 | _repo = self._repo | |
451 | refs = _repo.get_refs() |
|
452 | refs = _repo.get_refs() | |
452 | keys = [(b'refs/heads/', b'H'), |
|
453 | keys = [(b'refs/heads/', b'H'), | |
453 | (b'refs/remotes/origin/', b'RH'), |
|
454 | (b'refs/remotes/origin/', b'RH'), | |
454 | (b'refs/tags/', b'T')] |
|
455 | (b'refs/tags/', b'T')] | |
455 | _refs = {} |
|
456 | _refs = {} | |
456 | for ref, sha in refs.iteritems(): |
|
457 | for ref, sha in refs.iteritems(): | |
457 | for k, type_ in keys: |
|
458 | for k, type_ in keys: | |
458 | if ref.startswith(k): |
|
459 | if ref.startswith(k): | |
459 | _key = ref[len(k):] |
|
460 | _key = ref[len(k):] | |
460 | if type_ == b'T': |
|
461 | if type_ == b'T': | |
461 | obj = _repo.get_object(sha) |
|
462 | obj = _repo.get_object(sha) | |
462 | if isinstance(obj, Tag): |
|
463 | if isinstance(obj, Tag): | |
463 | sha = _repo.get_object(sha).object[1] |
|
464 | sha = _repo.get_object(sha).object[1] | |
464 | _refs[_key] = [sha, type_] |
|
465 | _refs[_key] = [sha, type_] | |
465 | break |
|
466 | break | |
466 | return _refs |
|
467 | return _refs | |
467 |
|
468 | |||
468 | def _heads(self, reverse=False): |
|
469 | def _heads(self, reverse=False): | |
469 | refs = self._repo.get_refs() |
|
470 | refs = self._repo.get_refs() | |
470 | heads = {} |
|
471 | heads = {} | |
471 |
|
472 | |||
472 | for key, val in refs.items(): |
|
473 | for key, val in refs.items(): | |
473 | for ref_key in [b'refs/heads/', b'refs/remotes/origin/']: |
|
474 | for ref_key in [b'refs/heads/', b'refs/remotes/origin/']: | |
474 | if key.startswith(ref_key): |
|
475 | if key.startswith(ref_key): | |
475 | n = key[len(ref_key):] |
|
476 | n = key[len(ref_key):] | |
476 | if n not in [b'HEAD']: |
|
477 | if n not in [b'HEAD']: | |
477 | heads[n] = val |
|
478 | heads[n] = val | |
478 |
|
479 | |||
479 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) |
|
480 | return heads if reverse else dict((y, x) for x, y in heads.iteritems()) | |
480 |
|
481 | |||
481 | def get_changeset(self, revision=None): |
|
482 | def get_changeset(self, revision=None): | |
482 | """ |
|
483 | """ | |
483 | Returns ``GitChangeset`` object representing commit from git repository |
|
484 | Returns ``GitChangeset`` object representing commit from git repository | |
484 | at the given revision or head (most recent commit) if None given. |
|
485 | at the given revision or head (most recent commit) if None given. | |
485 | """ |
|
486 | """ | |
486 | if isinstance(revision, GitChangeset): |
|
487 | if isinstance(revision, GitChangeset): | |
487 | return revision |
|
488 | return revision | |
488 | revision = self._get_revision(revision) |
|
489 | revision = self._get_revision(revision) | |
489 | changeset = GitChangeset(repository=self, revision=revision) |
|
490 | changeset = GitChangeset(repository=self, revision=revision) | |
490 | return changeset |
|
491 | return changeset | |
491 |
|
492 | |||
    def get_changesets(self, start=None, end=None, start_date=None,
            end_date=None, branch_name=None, reverse=False, max_revisions=None):
        """
        Returns iterator of ``GitChangeset`` objects from start to end (both
        are inclusive), in ascending date order (unless ``reverse`` is set).

        :param start: changeset ID, as str; first returned changeset
        :param end: changeset ID, as str; last returned changeset
        :param start_date: if specified, changesets with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, changesets with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, changesets not reachable from given
          branch would be filtered out from returned set
        :param reverse: if ``True``, returned generator would be reversed
          (meaning that returned changesets would have descending date order)

        :raise BranchDoesNotExistError: If given ``branch_name`` does not
          exist.
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
          ``end`` could not be found.

        """
        if branch_name and branch_name not in self.branches:
            raise BranchDoesNotExistError("Branch '%s' not found"
                                          % branch_name)
        # check now if it's not an empty repo, to avoid spawning
        # subprocess commands on an empty repository
        if self._empty:
            raise EmptyRepositoryError("There are no changesets yet")

        # %H at format means (full) commit hash; initial hashes are retrieved
        # in ascending date order
        cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H']
        if max_revisions:
            cmd += ['--max-count=%s' % max_revisions]
        if start_date:
            cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')]
        if end_date:
            cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')]
        if branch_name:
            cmd.append(branch_name)
        else:
            # no branch restriction: fall back to the configured rev filter
            cmd.append(settings.GIT_REV_FILTER)

        revs = self.run_git_command(cmd).splitlines()
        start_pos = 0
        end_pos = len(revs)
        if start:
            _start = self._get_revision(start)
            try:
                start_pos = revs.index(_start)
            except ValueError:
                # start revision not in the filtered list: keep position 0
                pass

        if end is not None:
            _end = self._get_revision(end)
            try:
                end_pos = revs.index(_end)
            except ValueError:
                # end revision not in the filtered list: keep len(revs)
                pass

        if None not in [start, end] and start_pos > end_pos:
            raise RepositoryError('start cannot be after end')

        # NOTE(review): end_pos is never None at this point (it is len(revs)
        # or a list index), so this guard always fires — the +1 makes the
        # ``end`` revision inclusive in the slice below.
        if end_pos is not None:
            end_pos += 1

        revs = revs[start_pos:end_pos]
        if reverse:
            revs.reverse()

        return CollectionGenerator(self, revs)
565 |
|
566 | |||
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
            context=3):
        """
        Returns (git like) *diff*, as plain bytes text. Shows changes
        introduced by ``rev2`` since ``rev1``.

        :param rev1: Entry point from which diff is shown. Can be
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
          the changes since empty state of the repository until ``rev2``
        :param rev2: Until which revision changes should be shown.
        :param path: if given, restrict the diff to this file or directory
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``. Due to limitations in Git, if
          value passed-in is greater than ``2**31-1``
          (``2147483647``), it will be set to ``2147483647``
          instead. If negative value is passed-in, it will be set to
          ``0`` instead.
        """

        # Git internally uses a signed long int for storing context
        # size (number of lines to show before and after the
        # differences). This can result in integer overflow, so we
        # ensure the requested context is smaller by one than the
        # number that would cause the overflow. It is highly unlikely
        # that a single file will contain that many lines, so this
        # kind of change should not cause any realistic consequences.
        overflowed_long_int = 2**31

        if context >= overflowed_long_int:
            context = overflowed_long_int - 1

        # Negative context values make no sense, and will result in
        # errors. Ensure this does not happen.
        if context < 0:
            context = 0

        flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        # accept changeset-like objects as well as raw revision ids
        if hasattr(rev1, 'raw_id'):
            rev1 = getattr(rev1, 'raw_id')

        if hasattr(rev2, 'raw_id'):
            rev2 = getattr(rev2, 'raw_id')

        if rev1 == self.EMPTY_CHANGESET:
            # diff from the empty repo: 'git show' of rev2 carries the same
            # patch content; its header is stripped below
            rev2 = self.get_changeset(rev2).raw_id
            cmd = ['show'] + flags + [rev2]
        else:
            rev1 = self.get_changeset(rev1).raw_id
            rev2 = self.get_changeset(rev2).raw_id
            cmd = ['diff'] + flags + [rev1, rev2]

        if path:
            cmd += ['--', path]

        stdout, stderr = self._run_git_command(cmd, cwd=self.path)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if rev1 == self.EMPTY_CHANGESET:
            parts = stdout.split(b'\ndiff ', 1)
            if len(parts) > 1:
                stdout = b'diff ' + parts[1]
        return stdout
632 |
|
633 | |||
633 | @LazyProperty |
|
634 | @LazyProperty | |
634 | def in_memory_changeset(self): |
|
635 | def in_memory_changeset(self): | |
635 | """ |
|
636 | """ | |
636 | Returns ``GitInMemoryChangeset`` object for this repository. |
|
637 | Returns ``GitInMemoryChangeset`` object for this repository. | |
637 | """ |
|
638 | """ | |
638 | return GitInMemoryChangeset(self) |
|
639 | return GitInMemoryChangeset(self) | |
639 |
|
640 | |||
640 | def clone(self, url, update_after_clone=True, bare=False): |
|
641 | def clone(self, url, update_after_clone=True, bare=False): | |
641 | """ |
|
642 | """ | |
642 | Tries to clone changes from external location. |
|
643 | Tries to clone changes from external location. | |
643 |
|
644 | |||
644 | :param update_after_clone: If set to ``False``, git won't checkout |
|
645 | :param update_after_clone: If set to ``False``, git won't checkout | |
645 | working directory |
|
646 | working directory | |
646 | :param bare: If set to ``True``, repository would be cloned into |
|
647 | :param bare: If set to ``True``, repository would be cloned into | |
647 | *bare* git repository (no working directory at all). |
|
648 | *bare* git repository (no working directory at all). | |
648 | """ |
|
649 | """ | |
649 | url = self._get_url(url) |
|
650 | url = self._get_url(url) | |
650 | cmd = ['clone', '-q'] |
|
651 | cmd = ['clone', '-q'] | |
651 | if bare: |
|
652 | if bare: | |
652 | cmd.append('--bare') |
|
653 | cmd.append('--bare') | |
653 | elif not update_after_clone: |
|
654 | elif not update_after_clone: | |
654 | cmd.append('--no-checkout') |
|
655 | cmd.append('--no-checkout') | |
655 | cmd += ['--', url, self.path] |
|
656 | cmd += ['--', url, self.path] | |
656 | # If error occurs run_git_command raises RepositoryError already |
|
657 | # If error occurs run_git_command raises RepositoryError already | |
657 | self.run_git_command(cmd) |
|
658 | self.run_git_command(cmd) | |
658 |
|
659 | |||
659 | def pull(self, url): |
|
660 | def pull(self, url): | |
660 | """ |
|
661 | """ | |
661 | Tries to pull changes from external location. |
|
662 | Tries to pull changes from external location. | |
662 | """ |
|
663 | """ | |
663 | url = self._get_url(url) |
|
664 | url = self._get_url(url) | |
664 | cmd = ['pull', '--ff-only', url] |
|
665 | cmd = ['pull', '--ff-only', url] | |
665 | # If error occurs run_git_command raises RepositoryError already |
|
666 | # If error occurs run_git_command raises RepositoryError already | |
666 | self.run_git_command(cmd) |
|
667 | self.run_git_command(cmd) | |
667 |
|
668 | |||
668 | def fetch(self, url): |
|
669 | def fetch(self, url): | |
669 | """ |
|
670 | """ | |
670 | Tries to pull changes from external location. |
|
671 | Tries to pull changes from external location. | |
671 | """ |
|
672 | """ | |
672 | url = self._get_url(url) |
|
673 | url = self._get_url(url) | |
673 | so = self.run_git_command(['ls-remote', '-h', url]) |
|
674 | so = self.run_git_command(['ls-remote', '-h', url]) | |
674 | cmd = ['fetch', url, '--'] |
|
675 | cmd = ['fetch', url, '--'] | |
675 | for line in (x for x in so.splitlines()): |
|
676 | for line in (x for x in so.splitlines()): | |
676 | sha, ref = line.split('\t') |
|
677 | sha, ref = line.split('\t') | |
677 | cmd.append('+%s:%s' % (ref, ref)) |
|
678 | cmd.append('+%s:%s' % (ref, ref)) | |
678 | self.run_git_command(cmd) |
|
679 | self.run_git_command(cmd) | |
679 |
|
680 | |||
680 | def _update_server_info(self): |
|
681 | def _update_server_info(self): | |
681 | """ |
|
682 | """ | |
682 | runs gits update-server-info command in this repo instance |
|
683 | runs gits update-server-info command in this repo instance | |
683 | """ |
|
684 | """ | |
684 | from dulwich.server import update_server_info |
|
685 | from dulwich.server import update_server_info | |
685 | try: |
|
686 | try: | |
686 | update_server_info(self._repo) |
|
687 | update_server_info(self._repo) | |
687 | except OSError as e: |
|
688 | except OSError as e: | |
688 | if e.errno not in [errno.ENOENT, errno.EROFS]: |
|
689 | if e.errno not in [errno.ENOENT, errno.EROFS]: | |
689 | raise |
|
690 | raise | |
690 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock |
|
691 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock | |
691 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) |
|
692 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) | |
692 |
|
693 | |||
693 | @LazyProperty |
|
694 | @LazyProperty | |
694 | def workdir(self): |
|
695 | def workdir(self): | |
695 | """ |
|
696 | """ | |
696 | Returns ``Workdir`` instance for this repository. |
|
697 | Returns ``Workdir`` instance for this repository. | |
697 | """ |
|
698 | """ | |
698 | return GitWorkdir(self) |
|
699 | return GitWorkdir(self) | |
699 |
|
700 | |||
700 | def get_config_value(self, section, name, config_file=None): |
|
701 | def get_config_value(self, section, name, config_file=None): | |
701 | """ |
|
702 | """ | |
702 | Returns configuration value for a given [``section``] and ``name``. |
|
703 | Returns configuration value for a given [``section``] and ``name``. | |
703 |
|
704 | |||
704 | :param section: Section we want to retrieve value from |
|
705 | :param section: Section we want to retrieve value from | |
705 | :param name: Name of configuration we want to retrieve |
|
706 | :param name: Name of configuration we want to retrieve | |
706 | :param config_file: A path to file which should be used to retrieve |
|
707 | :param config_file: A path to file which should be used to retrieve | |
707 | configuration from (might also be a list of file paths) |
|
708 | configuration from (might also be a list of file paths) | |
708 | """ |
|
709 | """ | |
709 | if config_file is None: |
|
710 | if config_file is None: | |
710 | config_file = [] |
|
711 | config_file = [] | |
711 | elif isinstance(config_file, basestring): |
|
712 | elif isinstance(config_file, basestring): | |
712 | config_file = [config_file] |
|
713 | config_file = [config_file] | |
713 |
|
714 | |||
714 | def gen_configs(): |
|
715 | def gen_configs(): | |
715 | for path in config_file + self._config_files: |
|
716 | for path in config_file + self._config_files: | |
716 | try: |
|
717 | try: | |
717 | yield ConfigFile.from_path(path) |
|
718 | yield ConfigFile.from_path(path) | |
718 | except (IOError, OSError, ValueError): |
|
719 | except (IOError, OSError, ValueError): | |
719 | continue |
|
720 | continue | |
720 |
|
721 | |||
721 | for config in gen_configs(): |
|
722 | for config in gen_configs(): | |
722 | try: |
|
723 | try: | |
723 | return config.get(section, name) |
|
724 | return config.get(section, name) | |
724 | except KeyError: |
|
725 | except KeyError: | |
725 | continue |
|
726 | continue | |
726 | return None |
|
727 | return None | |
727 |
|
728 | |||
728 | def get_user_name(self, config_file=None): |
|
729 | def get_user_name(self, config_file=None): | |
729 | """ |
|
730 | """ | |
730 | Returns user's name from global configuration file. |
|
731 | Returns user's name from global configuration file. | |
731 |
|
732 | |||
732 | :param config_file: A path to file which should be used to retrieve |
|
733 | :param config_file: A path to file which should be used to retrieve | |
733 | configuration from (might also be a list of file paths) |
|
734 | configuration from (might also be a list of file paths) | |
734 | """ |
|
735 | """ | |
735 | return self.get_config_value('user', 'name', config_file) |
|
736 | return self.get_config_value('user', 'name', config_file) | |
736 |
|
737 | |||
737 | def get_user_email(self, config_file=None): |
|
738 | def get_user_email(self, config_file=None): | |
738 | """ |
|
739 | """ | |
739 | Returns user's email from global configuration file. |
|
740 | Returns user's email from global configuration file. | |
740 |
|
741 | |||
741 | :param config_file: A path to file which should be used to retrieve |
|
742 | :param config_file: A path to file which should be used to retrieve | |
742 | configuration from (might also be a list of file paths) |
|
743 | configuration from (might also be a list of file paths) | |
743 | """ |
|
744 | """ | |
744 | return self.get_config_value('user', 'email', config_file) |
|
745 | return self.get_config_value('user', 'email', config_file) |
@@ -1,21 +1,25 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.hg |
|
3 | vcs.backends.hg | |
4 | ~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Mercurial backend implementation. |
|
6 | Mercurial backend implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
|
12 | from kallithea.lib.vcs.utils import hgcompat | |||
|
13 | ||||
12 | from .changeset import MercurialChangeset |
|
14 | from .changeset import MercurialChangeset | |
13 | from .inmemory import MercurialInMemoryChangeset |
|
15 | from .inmemory import MercurialInMemoryChangeset | |
14 | from .repository import MercurialRepository |
|
16 | from .repository import MercurialRepository | |
15 | from .workdir import MercurialWorkdir |
|
17 | from .workdir import MercurialWorkdir | |
16 |
|
18 | |||
17 |
|
19 | |||
# public API of this backend package
__all__ = [
    'MercurialRepository', 'MercurialChangeset',
    'MercurialInMemoryChangeset', 'MercurialWorkdir',
]

# NOTE(review): presumably applies Kallithea's Mercurial compatibility
# monkey-patching at import time (see kallithea.lib.vcs.utils.hgcompat) —
# keep this call so the classes above run against the patched API.
hgcompat.monkey_do()
@@ -1,410 +1,413 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import posixpath |
|
2 | import posixpath | |
3 |
|
3 | |||
|
4 | import mercurial.archival | |||
|
5 | import mercurial.node | |||
|
6 | import mercurial.obsutil | |||
|
7 | ||||
4 | from kallithea.lib.vcs.backends.base import BaseChangeset |
|
8 | from kallithea.lib.vcs.backends.base import BaseChangeset | |
5 | from kallithea.lib.vcs.conf import settings |
|
9 | from kallithea.lib.vcs.conf import settings | |
6 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError |
|
10 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError | |
7 | from kallithea.lib.vcs.nodes import ( |
|
11 | from kallithea.lib.vcs.nodes import ( | |
8 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode) |
|
12 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode) | |
9 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_str, safe_unicode |
|
13 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_str, safe_unicode | |
10 | from kallithea.lib.vcs.utils.hgcompat import archival, hex, obsutil |
|
|||
11 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
14 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
12 | from kallithea.lib.vcs.utils.paths import get_dirs_for_path |
|
15 | from kallithea.lib.vcs.utils.paths import get_dirs_for_path | |
13 |
|
16 | |||
14 |
|
17 | |||
15 | class MercurialChangeset(BaseChangeset): |
|
18 | class MercurialChangeset(BaseChangeset): | |
16 | """ |
|
19 | """ | |
17 | Represents state of the repository at a revision. |
|
20 | Represents state of the repository at a revision. | |
18 | """ |
|
21 | """ | |
19 |
|
22 | |||
    def __init__(self, repository, revision):
        """
        Bind this changeset to ``repository`` at ``revision``.

        :param repository: the owning Mercurial repository wrapper
        :param revision: changeset identifier, as a string
        """
        self.repository = repository
        assert isinstance(revision, basestring), repr(revision)
        # resolve the revision string to a Mercurial changectx
        self._ctx = repository._repo[ascii_bytes(revision)]
        self.raw_id = ascii_str(self._ctx.hex())  # full hex hash as str
        self.revision = self._ctx._rev  # local numeric revision number
        self.nodes = {}  # node cache — presumably path -> Node, filled lazily elsewhere
27 |
|
30 | |||
28 | @LazyProperty |
|
31 | @LazyProperty | |
29 | def tags(self): |
|
32 | def tags(self): | |
30 | return [safe_unicode(tag) for tag in self._ctx.tags()] |
|
33 | return [safe_unicode(tag) for tag in self._ctx.tags()] | |
31 |
|
34 | |||
32 | @LazyProperty |
|
35 | @LazyProperty | |
33 | def branch(self): |
|
36 | def branch(self): | |
34 | return safe_unicode(self._ctx.branch()) |
|
37 | return safe_unicode(self._ctx.branch()) | |
35 |
|
38 | |||
36 | @LazyProperty |
|
39 | @LazyProperty | |
37 | def branches(self): |
|
40 | def branches(self): | |
38 | return [safe_unicode(self._ctx.branch())] |
|
41 | return [safe_unicode(self._ctx.branch())] | |
39 |
|
42 | |||
40 | @LazyProperty |
|
43 | @LazyProperty | |
41 | def closesbranch(self): |
|
44 | def closesbranch(self): | |
42 | return self._ctx.closesbranch() |
|
45 | return self._ctx.closesbranch() | |
43 |
|
46 | |||
44 | @LazyProperty |
|
47 | @LazyProperty | |
45 | def obsolete(self): |
|
48 | def obsolete(self): | |
46 | return self._ctx.obsolete() |
|
49 | return self._ctx.obsolete() | |
47 |
|
50 | |||
48 | @LazyProperty |
|
51 | @LazyProperty | |
49 | def bumped(self): |
|
52 | def bumped(self): | |
50 | return self._ctx.phasedivergent() |
|
53 | return self._ctx.phasedivergent() | |
51 |
|
54 | |||
52 | @LazyProperty |
|
55 | @LazyProperty | |
53 | def divergent(self): |
|
56 | def divergent(self): | |
54 | return self._ctx.contentdivergent() |
|
57 | return self._ctx.contentdivergent() | |
55 |
|
58 | |||
56 | @LazyProperty |
|
59 | @LazyProperty | |
57 | def extinct(self): |
|
60 | def extinct(self): | |
58 | return self._ctx.extinct() |
|
61 | return self._ctx.extinct() | |
59 |
|
62 | |||
60 | @LazyProperty |
|
63 | @LazyProperty | |
61 | def unstable(self): |
|
64 | def unstable(self): | |
62 | return self._ctx.orphan() |
|
65 | return self._ctx.orphan() | |
63 |
|
66 | |||
64 | @LazyProperty |
|
67 | @LazyProperty | |
65 | def phase(self): |
|
68 | def phase(self): | |
66 | if(self._ctx.phase() == 1): |
|
69 | if(self._ctx.phase() == 1): | |
67 | return 'Draft' |
|
70 | return 'Draft' | |
68 | elif(self._ctx.phase() == 2): |
|
71 | elif(self._ctx.phase() == 2): | |
69 | return 'Secret' |
|
72 | return 'Secret' | |
70 | else: |
|
73 | else: | |
71 | return '' |
|
74 | return '' | |
72 |
|
75 | |||
73 | @LazyProperty |
|
76 | @LazyProperty | |
74 | def successors(self): |
|
77 | def successors(self): | |
75 | successors = obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True) |
|
78 | successors = mercurial.obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True) | |
76 | if successors: |
|
79 | if successors: | |
77 | # flatten the list here handles both divergent (len > 1) |
|
80 | # flatten the list here handles both divergent (len > 1) | |
78 | # and the usual case (len = 1) |
|
81 | # and the usual case (len = 1) | |
79 | successors = [hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()] |
|
82 | successors = [mercurial.node.hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()] | |
80 |
|
83 | |||
81 | return successors |
|
84 | return successors | |
82 |
|
85 | |||
83 | @LazyProperty |
|
86 | @LazyProperty | |
84 | def predecessors(self): |
|
87 | def predecessors(self): | |
85 | return [hex(n)[:12] for n in obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())] |
|
88 | return [mercurial.node.hex(n)[:12] for n in mercurial.obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())] | |
86 |
|
89 | |||
87 | @LazyProperty |
|
90 | @LazyProperty | |
88 | def bookmarks(self): |
|
91 | def bookmarks(self): | |
89 | return [safe_unicode(bookmark) for bookmark in self._ctx.bookmarks()] |
|
92 | return [safe_unicode(bookmark) for bookmark in self._ctx.bookmarks()] | |
90 |
|
93 | |||
91 | @LazyProperty |
|
94 | @LazyProperty | |
92 | def message(self): |
|
95 | def message(self): | |
93 | return safe_unicode(self._ctx.description()) |
|
96 | return safe_unicode(self._ctx.description()) | |
94 |
|
97 | |||
95 | @LazyProperty |
|
98 | @LazyProperty | |
96 | def committer(self): |
|
99 | def committer(self): | |
97 | return safe_unicode(self.author) |
|
100 | return safe_unicode(self.author) | |
98 |
|
101 | |||
99 | @LazyProperty |
|
102 | @LazyProperty | |
100 | def author(self): |
|
103 | def author(self): | |
101 | return safe_unicode(self._ctx.user()) |
|
104 | return safe_unicode(self._ctx.user()) | |
102 |
|
105 | |||
103 | @LazyProperty |
|
106 | @LazyProperty | |
104 | def date(self): |
|
107 | def date(self): | |
105 | return date_fromtimestamp(*self._ctx.date()) |
|
108 | return date_fromtimestamp(*self._ctx.date()) | |
106 |
|
109 | |||
107 | @LazyProperty |
|
110 | @LazyProperty | |
108 | def _timestamp(self): |
|
111 | def _timestamp(self): | |
109 | return self._ctx.date()[0] |
|
112 | return self._ctx.date()[0] | |
110 |
|
113 | |||
111 | @LazyProperty |
|
114 | @LazyProperty | |
112 | def status(self): |
|
115 | def status(self): | |
113 | """ |
|
116 | """ | |
114 | Returns modified, added, removed, deleted files for current changeset |
|
117 | Returns modified, added, removed, deleted files for current changeset | |
115 | """ |
|
118 | """ | |
116 | return self.repository._repo.status(self._ctx.p1().node(), |
|
119 | return self.repository._repo.status(self._ctx.p1().node(), | |
117 | self._ctx.node()) |
|
120 | self._ctx.node()) | |
118 |
|
121 | |||
119 | @LazyProperty |
|
122 | @LazyProperty | |
120 | def _file_paths(self): |
|
123 | def _file_paths(self): | |
121 | return list(self._ctx) |
|
124 | return list(self._ctx) | |
122 |
|
125 | |||
123 | @LazyProperty |
|
126 | @LazyProperty | |
124 | def _dir_paths(self): |
|
127 | def _dir_paths(self): | |
125 | p = list(set(get_dirs_for_path(*self._file_paths))) |
|
128 | p = list(set(get_dirs_for_path(*self._file_paths))) | |
126 | p.insert(0, '') |
|
129 | p.insert(0, '') | |
127 | return p |
|
130 | return p | |
128 |
|
131 | |||
129 | @LazyProperty |
|
132 | @LazyProperty | |
130 | def _paths(self): |
|
133 | def _paths(self): | |
131 | return self._dir_paths + self._file_paths |
|
134 | return self._dir_paths + self._file_paths | |
132 |
|
135 | |||
133 | @LazyProperty |
|
136 | @LazyProperty | |
134 | def id(self): |
|
137 | def id(self): | |
135 | if self.last: |
|
138 | if self.last: | |
136 | return u'tip' |
|
139 | return u'tip' | |
137 | return self.short_id |
|
140 | return self.short_id | |
138 |
|
141 | |||
139 | @LazyProperty |
|
142 | @LazyProperty | |
140 | def short_id(self): |
|
143 | def short_id(self): | |
141 | return self.raw_id[:12] |
|
144 | return self.raw_id[:12] | |
142 |
|
145 | |||
143 | @LazyProperty |
|
146 | @LazyProperty | |
144 | def parents(self): |
|
147 | def parents(self): | |
145 | """ |
|
148 | """ | |
146 | Returns list of parents changesets. |
|
149 | Returns list of parents changesets. | |
147 | """ |
|
150 | """ | |
148 | return [self.repository.get_changeset(parent.rev()) |
|
151 | return [self.repository.get_changeset(parent.rev()) | |
149 | for parent in self._ctx.parents() if parent.rev() >= 0] |
|
152 | for parent in self._ctx.parents() if parent.rev() >= 0] | |
150 |
|
153 | |||
151 | @LazyProperty |
|
154 | @LazyProperty | |
152 | def children(self): |
|
155 | def children(self): | |
153 | """ |
|
156 | """ | |
154 | Returns list of children changesets. |
|
157 | Returns list of children changesets. | |
155 | """ |
|
158 | """ | |
156 | return [self.repository.get_changeset(child.rev()) |
|
159 | return [self.repository.get_changeset(child.rev()) | |
157 | for child in self._ctx.children() if child.rev() >= 0] |
|
160 | for child in self._ctx.children() if child.rev() >= 0] | |
158 |
|
161 | |||
159 | def next(self, branch=None): |
|
162 | def next(self, branch=None): | |
160 | if branch and self.branch != branch: |
|
163 | if branch and self.branch != branch: | |
161 | raise VCSError('Branch option used on changeset not belonging ' |
|
164 | raise VCSError('Branch option used on changeset not belonging ' | |
162 | 'to that branch') |
|
165 | 'to that branch') | |
163 |
|
166 | |||
164 | cs = self |
|
167 | cs = self | |
165 | while True: |
|
168 | while True: | |
166 | try: |
|
169 | try: | |
167 | next_ = cs.repository.revisions.index(cs.raw_id) + 1 |
|
170 | next_ = cs.repository.revisions.index(cs.raw_id) + 1 | |
168 | next_rev = cs.repository.revisions[next_] |
|
171 | next_rev = cs.repository.revisions[next_] | |
169 | except IndexError: |
|
172 | except IndexError: | |
170 | raise ChangesetDoesNotExistError |
|
173 | raise ChangesetDoesNotExistError | |
171 | cs = cs.repository.get_changeset(next_rev) |
|
174 | cs = cs.repository.get_changeset(next_rev) | |
172 |
|
175 | |||
173 | if not branch or branch == cs.branch: |
|
176 | if not branch or branch == cs.branch: | |
174 | return cs |
|
177 | return cs | |
175 |
|
178 | |||
176 | def prev(self, branch=None): |
|
179 | def prev(self, branch=None): | |
177 | if branch and self.branch != branch: |
|
180 | if branch and self.branch != branch: | |
178 | raise VCSError('Branch option used on changeset not belonging ' |
|
181 | raise VCSError('Branch option used on changeset not belonging ' | |
179 | 'to that branch') |
|
182 | 'to that branch') | |
180 |
|
183 | |||
181 | cs = self |
|
184 | cs = self | |
182 | while True: |
|
185 | while True: | |
183 | try: |
|
186 | try: | |
184 | prev_ = cs.repository.revisions.index(cs.raw_id) - 1 |
|
187 | prev_ = cs.repository.revisions.index(cs.raw_id) - 1 | |
185 | if prev_ < 0: |
|
188 | if prev_ < 0: | |
186 | raise IndexError |
|
189 | raise IndexError | |
187 | prev_rev = cs.repository.revisions[prev_] |
|
190 | prev_rev = cs.repository.revisions[prev_] | |
188 | except IndexError: |
|
191 | except IndexError: | |
189 | raise ChangesetDoesNotExistError |
|
192 | raise ChangesetDoesNotExistError | |
190 | cs = cs.repository.get_changeset(prev_rev) |
|
193 | cs = cs.repository.get_changeset(prev_rev) | |
191 |
|
194 | |||
192 | if not branch or branch == cs.branch: |
|
195 | if not branch or branch == cs.branch: | |
193 | return cs |
|
196 | return cs | |
194 |
|
197 | |||
195 | def diff(self): |
|
198 | def diff(self): | |
196 | # Only used to feed diffstat |
|
199 | # Only used to feed diffstat | |
197 | return b''.join(self._ctx.diff()) |
|
200 | return b''.join(self._ctx.diff()) | |
198 |
|
201 | |||
199 | def _fix_path(self, path): |
|
202 | def _fix_path(self, path): | |
200 | """ |
|
203 | """ | |
201 | Paths are stored without trailing slash so we need to get rid off it if |
|
204 | Paths are stored without trailing slash so we need to get rid off it if | |
202 | needed. Also mercurial keeps filenodes as str so we need to decode |
|
205 | needed. Also mercurial keeps filenodes as str so we need to decode | |
203 | from unicode to str |
|
206 | from unicode to str | |
204 | """ |
|
207 | """ | |
205 | if path.endswith('/'): |
|
208 | if path.endswith('/'): | |
206 | path = path.rstrip('/') |
|
209 | path = path.rstrip('/') | |
207 |
|
210 | |||
208 | return safe_str(path) |
|
211 | return safe_str(path) | |
209 |
|
212 | |||
210 | def _get_kind(self, path): |
|
213 | def _get_kind(self, path): | |
211 | path = self._fix_path(path) |
|
214 | path = self._fix_path(path) | |
212 | if path in self._file_paths: |
|
215 | if path in self._file_paths: | |
213 | return NodeKind.FILE |
|
216 | return NodeKind.FILE | |
214 | elif path in self._dir_paths: |
|
217 | elif path in self._dir_paths: | |
215 | return NodeKind.DIR |
|
218 | return NodeKind.DIR | |
216 | else: |
|
219 | else: | |
217 | raise ChangesetError("Node does not exist at the given path '%s'" |
|
220 | raise ChangesetError("Node does not exist at the given path '%s'" | |
218 | % (path)) |
|
221 | % (path)) | |
219 |
|
222 | |||
220 | def _get_filectx(self, path): |
|
223 | def _get_filectx(self, path): | |
221 | path = self._fix_path(path) |
|
224 | path = self._fix_path(path) | |
222 | if self._get_kind(path) != NodeKind.FILE: |
|
225 | if self._get_kind(path) != NodeKind.FILE: | |
223 | raise ChangesetError("File does not exist for revision %s at " |
|
226 | raise ChangesetError("File does not exist for revision %s at " | |
224 | " '%s'" % (self.raw_id, path)) |
|
227 | " '%s'" % (self.raw_id, path)) | |
225 | return self._ctx.filectx(path) |
|
228 | return self._ctx.filectx(path) | |
226 |
|
229 | |||
227 | def _extract_submodules(self): |
|
230 | def _extract_submodules(self): | |
228 | """ |
|
231 | """ | |
229 | returns a dictionary with submodule information from substate file |
|
232 | returns a dictionary with submodule information from substate file | |
230 | of hg repository |
|
233 | of hg repository | |
231 | """ |
|
234 | """ | |
232 | return self._ctx.substate |
|
235 | return self._ctx.substate | |
233 |
|
236 | |||
234 | def get_file_mode(self, path): |
|
237 | def get_file_mode(self, path): | |
235 | """ |
|
238 | """ | |
236 | Returns stat mode of the file at the given ``path``. |
|
239 | Returns stat mode of the file at the given ``path``. | |
237 | """ |
|
240 | """ | |
238 | fctx = self._get_filectx(path) |
|
241 | fctx = self._get_filectx(path) | |
239 | if b'x' in fctx.flags(): |
|
242 | if b'x' in fctx.flags(): | |
240 | return 0o100755 |
|
243 | return 0o100755 | |
241 | else: |
|
244 | else: | |
242 | return 0o100644 |
|
245 | return 0o100644 | |
243 |
|
246 | |||
244 | def get_file_content(self, path): |
|
247 | def get_file_content(self, path): | |
245 | """ |
|
248 | """ | |
246 | Returns content of the file at given ``path``. |
|
249 | Returns content of the file at given ``path``. | |
247 | """ |
|
250 | """ | |
248 | fctx = self._get_filectx(path) |
|
251 | fctx = self._get_filectx(path) | |
249 | return fctx.data() |
|
252 | return fctx.data() | |
250 |
|
253 | |||
251 | def get_file_size(self, path): |
|
254 | def get_file_size(self, path): | |
252 | """ |
|
255 | """ | |
253 | Returns size of the file at given ``path``. |
|
256 | Returns size of the file at given ``path``. | |
254 | """ |
|
257 | """ | |
255 | fctx = self._get_filectx(path) |
|
258 | fctx = self._get_filectx(path) | |
256 | return fctx.size() |
|
259 | return fctx.size() | |
257 |
|
260 | |||
258 | def get_file_changeset(self, path): |
|
261 | def get_file_changeset(self, path): | |
259 | """ |
|
262 | """ | |
260 | Returns last commit of the file at the given ``path``. |
|
263 | Returns last commit of the file at the given ``path``. | |
261 | """ |
|
264 | """ | |
262 | return self.get_file_history(path, limit=1)[0] |
|
265 | return self.get_file_history(path, limit=1)[0] | |
263 |
|
266 | |||
264 | def get_file_history(self, path, limit=None): |
|
267 | def get_file_history(self, path, limit=None): | |
265 | """ |
|
268 | """ | |
266 | Returns history of file as reversed list of ``Changeset`` objects for |
|
269 | Returns history of file as reversed list of ``Changeset`` objects for | |
267 | which file at given ``path`` has been modified. |
|
270 | which file at given ``path`` has been modified. | |
268 | """ |
|
271 | """ | |
269 | fctx = self._get_filectx(path) |
|
272 | fctx = self._get_filectx(path) | |
270 | hist = [] |
|
273 | hist = [] | |
271 | cnt = 0 |
|
274 | cnt = 0 | |
272 | for cs in reversed([x for x in fctx.filelog()]): |
|
275 | for cs in reversed([x for x in fctx.filelog()]): | |
273 | cnt += 1 |
|
276 | cnt += 1 | |
274 | hist.append(hex(fctx.filectx(cs).node())) |
|
277 | hist.append(mercurial.node.hex(fctx.filectx(cs).node())) | |
275 | if limit is not None and cnt == limit: |
|
278 | if limit is not None and cnt == limit: | |
276 | break |
|
279 | break | |
277 |
|
280 | |||
278 | return [self.repository.get_changeset(node) for node in hist] |
|
281 | return [self.repository.get_changeset(node) for node in hist] | |
279 |
|
282 | |||
280 | def get_file_annotate(self, path): |
|
283 | def get_file_annotate(self, path): | |
281 | """ |
|
284 | """ | |
282 | Returns a generator of four element tuples with |
|
285 | Returns a generator of four element tuples with | |
283 | lineno, sha, changeset lazy loader and line |
|
286 | lineno, sha, changeset lazy loader and line | |
284 | """ |
|
287 | """ | |
285 | annotations = self._get_filectx(path).annotate() |
|
288 | annotations = self._get_filectx(path).annotate() | |
286 | annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations] |
|
289 | annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations] | |
287 | for i, (fctx, l) in enumerate(annotation_lines): |
|
290 | for i, (fctx, l) in enumerate(annotation_lines): | |
288 | sha = ascii_str(fctx.hex()) |
|
291 | sha = ascii_str(fctx.hex()) | |
289 | yield (i + 1, sha, lambda sha=sha, l=l: self.repository.get_changeset(sha), l) |
|
292 | yield (i + 1, sha, lambda sha=sha, l=l: self.repository.get_changeset(sha), l) | |
290 |
|
293 | |||
291 | def fill_archive(self, stream=None, kind='tgz', prefix=None, |
|
294 | def fill_archive(self, stream=None, kind='tgz', prefix=None, | |
292 | subrepos=False): |
|
295 | subrepos=False): | |
293 | """ |
|
296 | """ | |
294 | Fills up given stream. |
|
297 | Fills up given stream. | |
295 |
|
298 | |||
296 | :param stream: file like object. |
|
299 | :param stream: file like object. | |
297 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. |
|
300 | :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``. | |
298 | Default: ``tgz``. |
|
301 | Default: ``tgz``. | |
299 | :param prefix: name of root directory in archive. |
|
302 | :param prefix: name of root directory in archive. | |
300 | Default is repository name and changeset's raw_id joined with dash |
|
303 | Default is repository name and changeset's raw_id joined with dash | |
301 | (``repo-tip.<KIND>``). |
|
304 | (``repo-tip.<KIND>``). | |
302 | :param subrepos: include subrepos in this archive. |
|
305 | :param subrepos: include subrepos in this archive. | |
303 |
|
306 | |||
304 | :raise ImproperArchiveTypeError: If given kind is wrong. |
|
307 | :raise ImproperArchiveTypeError: If given kind is wrong. | |
305 | :raise VcsError: If given stream is None |
|
308 | :raise VcsError: If given stream is None | |
306 | """ |
|
309 | """ | |
307 | allowed_kinds = settings.ARCHIVE_SPECS |
|
310 | allowed_kinds = settings.ARCHIVE_SPECS | |
308 | if kind not in allowed_kinds: |
|
311 | if kind not in allowed_kinds: | |
309 | raise ImproperArchiveTypeError('Archive kind not supported use one' |
|
312 | raise ImproperArchiveTypeError('Archive kind not supported use one' | |
310 | 'of %s' % ' '.join(allowed_kinds)) |
|
313 | 'of %s' % ' '.join(allowed_kinds)) | |
311 |
|
314 | |||
312 | if stream is None: |
|
315 | if stream is None: | |
313 | raise VCSError('You need to pass in a valid stream for filling' |
|
316 | raise VCSError('You need to pass in a valid stream for filling' | |
314 | ' with archival data') |
|
317 | ' with archival data') | |
315 |
|
318 | |||
316 | if prefix is None: |
|
319 | if prefix is None: | |
317 | prefix = '%s-%s' % (self.repository.name, self.short_id) |
|
320 | prefix = '%s-%s' % (self.repository.name, self.short_id) | |
318 | elif prefix.startswith('/'): |
|
321 | elif prefix.startswith('/'): | |
319 | raise VCSError("Prefix cannot start with leading slash") |
|
322 | raise VCSError("Prefix cannot start with leading slash") | |
320 | elif prefix.strip() == '': |
|
323 | elif prefix.strip() == '': | |
321 | raise VCSError("Prefix cannot be empty") |
|
324 | raise VCSError("Prefix cannot be empty") | |
322 |
|
325 | |||
323 | archival.archive(self.repository._repo, stream, ascii_bytes(self.raw_id), |
|
326 | mercurial.archival.archive(self.repository._repo, stream, ascii_bytes(self.raw_id), | |
324 | kind, prefix=prefix, subrepos=subrepos) |
|
327 | kind, prefix=prefix, subrepos=subrepos) | |
325 |
|
328 | |||
326 | def get_nodes(self, path): |
|
329 | def get_nodes(self, path): | |
327 | """ |
|
330 | """ | |
328 | Returns combined ``DirNode`` and ``FileNode`` objects list representing |
|
331 | Returns combined ``DirNode`` and ``FileNode`` objects list representing | |
329 | state of changeset at the given ``path``. If node at the given ``path`` |
|
332 | state of changeset at the given ``path``. If node at the given ``path`` | |
330 | is not instance of ``DirNode``, ChangesetError would be raised. |
|
333 | is not instance of ``DirNode``, ChangesetError would be raised. | |
331 | """ |
|
334 | """ | |
332 |
|
335 | |||
333 | if self._get_kind(path) != NodeKind.DIR: |
|
336 | if self._get_kind(path) != NodeKind.DIR: | |
334 | raise ChangesetError("Directory does not exist for revision %s at " |
|
337 | raise ChangesetError("Directory does not exist for revision %s at " | |
335 | " '%s'" % (self.revision, path)) |
|
338 | " '%s'" % (self.revision, path)) | |
336 | path = self._fix_path(path) |
|
339 | path = self._fix_path(path) | |
337 |
|
340 | |||
338 | filenodes = [FileNode(f, changeset=self) for f in self._file_paths |
|
341 | filenodes = [FileNode(f, changeset=self) for f in self._file_paths | |
339 | if os.path.dirname(f) == path] |
|
342 | if os.path.dirname(f) == path] | |
340 | dirs = path == '' and '' or [d for d in self._dir_paths |
|
343 | dirs = path == '' and '' or [d for d in self._dir_paths | |
341 | if d and posixpath.dirname(d) == path] |
|
344 | if d and posixpath.dirname(d) == path] | |
342 | dirnodes = [DirNode(d, changeset=self) for d in dirs |
|
345 | dirnodes = [DirNode(d, changeset=self) for d in dirs | |
343 | if os.path.dirname(d) == path] |
|
346 | if os.path.dirname(d) == path] | |
344 |
|
347 | |||
345 | als = self.repository.alias |
|
348 | als = self.repository.alias | |
346 | for k, vals in self._extract_submodules().iteritems(): |
|
349 | for k, vals in self._extract_submodules().iteritems(): | |
347 | #vals = url,rev,type |
|
350 | #vals = url,rev,type | |
348 | loc = vals[0] |
|
351 | loc = vals[0] | |
349 | cs = vals[1] |
|
352 | cs = vals[1] | |
350 | dirnodes.append(SubModuleNode(k, url=loc, changeset=cs, |
|
353 | dirnodes.append(SubModuleNode(k, url=loc, changeset=cs, | |
351 | alias=als)) |
|
354 | alias=als)) | |
352 | nodes = dirnodes + filenodes |
|
355 | nodes = dirnodes + filenodes | |
353 | for node in nodes: |
|
356 | for node in nodes: | |
354 | self.nodes[node.path] = node |
|
357 | self.nodes[node.path] = node | |
355 | nodes.sort() |
|
358 | nodes.sort() | |
356 | return nodes |
|
359 | return nodes | |
357 |
|
360 | |||
358 | def get_node(self, path): |
|
361 | def get_node(self, path): | |
359 | """ |
|
362 | """ | |
360 | Returns ``Node`` object from the given ``path``. If there is no node at |
|
363 | Returns ``Node`` object from the given ``path``. If there is no node at | |
361 | the given ``path``, ``ChangesetError`` would be raised. |
|
364 | the given ``path``, ``ChangesetError`` would be raised. | |
362 | """ |
|
365 | """ | |
363 | path = self._fix_path(path) |
|
366 | path = self._fix_path(path) | |
364 | if path not in self.nodes: |
|
367 | if path not in self.nodes: | |
365 | if path in self._file_paths: |
|
368 | if path in self._file_paths: | |
366 | node = FileNode(path, changeset=self) |
|
369 | node = FileNode(path, changeset=self) | |
367 | elif path in self._dir_paths or path in self._dir_paths: |
|
370 | elif path in self._dir_paths or path in self._dir_paths: | |
368 | if path == '': |
|
371 | if path == '': | |
369 | node = RootNode(changeset=self) |
|
372 | node = RootNode(changeset=self) | |
370 | else: |
|
373 | else: | |
371 | node = DirNode(path, changeset=self) |
|
374 | node = DirNode(path, changeset=self) | |
372 | else: |
|
375 | else: | |
373 | raise NodeDoesNotExistError("There is no file nor directory " |
|
376 | raise NodeDoesNotExistError("There is no file nor directory " | |
374 | "at the given path: '%s' at revision %s" |
|
377 | "at the given path: '%s' at revision %s" | |
375 | % (path, self.short_id)) |
|
378 | % (path, self.short_id)) | |
376 | # cache node |
|
379 | # cache node | |
377 | self.nodes[path] = node |
|
380 | self.nodes[path] = node | |
378 | return self.nodes[path] |
|
381 | return self.nodes[path] | |
379 |
|
382 | |||
380 | @LazyProperty |
|
383 | @LazyProperty | |
381 | def affected_files(self): |
|
384 | def affected_files(self): | |
382 | """ |
|
385 | """ | |
383 | Gets a fast accessible file changes for given changeset |
|
386 | Gets a fast accessible file changes for given changeset | |
384 | """ |
|
387 | """ | |
385 | return self._ctx.files() |
|
388 | return self._ctx.files() | |
386 |
|
389 | |||
387 | @property |
|
390 | @property | |
388 | def added(self): |
|
391 | def added(self): | |
389 | """ |
|
392 | """ | |
390 | Returns list of added ``FileNode`` objects. |
|
393 | Returns list of added ``FileNode`` objects. | |
391 | """ |
|
394 | """ | |
392 | return AddedFileNodesGenerator([n for n in self.status[1]], self) |
|
395 | return AddedFileNodesGenerator([n for n in self.status[1]], self) | |
393 |
|
396 | |||
394 | @property |
|
397 | @property | |
395 | def changed(self): |
|
398 | def changed(self): | |
396 | """ |
|
399 | """ | |
397 | Returns list of modified ``FileNode`` objects. |
|
400 | Returns list of modified ``FileNode`` objects. | |
398 | """ |
|
401 | """ | |
399 | return ChangedFileNodesGenerator([n for n in self.status[0]], self) |
|
402 | return ChangedFileNodesGenerator([n for n in self.status[0]], self) | |
400 |
|
403 | |||
401 | @property |
|
404 | @property | |
402 | def removed(self): |
|
405 | def removed(self): | |
403 | """ |
|
406 | """ | |
404 | Returns list of removed ``FileNode`` objects. |
|
407 | Returns list of removed ``FileNode`` objects. | |
405 | """ |
|
408 | """ | |
406 | return RemovedFileNodesGenerator([n for n in self.status[2]], self) |
|
409 | return RemovedFileNodesGenerator([n for n in self.status[2]], self) | |
407 |
|
410 | |||
408 | @LazyProperty |
|
411 | @LazyProperty | |
409 | def extra(self): |
|
412 | def extra(self): | |
410 | return self._ctx.extra() |
|
413 | return self._ctx.extra() |
@@ -1,106 +1,109 b'' | |||||
1 | import datetime |
|
1 | import datetime | |
2 |
|
2 | |||
|
3 | import mercurial.context | |||
|
4 | import mercurial.node | |||
|
5 | ||||
3 | from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset |
|
6 | from kallithea.lib.vcs.backends.base import BaseInMemoryChangeset | |
4 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
7 | from kallithea.lib.vcs.exceptions import RepositoryError | |
5 | from kallithea.lib.vcs.utils import ascii_str, safe_bytes |
|
8 | from kallithea.lib.vcs.utils import ascii_str, safe_bytes | |
6 | from kallithea.lib.vcs.utils.hgcompat import hex, memctx, memfilectx |
|
|||
7 |
|
9 | |||
8 |
|
10 | |||
9 | class MercurialInMemoryChangeset(BaseInMemoryChangeset): |
|
11 | class MercurialInMemoryChangeset(BaseInMemoryChangeset): | |
10 |
|
12 | |||
11 | def commit(self, message, author, parents=None, branch=None, date=None, |
|
13 | def commit(self, message, author, parents=None, branch=None, date=None, | |
12 | **kwargs): |
|
14 | **kwargs): | |
13 | """ |
|
15 | """ | |
14 | Performs in-memory commit (doesn't check workdir in any way) and |
|
16 | Performs in-memory commit (doesn't check workdir in any way) and | |
15 | returns newly created ``Changeset``. Updates repository's |
|
17 | returns newly created ``Changeset``. Updates repository's | |
16 | ``revisions``. |
|
18 | ``revisions``. | |
17 |
|
19 | |||
18 | :param message: message of the commit |
|
20 | :param message: message of the commit | |
19 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" |
|
21 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" | |
20 | :param parents: single parent or sequence of parents from which commit |
|
22 | :param parents: single parent or sequence of parents from which commit | |
21 | would be derived |
|
23 | would be derived | |
22 | :param date: ``datetime.datetime`` instance. Defaults to |
|
24 | :param date: ``datetime.datetime`` instance. Defaults to | |
23 | ``datetime.datetime.now()``. |
|
25 | ``datetime.datetime.now()``. | |
24 | :param branch: branch name, as string. If none given, default backend's |
|
26 | :param branch: branch name, as string. If none given, default backend's | |
25 | branch would be used. |
|
27 | branch would be used. | |
26 |
|
28 | |||
27 | :raises ``CommitError``: if any error occurs while committing |
|
29 | :raises ``CommitError``: if any error occurs while committing | |
28 | """ |
|
30 | """ | |
29 | self.check_integrity(parents) |
|
31 | self.check_integrity(parents) | |
30 |
|
32 | |||
31 | from .repository import MercurialRepository |
|
33 | from .repository import MercurialRepository | |
32 | if not isinstance(message, unicode) or not isinstance(author, unicode): |
|
34 | if not isinstance(message, unicode) or not isinstance(author, unicode): | |
33 | raise RepositoryError('Given message and author needs to be ' |
|
35 | raise RepositoryError('Given message and author needs to be ' | |
34 | 'an <unicode> instance got %r & %r instead' |
|
36 | 'an <unicode> instance got %r & %r instead' | |
35 | % (type(message), type(author))) |
|
37 | % (type(message), type(author))) | |
36 |
|
38 | |||
37 | if branch is None: |
|
39 | if branch is None: | |
38 | branch = MercurialRepository.DEFAULT_BRANCH_NAME |
|
40 | branch = MercurialRepository.DEFAULT_BRANCH_NAME | |
39 | kwargs[b'branch'] = branch |
|
41 | kwargs[b'branch'] = branch | |
40 |
|
42 | |||
41 | def filectxfn(_repo, memctx, path): |
|
43 | def filectxfn(_repo, memctx, path): | |
42 | """ |
|
44 | """ | |
43 | Marks given path as added/changed/removed in a given _repo. This is |
|
45 | Marks given path as added/changed/removed in a given _repo. This is | |
44 | for internal mercurial commit function. |
|
46 | for internal mercurial commit function. | |
45 | """ |
|
47 | """ | |
46 |
|
48 | |||
47 | # check if this path is removed |
|
49 | # check if this path is removed | |
48 | if path in (node.path for node in self.removed): |
|
50 | if path in (node.path for node in self.removed): | |
49 | return None |
|
51 | return None | |
50 |
|
52 | |||
51 | # check if this path is added |
|
53 | # check if this path is added | |
52 | for node in self.added: |
|
54 | for node in self.added: | |
53 | if node.path == path: |
|
55 | if node.path == path: | |
54 | return memfilectx(_repo, memctx, path=node.path, |
|
56 | return mercurial.context.memfilectx(_repo, memctx, path=node.path, | |
55 | data=node.content, |
|
57 | data=node.content, | |
56 | islink=False, |
|
58 | islink=False, | |
57 | isexec=node.is_executable, |
|
59 | isexec=node.is_executable, | |
58 | copysource=False) |
|
60 | copysource=False) | |
59 |
|
61 | |||
60 | # or changed |
|
62 | # or changed | |
61 | for node in self.changed: |
|
63 | for node in self.changed: | |
62 | if node.path == path: |
|
64 | if node.path == path: | |
63 | return memfilectx(_repo, memctx, path=node.path, |
|
65 | return mercurial.context.memfilectx(_repo, memctx, path=node.path, | |
64 | data=node.content, |
|
66 | data=node.content, | |
65 | islink=False, |
|
67 | islink=False, | |
66 | isexec=node.is_executable, |
|
68 | isexec=node.is_executable, | |
67 | copysource=False) |
|
69 | copysource=False) | |
68 |
|
70 | |||
69 | raise RepositoryError("Given path haven't been marked as added," |
|
71 | raise RepositoryError("Given path haven't been marked as added," | |
70 | "changed or removed (%s)" % path) |
|
72 | "changed or removed (%s)" % path) | |
71 |
|
73 | |||
72 | parents = [None, None] |
|
74 | parents = [None, None] | |
73 | for i, parent in enumerate(self.parents): |
|
75 | for i, parent in enumerate(self.parents): | |
74 | if parent is not None: |
|
76 | if parent is not None: | |
75 | parents[i] = parent._ctx.node() |
|
77 | parents[i] = parent._ctx.node() | |
76 |
|
78 | |||
77 | if date and isinstance(date, datetime.datetime): |
|
79 | if date and isinstance(date, datetime.datetime): | |
78 | date = date.strftime('%a, %d %b %Y %H:%M:%S') |
|
80 | date = date.strftime('%a, %d %b %Y %H:%M:%S') | |
79 |
|
81 | |||
80 | commit_ctx = memctx(repo=self.repository._repo, |
|
82 | commit_ctx = mercurial.context.memctx( | |
|
83 | repo=self.repository._repo, | |||
81 | parents=parents, |
|
84 | parents=parents, | |
82 | text=b'', |
|
85 | text=b'', | |
83 | files=self.get_paths(), |
|
86 | files=self.get_paths(), | |
84 | filectxfn=filectxfn, |
|
87 | filectxfn=filectxfn, | |
85 | user=author, |
|
88 | user=author, | |
86 | date=date, |
|
89 | date=date, | |
87 | extra=kwargs) |
|
90 | extra=kwargs) | |
88 |
|
91 | |||
89 | # injecting given _repo params |
|
92 | # injecting given _repo params | |
90 | commit_ctx._text = safe_bytes(message) |
|
93 | commit_ctx._text = safe_bytes(message) | |
91 | commit_ctx._user = safe_bytes(author) |
|
94 | commit_ctx._user = safe_bytes(author) | |
92 | commit_ctx._date = date |
|
95 | commit_ctx._date = date | |
93 |
|
96 | |||
94 | # TODO: Catch exceptions! |
|
97 | # TODO: Catch exceptions! | |
95 | n = self.repository._repo.commitctx(commit_ctx) |
|
98 | n = self.repository._repo.commitctx(commit_ctx) | |
96 | # Returns mercurial node |
|
99 | # Returns mercurial node | |
97 | self._commit_ctx = commit_ctx # For reference |
|
100 | self._commit_ctx = commit_ctx # For reference | |
98 | # Update vcs repository object & recreate mercurial _repo |
|
101 | # Update vcs repository object & recreate mercurial _repo | |
99 | # new_ctx = self.repository._repo[node] |
|
102 | # new_ctx = self.repository._repo[node] | |
100 | # new_tip = ascii_str(self.repository.get_changeset(new_ctx.hex())) |
|
103 | # new_tip = ascii_str(self.repository.get_changeset(new_ctx.hex())) | |
101 | self.repository.revisions.append(ascii_str(hex(n))) |
|
104 | self.repository.revisions.append(ascii_str(mercurial.node.hex(n))) | |
102 | self._repo = self.repository._get_repo(create=False) |
|
105 | self._repo = self.repository._get_repo(create=False) | |
103 | self.repository.branches = self.repository._get_branches() |
|
106 | self.repository.branches = self.repository._get_branches() | |
104 | tip = self.repository.get_changeset() |
|
107 | tip = self.repository.get_changeset() | |
105 | self.reset() |
|
108 | self.reset() | |
106 | return tip |
|
109 | return tip |
@@ -1,611 +1,625 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.hg.repository |
|
3 | vcs.backends.hg.repository | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Mercurial repository implementation. |
|
6 | Mercurial repository implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import datetime |
|
12 | import datetime | |
13 | import logging |
|
13 | import logging | |
14 | import os |
|
14 | import os | |
15 | import time |
|
15 | import time | |
16 | import urllib |
|
16 | import urllib | |
17 | import urllib2 |
|
17 | import urllib2 | |
18 | from collections import OrderedDict |
|
18 | from collections import OrderedDict | |
19 |
|
19 | |||
|
20 | import mercurial.commands | |||
|
21 | import mercurial.error | |||
|
22 | import mercurial.exchange | |||
|
23 | import mercurial.hg | |||
|
24 | import mercurial.hgweb | |||
|
25 | import mercurial.httppeer | |||
|
26 | import mercurial.match | |||
|
27 | import mercurial.mdiff | |||
|
28 | import mercurial.node | |||
|
29 | import mercurial.patch | |||
|
30 | import mercurial.scmutil | |||
|
31 | import mercurial.sshpeer | |||
|
32 | import mercurial.tags | |||
|
33 | import mercurial.ui | |||
|
34 | import mercurial.url | |||
|
35 | import mercurial.util | |||
|
36 | ||||
20 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator |
|
37 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator | |
21 | from kallithea.lib.vcs.exceptions import ( |
|
38 | from kallithea.lib.vcs.exceptions import ( | |
22 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) |
|
39 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) | |
23 | from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str, safe_unicode |
|
40 | from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str, safe_unicode | |
24 | from kallithea.lib.vcs.utils.hgcompat import ( |
|
|||
25 | Abort, RepoError, RepoLookupError, clone, diffopts, get_contact, hex, hg_url, httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepo, match_exact, nullid, patch, peer, scmutil, sshpeer, tag, ui) |
|
|||
26 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
41 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
27 | from kallithea.lib.vcs.utils.paths import abspath |
|
42 | from kallithea.lib.vcs.utils.paths import abspath | |
28 |
|
43 | |||
29 | from .changeset import MercurialChangeset |
|
44 | from .changeset import MercurialChangeset | |
30 | from .inmemory import MercurialInMemoryChangeset |
|
45 | from .inmemory import MercurialInMemoryChangeset | |
31 | from .workdir import MercurialWorkdir |
|
46 | from .workdir import MercurialWorkdir | |
32 |
|
47 | |||
33 |
|
48 | |||
34 | log = logging.getLogger(__name__) |
|
49 | log = logging.getLogger(__name__) | |
35 |
|
50 | |||
36 |
|
51 | |||
37 | class MercurialRepository(BaseRepository): |
|
52 | class MercurialRepository(BaseRepository): | |
38 | """ |
|
53 | """ | |
39 | Mercurial repository backend |
|
54 | Mercurial repository backend | |
40 | """ |
|
55 | """ | |
41 | DEFAULT_BRANCH_NAME = 'default' |
|
56 | DEFAULT_BRANCH_NAME = 'default' | |
42 | scm = 'hg' |
|
57 | scm = 'hg' | |
43 |
|
58 | |||
44 | def __init__(self, repo_path, create=False, baseui=None, src_url=None, |
|
59 | def __init__(self, repo_path, create=False, baseui=None, src_url=None, | |
45 | update_after_clone=False): |
|
60 | update_after_clone=False): | |
46 | """ |
|
61 | """ | |
47 | Raises RepositoryError if repository could not be find at the given |
|
62 | Raises RepositoryError if repository could not be find at the given | |
48 | ``repo_path``. |
|
63 | ``repo_path``. | |
49 |
|
64 | |||
50 | :param repo_path: local path of the repository |
|
65 | :param repo_path: local path of the repository | |
51 | :param create=False: if set to True, would try to create repository if |
|
66 | :param create=False: if set to True, would try to create repository if | |
52 | it does not exist rather than raising exception |
|
67 | it does not exist rather than raising exception | |
53 | :param baseui=None: user data |
|
68 | :param baseui=None: user data | |
54 | :param src_url=None: would try to clone repository from given location |
|
69 | :param src_url=None: would try to clone repository from given location | |
55 | :param update_after_clone=False: sets update of working copy after |
|
70 | :param update_after_clone=False: sets update of working copy after | |
56 | making a clone |
|
71 | making a clone | |
57 | """ |
|
72 | """ | |
58 |
|
73 | |||
59 | if not isinstance(repo_path, str): |
|
74 | if not isinstance(repo_path, str): | |
60 | raise VCSError('Mercurial backend requires repository path to ' |
|
75 | raise VCSError('Mercurial backend requires repository path to ' | |
61 | 'be instance of <str> got %s instead' % |
|
76 | 'be instance of <str> got %s instead' % | |
62 | type(repo_path)) |
|
77 | type(repo_path)) | |
63 |
|
78 | |||
64 | self.path = abspath(repo_path) |
|
79 | self.path = abspath(repo_path) | |
65 | self.baseui = baseui or ui.ui() |
|
80 | self.baseui = baseui or mercurial.ui.ui() | |
66 | # We've set path and ui, now we can set _repo itself |
|
81 | # We've set path and ui, now we can set _repo itself | |
67 | self._repo = self._get_repo(create, src_url, update_after_clone) |
|
82 | self._repo = self._get_repo(create, src_url, update_after_clone) | |
68 |
|
83 | |||
69 | @property |
|
84 | @property | |
70 | def _empty(self): |
|
85 | def _empty(self): | |
71 | """ |
|
86 | """ | |
72 | Checks if repository is empty ie. without any changesets |
|
87 | Checks if repository is empty ie. without any changesets | |
73 | """ |
|
88 | """ | |
74 | # TODO: Following raises errors when using InMemoryChangeset... |
|
89 | # TODO: Following raises errors when using InMemoryChangeset... | |
75 | # return len(self._repo.changelog) == 0 |
|
90 | # return len(self._repo.changelog) == 0 | |
76 | return len(self.revisions) == 0 |
|
91 | return len(self.revisions) == 0 | |
77 |
|
92 | |||
78 | @LazyProperty |
|
93 | @LazyProperty | |
79 | def revisions(self): |
|
94 | def revisions(self): | |
80 | """ |
|
95 | """ | |
81 | Returns list of revisions' ids, in ascending order. Being lazy |
|
96 | Returns list of revisions' ids, in ascending order. Being lazy | |
82 | attribute allows external tools to inject shas from cache. |
|
97 | attribute allows external tools to inject shas from cache. | |
83 | """ |
|
98 | """ | |
84 | return self._get_all_revisions() |
|
99 | return self._get_all_revisions() | |
85 |
|
100 | |||
86 | @LazyProperty |
|
101 | @LazyProperty | |
87 | def name(self): |
|
102 | def name(self): | |
88 | return os.path.basename(self.path) |
|
103 | return os.path.basename(self.path) | |
89 |
|
104 | |||
90 | @LazyProperty |
|
105 | @LazyProperty | |
91 | def branches(self): |
|
106 | def branches(self): | |
92 | return self._get_branches() |
|
107 | return self._get_branches() | |
93 |
|
108 | |||
94 | @LazyProperty |
|
109 | @LazyProperty | |
95 | def closed_branches(self): |
|
110 | def closed_branches(self): | |
96 | return self._get_branches(normal=False, closed=True) |
|
111 | return self._get_branches(normal=False, closed=True) | |
97 |
|
112 | |||
98 | @LazyProperty |
|
113 | @LazyProperty | |
99 | def allbranches(self): |
|
114 | def allbranches(self): | |
100 | """ |
|
115 | """ | |
101 | List all branches, including closed branches. |
|
116 | List all branches, including closed branches. | |
102 | """ |
|
117 | """ | |
103 | return self._get_branches(closed=True) |
|
118 | return self._get_branches(closed=True) | |
104 |
|
119 | |||
105 | def _get_branches(self, normal=True, closed=False): |
|
120 | def _get_branches(self, normal=True, closed=False): | |
106 | """ |
|
121 | """ | |
107 | Gets branches for this repository |
|
122 | Gets branches for this repository | |
108 | Returns only not closed branches by default |
|
123 | Returns only not closed branches by default | |
109 |
|
124 | |||
110 | :param closed: return also closed branches for mercurial |
|
125 | :param closed: return also closed branches for mercurial | |
111 | :param normal: return also normal branches |
|
126 | :param normal: return also normal branches | |
112 | """ |
|
127 | """ | |
113 |
|
128 | |||
114 | if self._empty: |
|
129 | if self._empty: | |
115 | return {} |
|
130 | return {} | |
116 |
|
131 | |||
117 | bt = OrderedDict() |
|
132 | bt = OrderedDict() | |
118 | for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()): |
|
133 | for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()): | |
119 | if isclosed: |
|
134 | if isclosed: | |
120 | if closed: |
|
135 | if closed: | |
121 | bt[safe_unicode(bn)] = ascii_str(hex(node)) |
|
136 | bt[safe_unicode(bn)] = ascii_str(mercurial.node.hex(node)) | |
122 | else: |
|
137 | else: | |
123 | if normal: |
|
138 | if normal: | |
124 | bt[safe_unicode(bn)] = ascii_str(hex(node)) |
|
139 | bt[safe_unicode(bn)] = ascii_str(mercurial.node.hex(node)) | |
125 | return bt |
|
140 | return bt | |
126 |
|
141 | |||
127 | @LazyProperty |
|
142 | @LazyProperty | |
128 | def tags(self): |
|
143 | def tags(self): | |
129 | """ |
|
144 | """ | |
130 | Gets tags for this repository |
|
145 | Gets tags for this repository | |
131 | """ |
|
146 | """ | |
132 | return self._get_tags() |
|
147 | return self._get_tags() | |
133 |
|
148 | |||
134 | def _get_tags(self): |
|
149 | def _get_tags(self): | |
135 | if self._empty: |
|
150 | if self._empty: | |
136 | return {} |
|
151 | return {} | |
137 |
|
152 | |||
138 | return OrderedDict(sorted( |
|
153 | return OrderedDict(sorted( | |
139 | ((safe_unicode(n), ascii_str(hex(h))) for n, h in self._repo.tags().items()), |
|
154 | ((safe_unicode(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()), | |
140 | reverse=True, |
|
155 | reverse=True, | |
141 | key=lambda x: x[0], # sort by name |
|
156 | key=lambda x: x[0], # sort by name | |
142 | )) |
|
157 | )) | |
143 |
|
158 | |||
144 | def tag(self, name, user, revision=None, message=None, date=None, |
|
159 | def tag(self, name, user, revision=None, message=None, date=None, | |
145 | **kwargs): |
|
160 | **kwargs): | |
146 | """ |
|
161 | """ | |
147 | Creates and returns a tag for the given ``revision``. |
|
162 | Creates and returns a tag for the given ``revision``. | |
148 |
|
163 | |||
149 | :param name: name for new tag |
|
164 | :param name: name for new tag | |
150 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
165 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
151 | :param revision: changeset id for which new tag would be created |
|
166 | :param revision: changeset id for which new tag would be created | |
152 | :param message: message of the tag's commit |
|
167 | :param message: message of the tag's commit | |
153 | :param date: date of tag's commit |
|
168 | :param date: date of tag's commit | |
154 |
|
169 | |||
155 | :raises TagAlreadyExistError: if tag with same name already exists |
|
170 | :raises TagAlreadyExistError: if tag with same name already exists | |
156 | """ |
|
171 | """ | |
157 | if name in self.tags: |
|
172 | if name in self.tags: | |
158 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
173 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
159 | changeset = self.get_changeset(revision) |
|
174 | changeset = self.get_changeset(revision) | |
160 | local = kwargs.setdefault('local', False) |
|
175 | local = kwargs.setdefault('local', False) | |
161 |
|
176 | |||
162 | if message is None: |
|
177 | if message is None: | |
163 | message = "Added tag %s for changeset %s" % (name, |
|
178 | message = "Added tag %s for changeset %s" % (name, | |
164 | changeset.short_id) |
|
179 | changeset.short_id) | |
165 |
|
180 | |||
166 | if date is None: |
|
181 | if date is None: | |
167 | date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S') |
|
182 | date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S') | |
168 |
|
183 | |||
169 | try: |
|
184 | try: | |
170 | tag(self._repo, name, changeset._ctx.node(), message, local, user, date) |
|
185 | mercurial.tags.tag(self._repo, name, changeset._ctx.node(), message, local, user, date) | |
171 | except Abort as e: |
|
186 | except mercurial.error.Abort as e: | |
172 | raise RepositoryError(e.message) |
|
187 | raise RepositoryError(e.message) | |
173 |
|
188 | |||
174 | # Reinitialize tags |
|
189 | # Reinitialize tags | |
175 | self.tags = self._get_tags() |
|
190 | self.tags = self._get_tags() | |
176 | tag_id = self.tags[name] |
|
191 | tag_id = self.tags[name] | |
177 |
|
192 | |||
178 | return self.get_changeset(revision=tag_id) |
|
193 | return self.get_changeset(revision=tag_id) | |
179 |
|
194 | |||
180 | def remove_tag(self, name, user, message=None, date=None): |
|
195 | def remove_tag(self, name, user, message=None, date=None): | |
181 | """ |
|
196 | """ | |
182 | Removes tag with the given ``name``. |
|
197 | Removes tag with the given ``name``. | |
183 |
|
198 | |||
184 | :param name: name of the tag to be removed |
|
199 | :param name: name of the tag to be removed | |
185 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
200 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
186 | :param message: message of the tag's removal commit |
|
201 | :param message: message of the tag's removal commit | |
187 | :param date: date of tag's removal commit |
|
202 | :param date: date of tag's removal commit | |
188 |
|
203 | |||
189 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
204 | :raises TagDoesNotExistError: if tag with given name does not exists | |
190 | """ |
|
205 | """ | |
191 | if name not in self.tags: |
|
206 | if name not in self.tags: | |
192 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
207 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
193 | if message is None: |
|
208 | if message is None: | |
194 | message = "Removed tag %s" % name |
|
209 | message = "Removed tag %s" % name | |
195 | if date is None: |
|
210 | if date is None: | |
196 | date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S') |
|
211 | date = datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S') | |
197 | local = False |
|
212 | local = False | |
198 |
|
213 | |||
199 | try: |
|
214 | try: | |
200 | tag(self._repo, name, nullid, message, local, user, date) |
|
215 | mercurial.tags.tag(self._repo, name, mercurial.commands.nullid, message, local, user, date) | |
201 | self.tags = self._get_tags() |
|
216 | self.tags = self._get_tags() | |
202 | except Abort as e: |
|
217 | except mercurial.error.Abort as e: | |
203 | raise RepositoryError(e.message) |
|
218 | raise RepositoryError(e.message) | |
204 |
|
219 | |||
205 | @LazyProperty |
|
220 | @LazyProperty | |
206 | def bookmarks(self): |
|
221 | def bookmarks(self): | |
207 | """ |
|
222 | """ | |
208 | Gets bookmarks for this repository |
|
223 | Gets bookmarks for this repository | |
209 | """ |
|
224 | """ | |
210 | return self._get_bookmarks() |
|
225 | return self._get_bookmarks() | |
211 |
|
226 | |||
212 | def _get_bookmarks(self): |
|
227 | def _get_bookmarks(self): | |
213 | if self._empty: |
|
228 | if self._empty: | |
214 | return {} |
|
229 | return {} | |
215 |
|
230 | |||
216 | return OrderedDict(sorted( |
|
231 | return OrderedDict(sorted( | |
217 | ((safe_unicode(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()), |
|
232 | ((safe_unicode(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()), | |
218 | reverse=True, |
|
233 | reverse=True, | |
219 | key=lambda x: x[0], # sort by name |
|
234 | key=lambda x: x[0], # sort by name | |
220 | )) |
|
235 | )) | |
221 |
|
236 | |||
222 | def _get_all_revisions(self): |
|
237 | def _get_all_revisions(self): | |
223 | return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()] |
|
238 | return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()] | |
224 |
|
239 | |||
225 | def get_diff(self, rev1, rev2, path='', ignore_whitespace=False, |
|
240 | def get_diff(self, rev1, rev2, path='', ignore_whitespace=False, | |
226 | context=3): |
|
241 | context=3): | |
227 | """ |
|
242 | """ | |
228 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
243 | Returns (git like) *diff*, as plain text. Shows changes introduced by | |
229 | ``rev2`` since ``rev1``. |
|
244 | ``rev2`` since ``rev1``. | |
230 |
|
245 | |||
231 | :param rev1: Entry point from which diff is shown. Can be |
|
246 | :param rev1: Entry point from which diff is shown. Can be | |
232 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all |
|
247 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all | |
233 | the changes since empty state of the repository until ``rev2`` |
|
248 | the changes since empty state of the repository until ``rev2`` | |
234 | :param rev2: Until which revision changes should be shown. |
|
249 | :param rev2: Until which revision changes should be shown. | |
235 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
250 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
236 | changes. Defaults to ``False``. |
|
251 | changes. Defaults to ``False``. | |
237 | :param context: How many lines before/after changed lines should be |
|
252 | :param context: How many lines before/after changed lines should be | |
238 | shown. Defaults to ``3``. If negative value is passed-in, it will be |
|
253 | shown. Defaults to ``3``. If negative value is passed-in, it will be | |
239 | set to ``0`` instead. |
|
254 | set to ``0`` instead. | |
240 | """ |
|
255 | """ | |
241 |
|
256 | |||
242 | # Negative context values make no sense, and will result in |
|
257 | # Negative context values make no sense, and will result in | |
243 | # errors. Ensure this does not happen. |
|
258 | # errors. Ensure this does not happen. | |
244 | if context < 0: |
|
259 | if context < 0: | |
245 | context = 0 |
|
260 | context = 0 | |
246 |
|
261 | |||
247 | if hasattr(rev1, 'raw_id'): |
|
262 | if hasattr(rev1, 'raw_id'): | |
248 | rev1 = getattr(rev1, 'raw_id') |
|
263 | rev1 = getattr(rev1, 'raw_id') | |
249 |
|
264 | |||
250 | if hasattr(rev2, 'raw_id'): |
|
265 | if hasattr(rev2, 'raw_id'): | |
251 | rev2 = getattr(rev2, 'raw_id') |
|
266 | rev2 = getattr(rev2, 'raw_id') | |
252 |
|
267 | |||
253 | # Check if given revisions are present at repository (may raise |
|
268 | # Check if given revisions are present at repository (may raise | |
254 | # ChangesetDoesNotExistError) |
|
269 | # ChangesetDoesNotExistError) | |
255 | if rev1 != self.EMPTY_CHANGESET: |
|
270 | if rev1 != self.EMPTY_CHANGESET: | |
256 | self.get_changeset(rev1) |
|
271 | self.get_changeset(rev1) | |
257 | self.get_changeset(rev2) |
|
272 | self.get_changeset(rev2) | |
258 | if path: |
|
273 | if path: | |
259 |
file_filter = m |
|
274 | file_filter = mercurial.match.exact(path) | |
260 | else: |
|
275 | else: | |
261 | file_filter = None |
|
276 | file_filter = None | |
262 |
|
277 | |||
263 | return b''.join(patch.diff(self._repo, rev1, rev2, match=file_filter, |
|
278 | return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter, | |
264 | opts=diffopts(git=True, |
|
279 | opts=mercurial.mdiff.diffopts(git=True, | |
265 | showfunc=True, |
|
280 | showfunc=True, | |
266 | ignorews=ignore_whitespace, |
|
281 | ignorews=ignore_whitespace, | |
267 | context=context))) |
|
282 | context=context))) | |
268 |
|
283 | |||
269 | @classmethod |
|
284 | @classmethod | |
270 | def _check_url(cls, url, repoui=None): |
|
285 | def _check_url(cls, url, repoui=None): | |
271 | """ |
|
286 | """ | |
272 | Function will check given url and try to verify if it's a valid |
|
287 | Function will check given url and try to verify if it's a valid | |
273 | link. Sometimes it may happened that mercurial will issue basic |
|
288 | link. Sometimes it may happened that mercurial will issue basic | |
274 | auth request that can cause whole API to hang when used from python |
|
289 | auth request that can cause whole API to hang when used from python | |
275 | or other external calls. |
|
290 | or other external calls. | |
276 |
|
291 | |||
277 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
292 | On failures it'll raise urllib2.HTTPError, exception is also thrown | |
278 | when the return code is non 200 |
|
293 | when the return code is non 200 | |
279 | """ |
|
294 | """ | |
280 | # check first if it's not an local url |
|
295 | # check first if it's not an local url | |
281 | if os.path.isdir(url) or url.startswith(b'file:'): |
|
296 | if os.path.isdir(url) or url.startswith(b'file:'): | |
282 | return True |
|
297 | return True | |
283 |
|
298 | |||
284 | if url.startswith(b'ssh:'): |
|
299 | if url.startswith(b'ssh:'): | |
285 | # in case of invalid uri or authentication issues, sshpeer will |
|
300 | # in case of invalid uri or authentication issues, sshpeer will | |
286 | # throw an exception. |
|
301 | # throw an exception. | |
287 | sshpeer.instance(repoui or ui.ui(), url, False).lookup(b'tip') |
|
302 | mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') | |
288 | return True |
|
303 | return True | |
289 |
|
304 | |||
290 | url_prefix = None |
|
305 | url_prefix = None | |
291 | if b'+' in url[:url.find(b'://')]: |
|
306 | if b'+' in url[:url.find(b'://')]: | |
292 | url_prefix, url = url.split(b'+', 1) |
|
307 | url_prefix, url = url.split(b'+', 1) | |
293 |
|
308 | |||
294 | handlers = [] |
|
309 | handlers = [] | |
295 |
url_obj = |
|
310 | url_obj = mercurial.util.url(url) | |
296 | test_uri, authinfo = url_obj.authinfo() |
|
311 | test_uri, authinfo = url_obj.authinfo() | |
297 | url_obj.passwd = b'*****' |
|
312 | url_obj.passwd = b'*****' | |
298 | cleaned_uri = str(url_obj) |
|
313 | cleaned_uri = str(url_obj) | |
299 |
|
314 | |||
300 | if authinfo: |
|
315 | if authinfo: | |
301 | # create a password manager |
|
316 | # create a password manager | |
302 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
317 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() | |
303 | passmgr.add_password(*authinfo) |
|
318 | passmgr.add_password(*authinfo) | |
304 |
|
319 | |||
305 | handlers.extend((httpbasicauthhandler(passmgr), |
|
320 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), | |
306 | httpdigestauthhandler(passmgr))) |
|
321 | mercurial.url.httpdigestauthhandler(passmgr))) | |
307 |
|
322 | |||
308 | o = urllib2.build_opener(*handlers) |
|
323 | o = urllib2.build_opener(*handlers) | |
309 | o.addheaders = [('Content-Type', 'application/mercurial-0.1'), |
|
324 | o.addheaders = [('Content-Type', 'application/mercurial-0.1'), | |
310 | ('Accept', 'application/mercurial-0.1')] |
|
325 | ('Accept', 'application/mercurial-0.1')] | |
311 |
|
326 | |||
312 | q = {"cmd": 'between'} |
|
327 | q = {"cmd": 'between'} | |
313 | q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)}) |
|
328 | q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)}) | |
314 | qs = '?%s' % urllib.urlencode(q) |
|
329 | qs = '?%s' % urllib.urlencode(q) | |
315 | cu = "%s%s" % (test_uri, qs) |
|
330 | cu = "%s%s" % (test_uri, qs) | |
316 | req = urllib2.Request(cu, None, {}) |
|
331 | req = urllib2.Request(cu, None, {}) | |
317 |
|
332 | |||
318 | try: |
|
333 | try: | |
319 | resp = o.open(req) |
|
334 | resp = o.open(req) | |
320 | if resp.code != 200: |
|
335 | if resp.code != 200: | |
321 | raise Exception('Return Code is not 200') |
|
336 | raise Exception('Return Code is not 200') | |
322 | except Exception as e: |
|
337 | except Exception as e: | |
323 | # means it cannot be cloned |
|
338 | # means it cannot be cloned | |
324 | raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
339 | raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) | |
325 |
|
340 | |||
326 | if not url_prefix: # skip svn+http://... (and git+... too) |
|
341 | if not url_prefix: # skip svn+http://... (and git+... too) | |
327 | # now check if it's a proper hg repo |
|
342 | # now check if it's a proper hg repo | |
328 | try: |
|
343 | try: | |
329 | httppeer.instance(repoui or ui.ui(), url, False).lookup(b'tip') |
|
344 | mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip') | |
330 | except Exception as e: |
|
345 | except Exception as e: | |
331 | raise urllib2.URLError( |
|
346 | raise urllib2.URLError( | |
332 | "url [%s] does not look like an hg repo org_exc: %s" |
|
347 | "url [%s] does not look like an hg repo org_exc: %s" | |
333 | % (cleaned_uri, e)) |
|
348 | % (cleaned_uri, e)) | |
334 |
|
349 | |||
335 | return True |
|
350 | return True | |
336 |
|
351 | |||
337 | def _get_repo(self, create, src_url=None, update_after_clone=False): |
|
352 | def _get_repo(self, create, src_url=None, update_after_clone=False): | |
338 | """ |
|
353 | """ | |
339 | Function will check for mercurial repository in given path and return |
|
354 | Function will check for mercurial repository in given path and return | |
340 | a localrepo object. If there is no repository in that path it will |
|
355 | a localrepo object. If there is no repository in that path it will | |
341 | raise an exception unless ``create`` parameter is set to True - in |
|
356 | raise an exception unless ``create`` parameter is set to True - in | |
342 | that case repository would be created and returned. |
|
357 | that case repository would be created and returned. | |
343 | If ``src_url`` is given, would try to clone repository from the |
|
358 | If ``src_url`` is given, would try to clone repository from the | |
344 | location at given clone_point. Additionally it'll make update to |
|
359 | location at given clone_point. Additionally it'll make update to | |
345 | working copy accordingly to ``update_after_clone`` flag |
|
360 | working copy accordingly to ``update_after_clone`` flag | |
346 | """ |
|
361 | """ | |
347 |
|
362 | |||
348 | try: |
|
363 | try: | |
349 | if src_url: |
|
364 | if src_url: | |
350 | url = safe_bytes(self._get_url(src_url)) |
|
365 | url = safe_bytes(self._get_url(src_url)) | |
351 | opts = {} |
|
366 | opts = {} | |
352 | if not update_after_clone: |
|
367 | if not update_after_clone: | |
353 | opts.update({'noupdate': True}) |
|
368 | opts.update({'noupdate': True}) | |
354 | MercurialRepository._check_url(url, self.baseui) |
|
369 | MercurialRepository._check_url(url, self.baseui) | |
355 | clone(self.baseui, url, self.path, **opts) |
|
370 | mercurial.commands.clone(self.baseui, url, self.path, **opts) | |
356 |
|
371 | |||
357 | # Don't try to create if we've already cloned repo |
|
372 | # Don't try to create if we've already cloned repo | |
358 | create = False |
|
373 | create = False | |
359 | return localrepo.instance(self.baseui, self.path, create=create) |
|
374 | return mercurial.localrepo.instance(self.baseui, self.path, create=create) | |
360 | except (Abort, RepoError) as err: |
|
375 | except (mercurial.error.Abort, mercurial.error.RepoError) as err: | |
361 | if create: |
|
376 | if create: | |
362 | msg = "Cannot create repository at %s. Original error was %s" \ |
|
377 | msg = "Cannot create repository at %s. Original error was %s" \ | |
363 | % (self.path, err) |
|
378 | % (self.path, err) | |
364 | else: |
|
379 | else: | |
365 | msg = "Not valid repository at %s. Original error was %s" \ |
|
380 | msg = "Not valid repository at %s. Original error was %s" \ | |
366 | % (self.path, err) |
|
381 | % (self.path, err) | |
367 | raise RepositoryError(msg) |
|
382 | raise RepositoryError(msg) | |
368 |
|
383 | |||
369 | @LazyProperty |
|
384 | @LazyProperty | |
370 | def in_memory_changeset(self): |
|
385 | def in_memory_changeset(self): | |
371 | return MercurialInMemoryChangeset(self) |
|
386 | return MercurialInMemoryChangeset(self) | |
372 |
|
387 | |||
373 | @LazyProperty |
|
388 | @LazyProperty | |
374 | def description(self): |
|
389 | def description(self): | |
375 | _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True) |
|
390 | _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True) | |
376 | return safe_unicode(_desc or b'unknown') |
|
391 | return safe_unicode(_desc or b'unknown') | |
377 |
|
392 | |||
378 | @LazyProperty |
|
393 | @LazyProperty | |
379 | def contact(self): |
|
394 | def contact(self): | |
380 | return safe_unicode(get_contact(self._repo.ui.config) |
|
395 | return safe_unicode(mercurial.hgweb.common.get_contact(self._repo.ui.config) | |
381 | or b'Unknown') |
|
396 | or b'Unknown') | |
382 |
|
397 | |||
383 | @LazyProperty |
|
398 | @LazyProperty | |
384 | def last_change(self): |
|
399 | def last_change(self): | |
385 | """ |
|
400 | """ | |
386 | Returns last change made on this repository as datetime object |
|
401 | Returns last change made on this repository as datetime object | |
387 | """ |
|
402 | """ | |
388 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
403 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
389 |
|
404 | |||
390 | def _get_mtime(self): |
|
405 | def _get_mtime(self): | |
391 | try: |
|
406 | try: | |
392 | return time.mktime(self.get_changeset().date.timetuple()) |
|
407 | return time.mktime(self.get_changeset().date.timetuple()) | |
393 | except RepositoryError: |
|
408 | except RepositoryError: | |
394 | # fallback to filesystem |
|
409 | # fallback to filesystem | |
395 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
410 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") | |
396 | st_path = os.path.join(self.path, '.hg', "store") |
|
411 | st_path = os.path.join(self.path, '.hg', "store") | |
397 | if os.path.exists(cl_path): |
|
412 | if os.path.exists(cl_path): | |
398 | return os.stat(cl_path).st_mtime |
|
413 | return os.stat(cl_path).st_mtime | |
399 | else: |
|
414 | else: | |
400 | return os.stat(st_path).st_mtime |
|
415 | return os.stat(st_path).st_mtime | |
401 |
|
416 | |||
402 | def _get_revision(self, revision): |
|
417 | def _get_revision(self, revision): | |
403 | """ |
|
418 | """ | |
404 | Given any revision identifier, returns a 40 char string with revision hash. |
|
419 | Given any revision identifier, returns a 40 char string with revision hash. | |
405 |
|
420 | |||
406 | :param revision: str or int or None |
|
421 | :param revision: str or int or None | |
407 | """ |
|
422 | """ | |
408 | if self._empty: |
|
423 | if self._empty: | |
409 | raise EmptyRepositoryError("There are no changesets yet") |
|
424 | raise EmptyRepositoryError("There are no changesets yet") | |
410 |
|
425 | |||
411 | if revision in [-1, None]: |
|
426 | if revision in [-1, None]: | |
412 | revision = b'tip' |
|
427 | revision = b'tip' | |
413 | elif isinstance(revision, unicode): |
|
428 | elif isinstance(revision, unicode): | |
414 | revision = safe_bytes(revision) |
|
429 | revision = safe_bytes(revision) | |
415 |
|
430 | |||
416 | try: |
|
431 | try: | |
417 | if isinstance(revision, int): |
|
432 | if isinstance(revision, int): | |
418 | return ascii_str(self._repo[revision].hex()) |
|
433 | return ascii_str(self._repo[revision].hex()) | |
419 | return ascii_str(scmutil.revsymbol(self._repo, revision).hex()) |
|
434 | return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex()) | |
420 | except (IndexError, ValueError, RepoLookupError, TypeError): |
|
435 | except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError): | |
421 | msg = ("Revision %s does not exist for %s" % (revision, self)) |
|
436 | msg = ("Revision %s does not exist for %s" % (revision, self)) | |
422 | raise ChangesetDoesNotExistError(msg) |
|
437 | raise ChangesetDoesNotExistError(msg) | |
423 | except (LookupError, ): |
|
438 | except (LookupError, ): | |
424 | msg = ("Ambiguous identifier `%s` for %s" % (revision, self)) |
|
439 | msg = ("Ambiguous identifier `%s` for %s" % (revision, self)) | |
425 | raise ChangesetDoesNotExistError(msg) |
|
440 | raise ChangesetDoesNotExistError(msg) | |
426 |
|
441 | |||
427 | def get_ref_revision(self, ref_type, ref_name): |
|
442 | def get_ref_revision(self, ref_type, ref_name): | |
428 | """ |
|
443 | """ | |
429 | Returns revision number for the given reference. |
|
444 | Returns revision number for the given reference. | |
430 | """ |
|
445 | """ | |
431 | ref_name = safe_str(ref_name) |
|
446 | ref_name = safe_str(ref_name) | |
432 | if ref_type == 'rev' and not ref_name.strip('0'): |
|
447 | if ref_type == 'rev' and not ref_name.strip('0'): | |
433 | return self.EMPTY_CHANGESET |
|
448 | return self.EMPTY_CHANGESET | |
434 | # lookup up the exact node id |
|
449 | # lookup up the exact node id | |
435 | _revset_predicates = { |
|
450 | _revset_predicates = { | |
436 | 'branch': 'branch', |
|
451 | 'branch': 'branch', | |
437 | 'book': 'bookmark', |
|
452 | 'book': 'bookmark', | |
438 | 'tag': 'tag', |
|
453 | 'tag': 'tag', | |
439 | 'rev': 'id', |
|
454 | 'rev': 'id', | |
440 | } |
|
455 | } | |
441 | # avoid expensive branch(x) iteration over whole repo |
|
456 | # avoid expensive branch(x) iteration over whole repo | |
442 | rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type] |
|
457 | rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type] | |
443 | try: |
|
458 | try: | |
444 | revs = self._repo.revs(rev_spec, ref_name, ref_name) |
|
459 | revs = self._repo.revs(rev_spec, ref_name, ref_name) | |
445 | except LookupError: |
|
460 | except LookupError: | |
446 | msg = ("Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)) |
|
461 | msg = ("Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)) | |
447 | raise ChangesetDoesNotExistError(msg) |
|
462 | raise ChangesetDoesNotExistError(msg) | |
448 | except RepoLookupError: |
|
463 | except mercurial.error.RepoLookupError: | |
449 | msg = ("Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)) |
|
464 | msg = ("Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)) | |
450 | raise ChangesetDoesNotExistError(msg) |
|
465 | raise ChangesetDoesNotExistError(msg) | |
451 | if revs: |
|
466 | if revs: | |
452 | revision = revs.last() |
|
467 | revision = revs.last() | |
453 | else: |
|
468 | else: | |
454 | # TODO: just report 'not found'? |
|
469 | # TODO: just report 'not found'? | |
455 | revision = ref_name |
|
470 | revision = ref_name | |
456 |
|
471 | |||
457 | return self._get_revision(revision) |
|
472 | return self._get_revision(revision) | |
458 |
|
473 | |||
459 | def _get_archives(self, archive_name='tip'): |
|
474 | def _get_archives(self, archive_name='tip'): | |
460 | allowed = self.baseui.configlist(b"web", b"allow_archive", |
|
475 | allowed = self.baseui.configlist(b"web", b"allow_archive", | |
461 | untrusted=True) |
|
476 | untrusted=True) | |
462 | for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]: |
|
477 | for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]: | |
463 | if name in allowed or self._repo.ui.configbool(b"web", |
|
478 | if name in allowed or self._repo.ui.configbool(b"web", | |
464 | b"allow" + name, |
|
479 | b"allow" + name, | |
465 | untrusted=True): |
|
480 | untrusted=True): | |
466 | yield {"type": name, "extension": ext, "node": archive_name} |
|
481 | yield {"type": name, "extension": ext, "node": archive_name} | |
467 |
|
482 | |||
468 | def _get_url(self, url): |
|
483 | def _get_url(self, url): | |
469 | """ |
|
484 | """ | |
470 | Returns normalized url. If schema is not given, would fall |
|
485 | Returns normalized url. If schema is not given, would fall | |
471 | to filesystem |
|
486 | to filesystem | |
472 | (``file:///``) schema. |
|
487 | (``file:///``) schema. | |
473 | """ |
|
488 | """ | |
474 | url = safe_str(url) |
|
489 | url = safe_str(url) | |
475 | if url != 'default' and '://' not in url: |
|
490 | if url != 'default' and '://' not in url: | |
476 | url = "file:" + urllib.pathname2url(url) |
|
491 | url = "file:" + urllib.pathname2url(url) | |
477 | return url |
|
492 | return url | |
478 |
|
493 | |||
479 | def get_hook_location(self): |
|
494 | def get_hook_location(self): | |
480 | """ |
|
495 | """ | |
481 | returns absolute path to location where hooks are stored |
|
496 | returns absolute path to location where hooks are stored | |
482 | """ |
|
497 | """ | |
483 | return os.path.join(self.path, '.hg', '.hgrc') |
|
498 | return os.path.join(self.path, '.hg', '.hgrc') | |
484 |
|
499 | |||
485 | def get_changeset(self, revision=None): |
|
500 | def get_changeset(self, revision=None): | |
486 | """ |
|
501 | """ | |
487 | Returns ``MercurialChangeset`` object representing repository's |
|
502 | Returns ``MercurialChangeset`` object representing repository's | |
488 | changeset at the given ``revision``. |
|
503 | changeset at the given ``revision``. | |
489 | """ |
|
504 | """ | |
490 | revision = self._get_revision(revision) |
|
505 | revision = self._get_revision(revision) | |
491 | changeset = MercurialChangeset(repository=self, revision=revision) |
|
506 | changeset = MercurialChangeset(repository=self, revision=revision) | |
492 | return changeset |
|
507 | return changeset | |
493 |
|
508 | |||
494 | def get_changesets(self, start=None, end=None, start_date=None, |
|
509 | def get_changesets(self, start=None, end=None, start_date=None, | |
495 | end_date=None, branch_name=None, reverse=False, max_revisions=None): |
|
510 | end_date=None, branch_name=None, reverse=False, max_revisions=None): | |
496 | """ |
|
511 | """ | |
497 | Returns iterator of ``MercurialChangeset`` objects from start to end |
|
512 | Returns iterator of ``MercurialChangeset`` objects from start to end | |
498 | (both are inclusive) |
|
513 | (both are inclusive) | |
499 |
|
514 | |||
500 | :param start: None, str, int or mercurial lookup format |
|
515 | :param start: None, str, int or mercurial lookup format | |
501 | :param end: None, str, int or mercurial lookup format |
|
516 | :param end: None, str, int or mercurial lookup format | |
502 | :param start_date: |
|
517 | :param start_date: | |
503 | :param end_date: |
|
518 | :param end_date: | |
504 | :param branch_name: |
|
519 | :param branch_name: | |
505 | :param reversed: return changesets in reversed order |
|
520 | :param reversed: return changesets in reversed order | |
506 | """ |
|
521 | """ | |
507 | start_raw_id = self._get_revision(start) |
|
522 | start_raw_id = self._get_revision(start) | |
508 | start_pos = None if start is None else self.revisions.index(start_raw_id) |
|
523 | start_pos = None if start is None else self.revisions.index(start_raw_id) | |
509 | end_raw_id = self._get_revision(end) |
|
524 | end_raw_id = self._get_revision(end) | |
510 | end_pos = None if end is None else self.revisions.index(end_raw_id) |
|
525 | end_pos = None if end is None else self.revisions.index(end_raw_id) | |
511 |
|
526 | |||
512 | if start_pos is not None and end_pos is not None and start_pos > end_pos: |
|
527 | if start_pos is not None and end_pos is not None and start_pos > end_pos: | |
513 | raise RepositoryError("Start revision '%s' cannot be " |
|
528 | raise RepositoryError("Start revision '%s' cannot be " | |
514 | "after end revision '%s'" % (start, end)) |
|
529 | "after end revision '%s'" % (start, end)) | |
515 |
|
530 | |||
516 | if branch_name and branch_name not in self.allbranches: |
|
531 | if branch_name and branch_name not in self.allbranches: | |
517 | msg = ("Branch %s not found in %s" % (branch_name, self)) |
|
532 | msg = ("Branch %s not found in %s" % (branch_name, self)) | |
518 | raise BranchDoesNotExistError(msg) |
|
533 | raise BranchDoesNotExistError(msg) | |
519 | if end_pos is not None: |
|
534 | if end_pos is not None: | |
520 | end_pos += 1 |
|
535 | end_pos += 1 | |
521 | # filter branches |
|
536 | # filter branches | |
522 | filter_ = [] |
|
537 | filter_ = [] | |
523 | if branch_name: |
|
538 | if branch_name: | |
524 | filter_.append(b'branch("%s")' % safe_bytes(branch_name)) |
|
539 | filter_.append(b'branch("%s")' % safe_bytes(branch_name)) | |
525 | if start_date: |
|
540 | if start_date: | |
526 | filter_.append(b'date(">%s")' % start_date) |
|
541 | filter_.append(b'date(">%s")' % start_date) | |
527 | if end_date: |
|
542 | if end_date: | |
528 | filter_.append(b'date("<%s")' % end_date) |
|
543 | filter_.append(b'date("<%s")' % end_date) | |
529 | if filter_ or max_revisions: |
|
544 | if filter_ or max_revisions: | |
530 | if filter_: |
|
545 | if filter_: | |
531 | revspec = b' and '.join(filter_) |
|
546 | revspec = b' and '.join(filter_) | |
532 | else: |
|
547 | else: | |
533 | revspec = b'all()' |
|
548 | revspec = b'all()' | |
534 | if max_revisions: |
|
549 | if max_revisions: | |
535 | revspec = b'limit(%s, %d)' % (revspec, max_revisions) |
|
550 | revspec = b'limit(%s, %d)' % (revspec, max_revisions) | |
536 | revisions = scmutil.revrange(self._repo, [revspec]) |
|
551 | revisions = mercurial.scmutil.revrange(self._repo, [revspec]) | |
537 | else: |
|
552 | else: | |
538 | revisions = self.revisions |
|
553 | revisions = self.revisions | |
539 |
|
554 | |||
540 | # this is very much a hack to turn this into a list; a better solution |
|
555 | # this is very much a hack to turn this into a list; a better solution | |
541 | # would be to get rid of this function entirely and use revsets |
|
556 | # would be to get rid of this function entirely and use revsets | |
542 | revs = list(revisions)[start_pos:end_pos] |
|
557 | revs = list(revisions)[start_pos:end_pos] | |
543 | if reverse: |
|
558 | if reverse: | |
544 | revs.reverse() |
|
559 | revs.reverse() | |
545 |
|
560 | |||
546 | return CollectionGenerator(self, revs) |
|
561 | return CollectionGenerator(self, revs) | |
547 |
|
562 | |||
548 | def pull(self, url): |
|
563 | def pull(self, url): | |
549 | """ |
|
564 | """ | |
550 | Tries to pull changes from external location. |
|
565 | Tries to pull changes from external location. | |
551 | """ |
|
566 | """ | |
552 | url = self._get_url(url) |
|
567 | url = self._get_url(url) | |
553 | other = peer(self._repo, {}, url) |
|
568 | other = mercurial.hg.peer(self._repo, {}, url) | |
554 | try: |
|
569 | try: | |
555 | from mercurial import exchange |
|
570 | mercurial.exchange.pull(self._repo, other, heads=None, force=None) | |
556 | exchange.pull(self._repo, other, heads=None, force=None) |
|
571 | except mercurial.error.Abort as err: | |
557 | except Abort as err: |
|
|||
558 | # Propagate error but with vcs's type |
|
572 | # Propagate error but with vcs's type | |
559 | raise RepositoryError(str(err)) |
|
573 | raise RepositoryError(str(err)) | |
560 |
|
574 | |||
561 | @LazyProperty |
|
575 | @LazyProperty | |
562 | def workdir(self): |
|
576 | def workdir(self): | |
563 | """ |
|
577 | """ | |
564 | Returns ``Workdir`` instance for this repository. |
|
578 | Returns ``Workdir`` instance for this repository. | |
565 | """ |
|
579 | """ | |
566 | return MercurialWorkdir(self) |
|
580 | return MercurialWorkdir(self) | |
567 |
|
581 | |||
568 | def get_config_value(self, section, name=None, config_file=None): |
|
582 | def get_config_value(self, section, name=None, config_file=None): | |
569 | """ |
|
583 | """ | |
570 | Returns configuration value for a given [``section``] and ``name``. |
|
584 | Returns configuration value for a given [``section``] and ``name``. | |
571 |
|
585 | |||
572 | :param section: Section we want to retrieve value from |
|
586 | :param section: Section we want to retrieve value from | |
573 | :param name: Name of configuration we want to retrieve |
|
587 | :param name: Name of configuration we want to retrieve | |
574 | :param config_file: A path to file which should be used to retrieve |
|
588 | :param config_file: A path to file which should be used to retrieve | |
575 | configuration from (might also be a list of file paths) |
|
589 | configuration from (might also be a list of file paths) | |
576 | """ |
|
590 | """ | |
577 | if config_file is None: |
|
591 | if config_file is None: | |
578 | config_file = [] |
|
592 | config_file = [] | |
579 | elif isinstance(config_file, basestring): |
|
593 | elif isinstance(config_file, basestring): | |
580 | config_file = [config_file] |
|
594 | config_file = [config_file] | |
581 |
|
595 | |||
582 | config = self._repo.ui |
|
596 | config = self._repo.ui | |
583 | if config_file: |
|
597 | if config_file: | |
584 | config = ui.ui() |
|
598 | config = mercurial.ui.ui() | |
585 | for path in config_file: |
|
599 | for path in config_file: | |
586 | config.readconfig(path) |
|
600 | config.readconfig(path) | |
587 | return config.config(section, name) |
|
601 | return config.config(section, name) | |
588 |
|
602 | |||
589 | def get_user_name(self, config_file=None): |
|
603 | def get_user_name(self, config_file=None): | |
590 | """ |
|
604 | """ | |
591 | Returns user's name from global configuration file. |
|
605 | Returns user's name from global configuration file. | |
592 |
|
606 | |||
593 | :param config_file: A path to file which should be used to retrieve |
|
607 | :param config_file: A path to file which should be used to retrieve | |
594 | configuration from (might also be a list of file paths) |
|
608 | configuration from (might also be a list of file paths) | |
595 | """ |
|
609 | """ | |
596 | username = self.get_config_value('ui', 'username', config_file=config_file) |
|
610 | username = self.get_config_value('ui', 'username', config_file=config_file) | |
597 | if username: |
|
611 | if username: | |
598 | return author_name(username) |
|
612 | return author_name(username) | |
599 | return None |
|
613 | return None | |
600 |
|
614 | |||
601 | def get_user_email(self, config_file=None): |
|
615 | def get_user_email(self, config_file=None): | |
602 | """ |
|
616 | """ | |
603 | Returns user's email from global configuration file. |
|
617 | Returns user's email from global configuration file. | |
604 |
|
618 | |||
605 | :param config_file: A path to file which should be used to retrieve |
|
619 | :param config_file: A path to file which should be used to retrieve | |
606 | configuration from (might also be a list of file paths) |
|
620 | configuration from (might also be a list of file paths) | |
607 | """ |
|
621 | """ | |
608 | username = self.get_config_value('ui', 'username', config_file=config_file) |
|
622 | username = self.get_config_value('ui', 'username', config_file=config_file) | |
609 | if username: |
|
623 | if username: | |
610 | return author_email(username) |
|
624 | return author_email(username) | |
611 | return None |
|
625 | return None |
@@ -1,66 +1,66 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 |
|
14 | |||
15 | import logging |
|
15 | import logging | |
16 |
|
16 | |||
17 |
|
|
17 | import mercurial.hg | |
18 |
|
|
18 | import mercurial.wireprotoserver | |
19 |
|
19 | |||
20 | from kallithea.lib.utils import make_ui |
|
20 | from kallithea.lib.utils import make_ui | |
21 | from kallithea.lib.vcs.backends.ssh import BaseSshHandler |
|
21 | from kallithea.lib.vcs.backends.ssh import BaseSshHandler | |
22 | from kallithea.lib.vcs.utils import safe_bytes, safe_unicode |
|
22 | from kallithea.lib.vcs.utils import safe_bytes, safe_unicode | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | log = logging.getLogger(__name__) |
|
25 | log = logging.getLogger(__name__) | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | class MercurialSshHandler(BaseSshHandler): |
|
28 | class MercurialSshHandler(BaseSshHandler): | |
29 | vcs_type = 'hg' |
|
29 | vcs_type = 'hg' | |
30 |
|
30 | |||
31 | @classmethod |
|
31 | @classmethod | |
32 | def make(cls, ssh_command_parts): |
|
32 | def make(cls, ssh_command_parts): | |
33 | r""" |
|
33 | r""" | |
34 | >>> import shlex |
|
34 | >>> import shlex | |
35 |
|
35 | |||
36 | >>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name |
|
36 | >>> MercurialSshHandler.make(shlex.split('hg -R "foo bar" serve --stdio')).repo_name | |
37 | u'foo bar' |
|
37 | u'foo bar' | |
38 | >>> MercurialSshHandler.make(shlex.split(' hg -R blΓ₯bΓ¦rgrΓΈd serve --stdio ')).repo_name |
|
38 | >>> MercurialSshHandler.make(shlex.split(' hg -R blΓ₯bΓ¦rgrΓΈd serve --stdio ')).repo_name | |
39 | u'bl\xe5b\xe6rgr\xf8d' |
|
39 | u'bl\xe5b\xe6rgr\xf8d' | |
40 | >>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name |
|
40 | >>> MercurialSshHandler.make(shlex.split('''hg -R 'foo"bar' serve --stdio''')).repo_name | |
41 | u'foo"bar' |
|
41 | u'foo"bar' | |
42 |
|
42 | |||
43 | >>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio')) |
|
43 | >>> MercurialSshHandler.make(shlex.split('/bin/hg -R "foo" serve --stdio')) | |
44 | >>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax |
|
44 | >>> MercurialSshHandler.make(shlex.split('''hg -R "foo"bar" serve --stdio''')) # ssh-serve will report: Error parsing SSH command "...": invalid syntax | |
45 | Traceback (most recent call last): |
|
45 | Traceback (most recent call last): | |
46 | ValueError: No closing quotation |
|
46 | ValueError: No closing quotation | |
47 | >>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here |
|
47 | >>> MercurialSshHandler.make(shlex.split('git-upload-pack "/foo"')) # not handled here | |
48 | """ |
|
48 | """ | |
49 | if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']: |
|
49 | if ssh_command_parts[:2] == ['hg', '-R'] and ssh_command_parts[3:] == ['serve', '--stdio']: | |
50 | return cls(safe_unicode(ssh_command_parts[2])) |
|
50 | return cls(safe_unicode(ssh_command_parts[2])) | |
51 |
|
51 | |||
52 | return None |
|
52 | return None | |
53 |
|
53 | |||
54 | def __init__(self, repo_name): |
|
54 | def __init__(self, repo_name): | |
55 | self.repo_name = repo_name |
|
55 | self.repo_name = repo_name | |
56 |
|
56 | |||
57 | def _serve(self): |
|
57 | def _serve(self): | |
58 | # Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui |
|
58 | # Note: we want a repo with config based on .hg/hgrc and can thus not use self.db_repo.scm_instance._repo.ui | |
59 | baseui = make_ui(repo_path=self.db_repo.repo_full_path) |
|
59 | baseui = make_ui(repo_path=self.db_repo.repo_full_path) | |
60 | if not self.allow_push: |
|
60 | if not self.allow_push: | |
61 | baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.lib.hooks.rejectpush') |
|
61 | baseui.setconfig(b'hooks', b'pretxnopen._ssh_reject', b'python:kallithea.lib.hooks.rejectpush') | |
62 | baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.lib.hooks.rejectpush') |
|
62 | baseui.setconfig(b'hooks', b'prepushkey._ssh_reject', b'python:kallithea.lib.hooks.rejectpush') | |
63 |
|
63 | |||
64 | repo = hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path)) |
|
64 | repo = mercurial.hg.repository(baseui, safe_bytes(self.db_repo.repo_full_path)) | |
65 | log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path) |
|
65 | log.debug("Starting Mercurial sshserver for %s", self.db_repo.repo_full_path) | |
66 | sshserver(baseui, repo).serve_forever() |
|
66 | mercurial.wireprotoserver.sshserver(baseui, repo).serve_forever() |
@@ -1,23 +1,24 b'' | |||||
|
1 | import mercurial.merge | |||
|
2 | ||||
1 | from kallithea.lib.vcs.backends.base import BaseWorkdir |
|
3 | from kallithea.lib.vcs.backends.base import BaseWorkdir | |
2 | from kallithea.lib.vcs.exceptions import BranchDoesNotExistError |
|
4 | from kallithea.lib.vcs.exceptions import BranchDoesNotExistError | |
3 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str |
|
5 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str | |
4 | from kallithea.lib.vcs.utils.hgcompat import hg_merge |
|
|||
5 |
|
6 | |||
6 |
|
7 | |||
7 | class MercurialWorkdir(BaseWorkdir): |
|
8 | class MercurialWorkdir(BaseWorkdir): | |
8 |
|
9 | |||
9 | def get_branch(self): |
|
10 | def get_branch(self): | |
10 | return self.repository._repo.dirstate.branch() |
|
11 | return self.repository._repo.dirstate.branch() | |
11 |
|
12 | |||
12 | def get_changeset(self): |
|
13 | def get_changeset(self): | |
13 | wk_dir_id = ascii_str(self.repository._repo[None].parents()[0].hex()) |
|
14 | wk_dir_id = ascii_str(self.repository._repo[None].parents()[0].hex()) | |
14 | return self.repository.get_changeset(wk_dir_id) |
|
15 | return self.repository.get_changeset(wk_dir_id) | |
15 |
|
16 | |||
16 | def checkout_branch(self, branch=None): |
|
17 | def checkout_branch(self, branch=None): | |
17 | if branch is None: |
|
18 | if branch is None: | |
18 | branch = self.repository.DEFAULT_BRANCH_NAME |
|
19 | branch = self.repository.DEFAULT_BRANCH_NAME | |
19 | if branch not in self.repository.branches: |
|
20 | if branch not in self.repository.branches: | |
20 | raise BranchDoesNotExistError |
|
21 | raise BranchDoesNotExistError | |
21 |
|
22 | |||
22 | raw_id = self.repository.branches[branch] |
|
23 | raw_id = self.repository.branches[branch] | |
23 |
|
|
24 | mercurial.merge.update(self.repository._repo, ascii_bytes(raw_id), False, False, None) |
@@ -1,29 +1,13 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Mercurial libs compatibility |
|
2 | Mercurial libs compatibility | |
3 | """ |
|
3 | """ | |
4 |
|
4 | |||
5 | import mercurial |
|
5 | import mercurial.localrepo | |
6 | from mercurial import archival, config, demandimport, discovery, httppeer, localrepo |
|
|||
7 | from mercurial import merge as hg_merge |
|
|||
8 | from mercurial import obsutil, patch, scmutil, sshpeer, ui, unionrepo |
|
|||
9 | from mercurial.commands import clone, nullid, pull |
|
|||
10 | from mercurial.context import memctx, memfilectx |
|
|||
11 | from mercurial.discovery import findcommonoutgoing |
|
|||
12 | from mercurial.error import Abort, RepoError, RepoLookupError |
|
|||
13 | from mercurial.hg import peer |
|
|||
14 | from mercurial.hgweb import hgweb_mod |
|
|||
15 | from mercurial.hgweb.common import get_contact |
|
|||
16 | from mercurial.match import exact as match_exact |
|
|||
17 | from mercurial.match import match |
|
|||
18 | from mercurial.mdiff import diffopts |
|
|||
19 | from mercurial.node import hex, nullrev |
|
|||
20 | from mercurial.scmutil import revrange |
|
|||
21 | from mercurial.tags import tag |
|
|||
22 | from mercurial.url import httpbasicauthhandler, httpdigestauthhandler |
|
|||
23 | from mercurial.util import url as hg_url |
|
|||
24 |
|
6 | |||
25 |
|
7 | |||
26 | # workaround for 3.3 94ac64bcf6fe and not calling largefiles reposetup correctly, and test_archival failing |
|
8 | def monkey_do(): | |
27 | localrepo.localrepository._lfstatuswriters = [lambda *msg, **opts: None] |
|
9 | """Apply some Mercurial monkey patching""" | |
28 | # 3.5 7699d3212994 added the invariant that repo.lfstatus must exist before hitting overridearchive |
|
10 | # workaround for 3.3 94ac64bcf6fe and not calling largefiles reposetup correctly, and test_archival failing | |
29 |
localrepo.localrepository.lfstatus = |
|
11 | mercurial.localrepo.localrepository._lfstatuswriters = [lambda *msg, **opts: None] | |
|
12 | # 3.5 7699d3212994 added the invariant that repo.lfstatus must exist before hitting overridearchive | |||
|
13 | mercurial.localrepo.localrepository.lfstatus = False |
@@ -1,592 +1,592 b'' | |||||
1 | import os |
|
1 | import os | |
2 |
|
2 | |||
3 | import mock |
|
3 | import mock | |
4 | import pytest |
|
4 | import pytest | |
5 |
|
5 | |||
6 | from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository |
|
6 | from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository | |
7 | from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError |
|
7 | from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError | |
8 | from kallithea.lib.vcs.nodes import NodeKind, NodeState |
|
8 | from kallithea.lib.vcs.nodes import NodeKind, NodeState | |
9 | from kallithea.lib.vcs.utils import safe_str |
|
9 | from kallithea.lib.vcs.utils import safe_str | |
10 | from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH |
|
10 | from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH | |
11 |
|
11 | |||
12 |
|
12 | |||
13 | class TestMercurialRepository(object): |
|
13 | class TestMercurialRepository(object): | |
14 |
|
14 | |||
15 | def __check_for_existing_repo(self): |
|
15 | def __check_for_existing_repo(self): | |
16 | if os.path.exists(TEST_HG_REPO_CLONE): |
|
16 | if os.path.exists(TEST_HG_REPO_CLONE): | |
17 | pytest.fail('Cannot test mercurial clone repo as location %s already ' |
|
17 | pytest.fail('Cannot test mercurial clone repo as location %s already ' | |
18 | 'exists. You should manually remove it first.' |
|
18 | 'exists. You should manually remove it first.' | |
19 | % TEST_HG_REPO_CLONE) |
|
19 | % TEST_HG_REPO_CLONE) | |
20 |
|
20 | |||
21 | def setup_method(self): |
|
21 | def setup_method(self): | |
22 | self.repo = MercurialRepository(safe_str(TEST_HG_REPO)) |
|
22 | self.repo = MercurialRepository(safe_str(TEST_HG_REPO)) | |
23 |
|
23 | |||
24 | def test_wrong_repo_path(self): |
|
24 | def test_wrong_repo_path(self): | |
25 | wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo') |
|
25 | wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo') | |
26 | with pytest.raises(RepositoryError): |
|
26 | with pytest.raises(RepositoryError): | |
27 | MercurialRepository(wrong_repo_path) |
|
27 | MercurialRepository(wrong_repo_path) | |
28 |
|
28 | |||
29 | def test_unicode_path_repo(self): |
|
29 | def test_unicode_path_repo(self): | |
30 | with pytest.raises(VCSError): |
|
30 | with pytest.raises(VCSError): | |
31 | MercurialRepository(u'iShouldFail') |
|
31 | MercurialRepository(u'iShouldFail') | |
32 |
|
32 | |||
33 | def test_repo_clone(self): |
|
33 | def test_repo_clone(self): | |
34 | self.__check_for_existing_repo() |
|
34 | self.__check_for_existing_repo() | |
35 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) |
|
35 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) | |
36 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, |
|
36 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, | |
37 | src_url=TEST_HG_REPO, update_after_clone=True) |
|
37 | src_url=TEST_HG_REPO, update_after_clone=True) | |
38 | assert len(repo.revisions) == len(repo_clone.revisions) |
|
38 | assert len(repo.revisions) == len(repo_clone.revisions) | |
39 | # Checking hashes of changesets should be enough |
|
39 | # Checking hashes of changesets should be enough | |
40 | for changeset in repo.get_changesets(): |
|
40 | for changeset in repo.get_changesets(): | |
41 | raw_id = changeset.raw_id |
|
41 | raw_id = changeset.raw_id | |
42 | assert raw_id == repo_clone.get_changeset(raw_id).raw_id |
|
42 | assert raw_id == repo_clone.get_changeset(raw_id).raw_id | |
43 |
|
43 | |||
44 | def test_repo_clone_with_update(self): |
|
44 | def test_repo_clone_with_update(self): | |
45 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) |
|
45 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) | |
46 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update', |
|
46 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update', | |
47 | src_url=TEST_HG_REPO, update_after_clone=True) |
|
47 | src_url=TEST_HG_REPO, update_after_clone=True) | |
48 | assert len(repo.revisions) == len(repo_clone.revisions) |
|
48 | assert len(repo.revisions) == len(repo_clone.revisions) | |
49 |
|
49 | |||
50 | # check if current workdir was updated |
|
50 | # check if current workdir was updated | |
51 | assert os.path.isfile( |
|
51 | assert os.path.isfile( | |
52 | os.path.join( |
|
52 | os.path.join( | |
53 | TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in' |
|
53 | TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in' | |
54 | ) |
|
54 | ) | |
55 | ) |
|
55 | ) | |
56 |
|
56 | |||
57 | def test_repo_clone_without_update(self): |
|
57 | def test_repo_clone_without_update(self): | |
58 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) |
|
58 | repo = MercurialRepository(safe_str(TEST_HG_REPO)) | |
59 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update', |
|
59 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update', | |
60 | src_url=TEST_HG_REPO, update_after_clone=False) |
|
60 | src_url=TEST_HG_REPO, update_after_clone=False) | |
61 | assert len(repo.revisions) == len(repo_clone.revisions) |
|
61 | assert len(repo.revisions) == len(repo_clone.revisions) | |
62 | assert not os.path.isfile( |
|
62 | assert not os.path.isfile( | |
63 | os.path.join( |
|
63 | os.path.join( | |
64 | TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in' |
|
64 | TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in' | |
65 | ) |
|
65 | ) | |
66 | ) |
|
66 | ) | |
67 |
|
67 | |||
68 | def test_pull(self): |
|
68 | def test_pull(self): | |
69 | if os.path.exists(TEST_HG_REPO_PULL): |
|
69 | if os.path.exists(TEST_HG_REPO_PULL): | |
70 | pytest.fail('Cannot test mercurial pull command as location %s ' |
|
70 | pytest.fail('Cannot test mercurial pull command as location %s ' | |
71 | 'already exists. You should manually remove it first' |
|
71 | 'already exists. You should manually remove it first' | |
72 | % TEST_HG_REPO_PULL) |
|
72 | % TEST_HG_REPO_PULL) | |
73 | repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True) |
|
73 | repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True) | |
74 | assert len(self.repo.revisions) > len(repo_new.revisions) |
|
74 | assert len(self.repo.revisions) > len(repo_new.revisions) | |
75 |
|
75 | |||
76 | repo_new.pull(self.repo.path) |
|
76 | repo_new.pull(self.repo.path) | |
77 | repo_new = MercurialRepository(TEST_HG_REPO_PULL) |
|
77 | repo_new = MercurialRepository(TEST_HG_REPO_PULL) | |
78 | assert len(self.repo.revisions) == len(repo_new.revisions) |
|
78 | assert len(self.repo.revisions) == len(repo_new.revisions) | |
79 |
|
79 | |||
80 | def test_revisions(self): |
|
80 | def test_revisions(self): | |
81 | # there are 21 revisions at bitbucket now |
|
81 | # there are 21 revisions at bitbucket now | |
82 | # so we can assume they would be available from now on |
|
82 | # so we can assume they would be available from now on | |
83 | subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545', |
|
83 | subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545', | |
84 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', |
|
84 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', | |
85 | '6cba7170863a2411822803fa77a0a264f1310b35', |
|
85 | '6cba7170863a2411822803fa77a0a264f1310b35', | |
86 | '56349e29c2af3ac913b28bde9a2c6154436e615b', |
|
86 | '56349e29c2af3ac913b28bde9a2c6154436e615b', | |
87 | '2dda4e345facb0ccff1a191052dd1606dba6781d', |
|
87 | '2dda4e345facb0ccff1a191052dd1606dba6781d', | |
88 | '6fff84722075f1607a30f436523403845f84cd9e', |
|
88 | '6fff84722075f1607a30f436523403845f84cd9e', | |
89 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', |
|
89 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', | |
90 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', |
|
90 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', | |
91 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', |
|
91 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', | |
92 | 'be90031137367893f1c406e0a8683010fd115b79', |
|
92 | 'be90031137367893f1c406e0a8683010fd115b79', | |
93 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', |
|
93 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', | |
94 | '84478366594b424af694a6c784cb991a16b87c21', |
|
94 | '84478366594b424af694a6c784cb991a16b87c21', | |
95 | '17f8e105dddb9f339600389c6dc7175d395a535c', |
|
95 | '17f8e105dddb9f339600389c6dc7175d395a535c', | |
96 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', |
|
96 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', | |
97 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', |
|
97 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', | |
98 | '786facd2c61deb9cf91e9534735124fb8fc11842', |
|
98 | '786facd2c61deb9cf91e9534735124fb8fc11842', | |
99 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', |
|
99 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', | |
100 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', |
|
100 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', | |
101 | 'eada5a770da98ab0dd7325e29d00e0714f228d09' |
|
101 | 'eada5a770da98ab0dd7325e29d00e0714f228d09' | |
102 | ]) |
|
102 | ]) | |
103 | assert subset.issubset(set(self.repo.revisions)) |
|
103 | assert subset.issubset(set(self.repo.revisions)) | |
104 |
|
104 | |||
105 | # check if we have the proper order of revisions |
|
105 | # check if we have the proper order of revisions | |
106 | org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545', |
|
106 | org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545', | |
107 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', |
|
107 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', | |
108 | '6cba7170863a2411822803fa77a0a264f1310b35', |
|
108 | '6cba7170863a2411822803fa77a0a264f1310b35', | |
109 | '56349e29c2af3ac913b28bde9a2c6154436e615b', |
|
109 | '56349e29c2af3ac913b28bde9a2c6154436e615b', | |
110 | '2dda4e345facb0ccff1a191052dd1606dba6781d', |
|
110 | '2dda4e345facb0ccff1a191052dd1606dba6781d', | |
111 | '6fff84722075f1607a30f436523403845f84cd9e', |
|
111 | '6fff84722075f1607a30f436523403845f84cd9e', | |
112 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', |
|
112 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', | |
113 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', |
|
113 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', | |
114 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', |
|
114 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', | |
115 | 'be90031137367893f1c406e0a8683010fd115b79', |
|
115 | 'be90031137367893f1c406e0a8683010fd115b79', | |
116 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', |
|
116 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', | |
117 | '84478366594b424af694a6c784cb991a16b87c21', |
|
117 | '84478366594b424af694a6c784cb991a16b87c21', | |
118 | '17f8e105dddb9f339600389c6dc7175d395a535c', |
|
118 | '17f8e105dddb9f339600389c6dc7175d395a535c', | |
119 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', |
|
119 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', | |
120 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', |
|
120 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', | |
121 | '786facd2c61deb9cf91e9534735124fb8fc11842', |
|
121 | '786facd2c61deb9cf91e9534735124fb8fc11842', | |
122 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', |
|
122 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', | |
123 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', |
|
123 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', | |
124 | 'eada5a770da98ab0dd7325e29d00e0714f228d09', |
|
124 | 'eada5a770da98ab0dd7325e29d00e0714f228d09', | |
125 | '2c1885c735575ca478bf9e17b0029dca68824458', |
|
125 | '2c1885c735575ca478bf9e17b0029dca68824458', | |
126 | 'd9bcd465040bf869799b09ad732c04e0eea99fe9', |
|
126 | 'd9bcd465040bf869799b09ad732c04e0eea99fe9', | |
127 | '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7', |
|
127 | '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7', | |
128 | '4fb8326d78e5120da2c7468dcf7098997be385da', |
|
128 | '4fb8326d78e5120da2c7468dcf7098997be385da', | |
129 | '62b4a097164940bd66030c4db51687f3ec035eed', |
|
129 | '62b4a097164940bd66030c4db51687f3ec035eed', | |
130 | '536c1a19428381cfea92ac44985304f6a8049569', |
|
130 | '536c1a19428381cfea92ac44985304f6a8049569', | |
131 | '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4', |
|
131 | '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4', | |
132 | '9bb326a04ae5d98d437dece54be04f830cf1edd9', |
|
132 | '9bb326a04ae5d98d437dece54be04f830cf1edd9', | |
133 | 'f8940bcb890a98c4702319fbe36db75ea309b475', |
|
133 | 'f8940bcb890a98c4702319fbe36db75ea309b475', | |
134 | 'ff5ab059786ebc7411e559a2cc309dfae3625a3b', |
|
134 | 'ff5ab059786ebc7411e559a2cc309dfae3625a3b', | |
135 | '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08', |
|
135 | '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08', | |
136 | 'ee87846a61c12153b51543bf860e1026c6d3dcba', ] |
|
136 | 'ee87846a61c12153b51543bf860e1026c6d3dcba', ] | |
137 | assert org == self.repo.revisions[:31] |
|
137 | assert org == self.repo.revisions[:31] | |
138 |
|
138 | |||
139 | def test_iter_slice(self): |
|
139 | def test_iter_slice(self): | |
140 | sliced = list(self.repo[:10]) |
|
140 | sliced = list(self.repo[:10]) | |
141 | itered = list(self.repo)[:10] |
|
141 | itered = list(self.repo)[:10] | |
142 | assert sliced == itered |
|
142 | assert sliced == itered | |
143 |
|
143 | |||
144 | def test_slicing(self): |
|
144 | def test_slicing(self): | |
145 | # 4 1 5 10 95 |
|
145 | # 4 1 5 10 95 | |
146 | for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), |
|
146 | for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), | |
147 | (10, 20, 10), (5, 100, 95)]: |
|
147 | (10, 20, 10), (5, 100, 95)]: | |
148 | revs = list(self.repo[sfrom:sto]) |
|
148 | revs = list(self.repo[sfrom:sto]) | |
149 | assert len(revs) == size |
|
149 | assert len(revs) == size | |
150 | assert revs[0] == self.repo.get_changeset(sfrom) |
|
150 | assert revs[0] == self.repo.get_changeset(sfrom) | |
151 | assert revs[-1] == self.repo.get_changeset(sto - 1) |
|
151 | assert revs[-1] == self.repo.get_changeset(sto - 1) | |
152 |
|
152 | |||
153 | def test_branches(self): |
|
153 | def test_branches(self): | |
154 | # TODO: Need more tests here |
|
154 | # TODO: Need more tests here | |
155 |
|
155 | |||
156 | # active branches |
|
156 | # active branches | |
157 | assert 'default' in self.repo.branches |
|
157 | assert 'default' in self.repo.branches | |
158 | assert 'stable' in self.repo.branches |
|
158 | assert 'stable' in self.repo.branches | |
159 |
|
159 | |||
160 | # closed |
|
160 | # closed | |
161 | assert 'git' in self.repo._get_branches(closed=True) |
|
161 | assert 'git' in self.repo._get_branches(closed=True) | |
162 | assert 'web' in self.repo._get_branches(closed=True) |
|
162 | assert 'web' in self.repo._get_branches(closed=True) | |
163 |
|
163 | |||
164 | for name, id in self.repo.branches.items(): |
|
164 | for name, id in self.repo.branches.items(): | |
165 | assert isinstance(self.repo.get_changeset(id), MercurialChangeset) |
|
165 | assert isinstance(self.repo.get_changeset(id), MercurialChangeset) | |
166 |
|
166 | |||
167 | def test_tip_in_tags(self): |
|
167 | def test_tip_in_tags(self): | |
168 | # tip is always a tag |
|
168 | # tip is always a tag | |
169 | assert 'tip' in self.repo.tags |
|
169 | assert 'tip' in self.repo.tags | |
170 |
|
170 | |||
171 | def test_tip_changeset_in_tags(self): |
|
171 | def test_tip_changeset_in_tags(self): | |
172 | tip = self.repo.get_changeset() |
|
172 | tip = self.repo.get_changeset() | |
173 | assert self.repo.tags['tip'] == tip.raw_id |
|
173 | assert self.repo.tags['tip'] == tip.raw_id | |
174 |
|
174 | |||
175 | def test_initial_changeset(self): |
|
175 | def test_initial_changeset(self): | |
176 |
|
176 | |||
177 | init_chset = self.repo.get_changeset(0) |
|
177 | init_chset = self.repo.get_changeset(0) | |
178 | assert init_chset.message == 'initial import' |
|
178 | assert init_chset.message == 'initial import' | |
179 | assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>' |
|
179 | assert init_chset.author == 'Marcin Kuzminski <marcin@python-blog.com>' | |
180 | assert sorted(init_chset._file_paths) == sorted([ |
|
180 | assert sorted(init_chset._file_paths) == sorted([ | |
181 | 'vcs/__init__.py', |
|
181 | 'vcs/__init__.py', | |
182 | 'vcs/backends/BaseRepository.py', |
|
182 | 'vcs/backends/BaseRepository.py', | |
183 | 'vcs/backends/__init__.py', |
|
183 | 'vcs/backends/__init__.py', | |
184 | ]) |
|
184 | ]) | |
185 |
|
185 | |||
186 | assert sorted(init_chset._dir_paths) == sorted(['', 'vcs', 'vcs/backends']) |
|
186 | assert sorted(init_chset._dir_paths) == sorted(['', 'vcs', 'vcs/backends']) | |
187 |
|
187 | |||
188 | with pytest.raises(NodeDoesNotExistError): |
|
188 | with pytest.raises(NodeDoesNotExistError): | |
189 | init_chset.get_node(path='foobar') |
|
189 | init_chset.get_node(path='foobar') | |
190 |
|
190 | |||
191 | node = init_chset.get_node('vcs/') |
|
191 | node = init_chset.get_node('vcs/') | |
192 | assert hasattr(node, 'kind') |
|
192 | assert hasattr(node, 'kind') | |
193 | assert node.kind == NodeKind.DIR |
|
193 | assert node.kind == NodeKind.DIR | |
194 |
|
194 | |||
195 | node = init_chset.get_node('vcs') |
|
195 | node = init_chset.get_node('vcs') | |
196 | assert hasattr(node, 'kind') |
|
196 | assert hasattr(node, 'kind') | |
197 | assert node.kind == NodeKind.DIR |
|
197 | assert node.kind == NodeKind.DIR | |
198 |
|
198 | |||
199 | node = init_chset.get_node('vcs/__init__.py') |
|
199 | node = init_chset.get_node('vcs/__init__.py') | |
200 | assert hasattr(node, 'kind') |
|
200 | assert hasattr(node, 'kind') | |
201 | assert node.kind == NodeKind.FILE |
|
201 | assert node.kind == NodeKind.FILE | |
202 |
|
202 | |||
203 | def test_not_existing_changeset(self): |
|
203 | def test_not_existing_changeset(self): | |
204 | # rawid |
|
204 | # rawid | |
205 | with pytest.raises(RepositoryError): |
|
205 | with pytest.raises(RepositoryError): | |
206 | self.repo.get_changeset('abcd' * 10) |
|
206 | self.repo.get_changeset('abcd' * 10) | |
207 | # shortid |
|
207 | # shortid | |
208 | with pytest.raises(RepositoryError): |
|
208 | with pytest.raises(RepositoryError): | |
209 | self.repo.get_changeset('erro' * 4) |
|
209 | self.repo.get_changeset('erro' * 4) | |
210 | # numeric |
|
210 | # numeric | |
211 | with pytest.raises(RepositoryError): |
|
211 | with pytest.raises(RepositoryError): | |
212 | self.repo.get_changeset(self.repo.count() + 1) |
|
212 | self.repo.get_changeset(self.repo.count() + 1) | |
213 |
|
213 | |||
214 | # Small chance we ever get to this one |
|
214 | # Small chance we ever get to this one | |
215 | revision = pow(2, 30) |
|
215 | revision = pow(2, 30) | |
216 | with pytest.raises(RepositoryError): |
|
216 | with pytest.raises(RepositoryError): | |
217 | self.repo.get_changeset(revision) |
|
217 | self.repo.get_changeset(revision) | |
218 |
|
218 | |||
219 | def test_changeset10(self): |
|
219 | def test_changeset10(self): | |
220 |
|
220 | |||
221 | chset10 = self.repo.get_changeset(10) |
|
221 | chset10 = self.repo.get_changeset(10) | |
222 | readme = b"""=== |
|
222 | readme = b"""=== | |
223 | VCS |
|
223 | VCS | |
224 | === |
|
224 | === | |
225 |
|
225 | |||
226 | Various Version Control System management abstraction layer for Python. |
|
226 | Various Version Control System management abstraction layer for Python. | |
227 |
|
227 | |||
228 | Introduction |
|
228 | Introduction | |
229 | ------------ |
|
229 | ------------ | |
230 |
|
230 | |||
231 | TODO: To be written... |
|
231 | TODO: To be written... | |
232 |
|
232 | |||
233 | """ |
|
233 | """ | |
234 | node = chset10.get_node('README.rst') |
|
234 | node = chset10.get_node('README.rst') | |
235 | assert node.kind == NodeKind.FILE |
|
235 | assert node.kind == NodeKind.FILE | |
236 | assert node.content == readme |
|
236 | assert node.content == readme | |
237 |
|
237 | |||
238 | @mock.patch('kallithea.lib.vcs.backends.hg.repository.diffopts') |
|
238 | @mock.patch('mercurial.mdiff.diffopts') | |
239 | def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts): |
|
239 | def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts): | |
240 | zero_context = 0 |
|
240 | zero_context = 0 | |
241 |
|
241 | |||
242 | self.repo.get_diff(0, 1, 'foo', context=zero_context) |
|
242 | self.repo.get_diff(0, 1, 'foo', context=zero_context) | |
243 |
|
243 | |||
244 | mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context) |
|
244 | mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context) | |
245 |
|
245 | |||
246 | @mock.patch('kallithea.lib.vcs.backends.hg.repository.diffopts') |
|
246 | @mock.patch('mercurial.mdiff.diffopts') | |
247 | def test_get_diff_sanitizes_negative_context(self, mock_diffopts): |
|
247 | def test_get_diff_sanitizes_negative_context(self, mock_diffopts): | |
248 | negative_context = -10 |
|
248 | negative_context = -10 | |
249 | zero_context = 0 |
|
249 | zero_context = 0 | |
250 |
|
250 | |||
251 | self.repo.get_diff(0, 1, 'foo', context=negative_context) |
|
251 | self.repo.get_diff(0, 1, 'foo', context=negative_context) | |
252 |
|
252 | |||
253 | mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context) |
|
253 | mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context) | |
254 |
|
254 | |||
255 |
|
255 | |||
256 | class TestMercurialChangeset(object): |
|
256 | class TestMercurialChangeset(object): | |
257 |
|
257 | |||
258 | def setup_method(self): |
|
258 | def setup_method(self): | |
259 | self.repo = MercurialRepository(safe_str(TEST_HG_REPO)) |
|
259 | self.repo = MercurialRepository(safe_str(TEST_HG_REPO)) | |
260 |
|
260 | |||
261 | def _test_equality(self, changeset): |
|
261 | def _test_equality(self, changeset): | |
262 | revision = changeset.revision |
|
262 | revision = changeset.revision | |
263 | assert changeset == self.repo.get_changeset(revision) |
|
263 | assert changeset == self.repo.get_changeset(revision) | |
264 |
|
264 | |||
265 | def test_equality(self): |
|
265 | def test_equality(self): | |
266 | revs = [0, 10, 20] |
|
266 | revs = [0, 10, 20] | |
267 | changesets = [self.repo.get_changeset(rev) for rev in revs] |
|
267 | changesets = [self.repo.get_changeset(rev) for rev in revs] | |
268 | for changeset in changesets: |
|
268 | for changeset in changesets: | |
269 | self._test_equality(changeset) |
|
269 | self._test_equality(changeset) | |
270 |
|
270 | |||
271 | def test_default_changeset(self): |
|
271 | def test_default_changeset(self): | |
272 | tip = self.repo.get_changeset('tip') |
|
272 | tip = self.repo.get_changeset('tip') | |
273 | assert tip == self.repo.get_changeset() |
|
273 | assert tip == self.repo.get_changeset() | |
274 | assert tip == self.repo.get_changeset(revision=None) |
|
274 | assert tip == self.repo.get_changeset(revision=None) | |
275 | assert tip == list(self.repo[-1:])[0] |
|
275 | assert tip == list(self.repo[-1:])[0] | |
276 |
|
276 | |||
277 | def test_root_node(self): |
|
277 | def test_root_node(self): | |
278 | tip = self.repo.get_changeset('tip') |
|
278 | tip = self.repo.get_changeset('tip') | |
279 | assert tip.root is tip.get_node('') |
|
279 | assert tip.root is tip.get_node('') | |
280 |
|
280 | |||
281 | def test_lazy_fetch(self): |
|
281 | def test_lazy_fetch(self): | |
282 | """ |
|
282 | """ | |
283 | Test if changeset's nodes expands and are cached as we walk through |
|
283 | Test if changeset's nodes expands and are cached as we walk through | |
284 | the revision. This test is somewhat hard to write as order of tests |
|
284 | the revision. This test is somewhat hard to write as order of tests | |
285 | is a key here. Written by running command after command in a shell. |
|
285 | is a key here. Written by running command after command in a shell. | |
286 | """ |
|
286 | """ | |
287 | chset = self.repo.get_changeset(45) |
|
287 | chset = self.repo.get_changeset(45) | |
288 | assert len(chset.nodes) == 0 |
|
288 | assert len(chset.nodes) == 0 | |
289 | root = chset.root |
|
289 | root = chset.root | |
290 | assert len(chset.nodes) == 1 |
|
290 | assert len(chset.nodes) == 1 | |
291 | assert len(root.nodes) == 8 |
|
291 | assert len(root.nodes) == 8 | |
292 | # accessing root.nodes updates chset.nodes |
|
292 | # accessing root.nodes updates chset.nodes | |
293 | assert len(chset.nodes) == 9 |
|
293 | assert len(chset.nodes) == 9 | |
294 |
|
294 | |||
295 | docs = root.get_node('docs') |
|
295 | docs = root.get_node('docs') | |
296 | # we haven't yet accessed anything new as docs dir was already cached |
|
296 | # we haven't yet accessed anything new as docs dir was already cached | |
297 | assert len(chset.nodes) == 9 |
|
297 | assert len(chset.nodes) == 9 | |
298 | assert len(docs.nodes) == 8 |
|
298 | assert len(docs.nodes) == 8 | |
299 | # accessing docs.nodes updates chset.nodes |
|
299 | # accessing docs.nodes updates chset.nodes | |
300 | assert len(chset.nodes) == 17 |
|
300 | assert len(chset.nodes) == 17 | |
301 |
|
301 | |||
302 | assert docs is chset.get_node('docs') |
|
302 | assert docs is chset.get_node('docs') | |
303 | assert docs is root.nodes[0] |
|
303 | assert docs is root.nodes[0] | |
304 | assert docs is root.dirs[0] |
|
304 | assert docs is root.dirs[0] | |
305 | assert docs is chset.get_node('docs') |
|
305 | assert docs is chset.get_node('docs') | |
306 |
|
306 | |||
307 | def test_nodes_with_changeset(self): |
|
307 | def test_nodes_with_changeset(self): | |
308 | chset = self.repo.get_changeset(45) |
|
308 | chset = self.repo.get_changeset(45) | |
309 | root = chset.root |
|
309 | root = chset.root | |
310 | docs = root.get_node('docs') |
|
310 | docs = root.get_node('docs') | |
311 | assert docs is chset.get_node('docs') |
|
311 | assert docs is chset.get_node('docs') | |
312 | api = docs.get_node('api') |
|
312 | api = docs.get_node('api') | |
313 | assert api is chset.get_node('docs/api') |
|
313 | assert api is chset.get_node('docs/api') | |
314 | index = api.get_node('index.rst') |
|
314 | index = api.get_node('index.rst') | |
315 | assert index is chset.get_node('docs/api/index.rst') |
|
315 | assert index is chset.get_node('docs/api/index.rst') | |
316 | assert index is chset.get_node('docs').get_node('api').get_node('index.rst') |
|
316 | assert index is chset.get_node('docs').get_node('api').get_node('index.rst') | |
317 |
|
317 | |||
318 | def test_branch_and_tags(self): |
|
318 | def test_branch_and_tags(self): | |
319 | chset0 = self.repo.get_changeset(0) |
|
319 | chset0 = self.repo.get_changeset(0) | |
320 | assert chset0.branch == 'default' |
|
320 | assert chset0.branch == 'default' | |
321 | assert chset0.branches == ['default'] |
|
321 | assert chset0.branches == ['default'] | |
322 | assert chset0.tags == [] |
|
322 | assert chset0.tags == [] | |
323 |
|
323 | |||
324 | chset10 = self.repo.get_changeset(10) |
|
324 | chset10 = self.repo.get_changeset(10) | |
325 | assert chset10.branch == 'default' |
|
325 | assert chset10.branch == 'default' | |
326 | assert chset10.branches == ['default'] |
|
326 | assert chset10.branches == ['default'] | |
327 | assert chset10.tags == [] |
|
327 | assert chset10.tags == [] | |
328 |
|
328 | |||
329 | chset44 = self.repo.get_changeset(44) |
|
329 | chset44 = self.repo.get_changeset(44) | |
330 | assert chset44.branch == 'web' |
|
330 | assert chset44.branch == 'web' | |
331 | assert chset44.branches == ['web'] |
|
331 | assert chset44.branches == ['web'] | |
332 |
|
332 | |||
333 | tip = self.repo.get_changeset('tip') |
|
333 | tip = self.repo.get_changeset('tip') | |
334 | assert 'tip' in tip.tags |
|
334 | assert 'tip' in tip.tags | |
335 |
|
335 | |||
336 | def _test_file_size(self, revision, path, size): |
|
336 | def _test_file_size(self, revision, path, size): | |
337 | node = self.repo.get_changeset(revision).get_node(path) |
|
337 | node = self.repo.get_changeset(revision).get_node(path) | |
338 | assert node.is_file() |
|
338 | assert node.is_file() | |
339 | assert node.size == size |
|
339 | assert node.size == size | |
340 |
|
340 | |||
341 | def test_file_size(self): |
|
341 | def test_file_size(self): | |
342 | to_check = ( |
|
342 | to_check = ( | |
343 | (10, 'setup.py', 1068), |
|
343 | (10, 'setup.py', 1068), | |
344 | (20, 'setup.py', 1106), |
|
344 | (20, 'setup.py', 1106), | |
345 | (60, 'setup.py', 1074), |
|
345 | (60, 'setup.py', 1074), | |
346 |
|
346 | |||
347 | (10, 'vcs/backends/base.py', 2921), |
|
347 | (10, 'vcs/backends/base.py', 2921), | |
348 | (20, 'vcs/backends/base.py', 3936), |
|
348 | (20, 'vcs/backends/base.py', 3936), | |
349 | (60, 'vcs/backends/base.py', 6189), |
|
349 | (60, 'vcs/backends/base.py', 6189), | |
350 | ) |
|
350 | ) | |
351 | for revision, path, size in to_check: |
|
351 | for revision, path, size in to_check: | |
352 | self._test_file_size(revision, path, size) |
|
352 | self._test_file_size(revision, path, size) | |
353 |
|
353 | |||
354 | def _test_dir_size(self, revision, path, size): |
|
354 | def _test_dir_size(self, revision, path, size): | |
355 | node = self.repo.get_changeset(revision).get_node(path) |
|
355 | node = self.repo.get_changeset(revision).get_node(path) | |
356 | assert not node.is_file() |
|
356 | assert not node.is_file() | |
357 | assert node.size == size |
|
357 | assert node.size == size | |
358 |
|
358 | |||
359 | def test_dir_size(self): |
|
359 | def test_dir_size(self): | |
360 | to_check = ( |
|
360 | to_check = ( | |
361 | ('96507bd11ecc', '/', 682421), |
|
361 | ('96507bd11ecc', '/', 682421), | |
362 | ('a53d9201d4bc', '/', 682410), |
|
362 | ('a53d9201d4bc', '/', 682410), | |
363 | ('90243de06161', '/', 682006), |
|
363 | ('90243de06161', '/', 682006), | |
364 | ) |
|
364 | ) | |
365 | for revision, path, size in to_check: |
|
365 | for revision, path, size in to_check: | |
366 | self._test_dir_size(revision, path, size) |
|
366 | self._test_dir_size(revision, path, size) | |
367 |
|
367 | |||
368 | def test_repo_size(self): |
|
368 | def test_repo_size(self): | |
369 | assert self.repo.size == 682421 |
|
369 | assert self.repo.size == 682421 | |
370 |
|
370 | |||
371 | def test_file_history(self): |
|
371 | def test_file_history(self): | |
372 | # we can only check if those revisions are present in the history |
|
372 | # we can only check if those revisions are present in the history | |
373 | # as we cannot update this test every time file is changed |
|
373 | # as we cannot update this test every time file is changed | |
374 | files = { |
|
374 | files = { | |
375 | 'setup.py': [7, 18, 45, 46, 47, 69, 77], |
|
375 | 'setup.py': [7, 18, 45, 46, 47, 69, 77], | |
376 | 'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, |
|
376 | 'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, | |
377 | 61, 73, 76], |
|
377 | 61, 73, 76], | |
378 | 'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, |
|
378 | 'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, | |
379 | 26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, |
|
379 | 26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, | |
380 | 48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, |
|
380 | 48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, | |
381 | 82], |
|
381 | 82], | |
382 | } |
|
382 | } | |
383 | for path, revs in files.items(): |
|
383 | for path, revs in files.items(): | |
384 | tip = self.repo.get_changeset(revs[-1]) |
|
384 | tip = self.repo.get_changeset(revs[-1]) | |
385 | node = tip.get_node(path) |
|
385 | node = tip.get_node(path) | |
386 | node_revs = [chset.revision for chset in node.history] |
|
386 | node_revs = [chset.revision for chset in node.history] | |
387 | assert set(revs).issubset(set(node_revs)), \ |
|
387 | assert set(revs).issubset(set(node_revs)), \ | |
388 | "We assumed that %s is subset of revisions for which file %s " \ |
|
388 | "We assumed that %s is subset of revisions for which file %s " \ | |
389 | "has been changed, and history of that node returned: %s" \ |
|
389 | "has been changed, and history of that node returned: %s" \ | |
390 | % (revs, path, node_revs) |
|
390 | % (revs, path, node_revs) | |
391 |
|
391 | |||
392 | def test_file_annotate(self): |
|
392 | def test_file_annotate(self): | |
393 | files = { |
|
393 | files = { | |
394 | 'vcs/backends/__init__.py': |
|
394 | 'vcs/backends/__init__.py': | |
395 | {89: {'lines_no': 31, |
|
395 | {89: {'lines_no': 31, | |
396 | 'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44, |
|
396 | 'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44, | |
397 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, |
|
397 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, | |
398 | 32, 32, 32, 32, 37, 32, 37, 37, 32, |
|
398 | 32, 32, 32, 32, 37, 32, 37, 37, 32, | |
399 | 32, 32]}, |
|
399 | 32, 32]}, | |
400 | 20: {'lines_no': 1, |
|
400 | 20: {'lines_no': 1, | |
401 | 'changesets': [4]}, |
|
401 | 'changesets': [4]}, | |
402 | 55: {'lines_no': 31, |
|
402 | 55: {'lines_no': 31, | |
403 | 'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44, |
|
403 | 'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44, | |
404 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, |
|
404 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, | |
405 | 32, 32, 32, 32, 37, 32, 37, 37, 32, |
|
405 | 32, 32, 32, 32, 37, 32, 37, 37, 32, | |
406 | 32, 32]}}, |
|
406 | 32, 32]}}, | |
407 | 'vcs/exceptions.py': |
|
407 | 'vcs/exceptions.py': | |
408 | {89: {'lines_no': 18, |
|
408 | {89: {'lines_no': 18, | |
409 | 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, |
|
409 | 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, | |
410 | 16, 16, 17, 16, 16, 18, 18, 18]}, |
|
410 | 16, 16, 17, 16, 16, 18, 18, 18]}, | |
411 | 20: {'lines_no': 18, |
|
411 | 20: {'lines_no': 18, | |
412 | 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, |
|
412 | 'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, | |
413 | 16, 16, 17, 16, 16, 18, 18, 18]}, |
|
413 | 16, 16, 17, 16, 16, 18, 18, 18]}, | |
414 | 55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16, |
|
414 | 55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16, | |
415 | 16, 16, 16, 16, 16, 16, |
|
415 | 16, 16, 16, 16, 16, 16, | |
416 | 17, 16, 16, 18, 18, 18]}}, |
|
416 | 17, 16, 16, 18, 18, 18]}}, | |
417 | 'MANIFEST.in': {89: {'lines_no': 5, |
|
417 | 'MANIFEST.in': {89: {'lines_no': 5, | |
418 | 'changesets': [7, 7, 7, 71, 71]}, |
|
418 | 'changesets': [7, 7, 7, 71, 71]}, | |
419 | 20: {'lines_no': 3, |
|
419 | 20: {'lines_no': 3, | |
420 | 'changesets': [7, 7, 7]}, |
|
420 | 'changesets': [7, 7, 7]}, | |
421 | 55: {'lines_no': 3, |
|
421 | 55: {'lines_no': 3, | |
422 | 'changesets': [7, 7, 7]}}} |
|
422 | 'changesets': [7, 7, 7]}}} | |
423 |
|
423 | |||
424 | for fname, revision_dict in files.items(): |
|
424 | for fname, revision_dict in files.items(): | |
425 | for rev, data in revision_dict.items(): |
|
425 | for rev, data in revision_dict.items(): | |
426 | cs = self.repo.get_changeset(rev) |
|
426 | cs = self.repo.get_changeset(rev) | |
427 | l1_1 = [x[1] for x in cs.get_file_annotate(fname)] |
|
427 | l1_1 = [x[1] for x in cs.get_file_annotate(fname)] | |
428 | l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)] |
|
428 | l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)] | |
429 | assert l1_1 == l1_2 |
|
429 | assert l1_1 == l1_2 | |
430 | l1 = l1_2 = [x[2]().revision for x in cs.get_file_annotate(fname)] |
|
430 | l1 = l1_2 = [x[2]().revision for x in cs.get_file_annotate(fname)] | |
431 | l2 = files[fname][rev]['changesets'] |
|
431 | l2 = files[fname][rev]['changesets'] | |
432 | assert l1 == l2, "The lists of revision for %s@rev%s" \ |
|
432 | assert l1 == l2, "The lists of revision for %s@rev%s" \ | |
433 | "from annotation list should match each other," \ |
|
433 | "from annotation list should match each other," \ | |
434 | "got \n%s \nvs \n%s " % (fname, rev, l1, l2) |
|
434 | "got \n%s \nvs \n%s " % (fname, rev, l1, l2) | |
435 |
|
435 | |||
436 | def test_changeset_state(self): |
|
436 | def test_changeset_state(self): | |
437 | """ |
|
437 | """ | |
438 | Tests which files have been added/changed/removed at particular revision |
|
438 | Tests which files have been added/changed/removed at particular revision | |
439 | """ |
|
439 | """ | |
440 |
|
440 | |||
441 | # rev 46ad32a4f974: |
|
441 | # rev 46ad32a4f974: | |
442 | # hg st --rev 46ad32a4f974 |
|
442 | # hg st --rev 46ad32a4f974 | |
443 | # changed: 13 |
|
443 | # changed: 13 | |
444 | # added: 20 |
|
444 | # added: 20 | |
445 | # removed: 1 |
|
445 | # removed: 1 | |
446 | changed = set(['.hgignore' |
|
446 | changed = set(['.hgignore' | |
447 | , 'README.rst', 'docs/conf.py', 'docs/index.rst', 'setup.py' |
|
447 | , 'README.rst', 'docs/conf.py', 'docs/index.rst', 'setup.py' | |
448 | , 'tests/test_hg.py', 'tests/test_nodes.py', 'vcs/__init__.py' |
|
448 | , 'tests/test_hg.py', 'tests/test_nodes.py', 'vcs/__init__.py' | |
449 | , 'vcs/backends/__init__.py', 'vcs/backends/base.py' |
|
449 | , 'vcs/backends/__init__.py', 'vcs/backends/base.py' | |
450 | , 'vcs/backends/hg.py', 'vcs/nodes.py', 'vcs/utils/__init__.py']) |
|
450 | , 'vcs/backends/hg.py', 'vcs/nodes.py', 'vcs/utils/__init__.py']) | |
451 |
|
451 | |||
452 | added = set(['docs/api/backends/hg.rst' |
|
452 | added = set(['docs/api/backends/hg.rst' | |
453 | , 'docs/api/backends/index.rst', 'docs/api/index.rst' |
|
453 | , 'docs/api/backends/index.rst', 'docs/api/index.rst' | |
454 | , 'docs/api/nodes.rst', 'docs/api/web/index.rst' |
|
454 | , 'docs/api/nodes.rst', 'docs/api/web/index.rst' | |
455 | , 'docs/api/web/simplevcs.rst', 'docs/installation.rst' |
|
455 | , 'docs/api/web/simplevcs.rst', 'docs/installation.rst' | |
456 | , 'docs/quickstart.rst', 'setup.cfg', 'vcs/utils/baseui_config.py' |
|
456 | , 'docs/quickstart.rst', 'setup.cfg', 'vcs/utils/baseui_config.py' | |
457 | , 'vcs/utils/web.py', 'vcs/web/__init__.py', 'vcs/web/exceptions.py' |
|
457 | , 'vcs/utils/web.py', 'vcs/web/__init__.py', 'vcs/web/exceptions.py' | |
458 | , 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py' |
|
458 | , 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py' | |
459 | , 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py' |
|
459 | , 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py' | |
460 | , 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py' |
|
460 | , 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py' | |
461 | , 'vcs/web/simplevcs/views.py']) |
|
461 | , 'vcs/web/simplevcs/views.py']) | |
462 |
|
462 | |||
463 | removed = set(['docs/api.rst']) |
|
463 | removed = set(['docs/api.rst']) | |
464 |
|
464 | |||
465 | chset64 = self.repo.get_changeset('46ad32a4f974') |
|
465 | chset64 = self.repo.get_changeset('46ad32a4f974') | |
466 | assert set((node.path for node in chset64.added)) == added |
|
466 | assert set((node.path for node in chset64.added)) == added | |
467 | assert set((node.path for node in chset64.changed)) == changed |
|
467 | assert set((node.path for node in chset64.changed)) == changed | |
468 | assert set((node.path for node in chset64.removed)) == removed |
|
468 | assert set((node.path for node in chset64.removed)) == removed | |
469 |
|
469 | |||
470 | # rev b090f22d27d6: |
|
470 | # rev b090f22d27d6: | |
471 | # hg st --rev b090f22d27d6 |
|
471 | # hg st --rev b090f22d27d6 | |
472 | # changed: 13 |
|
472 | # changed: 13 | |
473 | # added: 20 |
|
473 | # added: 20 | |
474 | # removed: 1 |
|
474 | # removed: 1 | |
475 | chset88 = self.repo.get_changeset('b090f22d27d6') |
|
475 | chset88 = self.repo.get_changeset('b090f22d27d6') | |
476 | assert set((node.path for node in chset88.added)) == set() |
|
476 | assert set((node.path for node in chset88.added)) == set() | |
477 | assert set((node.path for node in chset88.changed)) == set(['.hgignore']) |
|
477 | assert set((node.path for node in chset88.changed)) == set(['.hgignore']) | |
478 | assert set((node.path for node in chset88.removed)) == set() |
|
478 | assert set((node.path for node in chset88.removed)) == set() | |
479 |
|
479 | |||
480 | # 85: |
|
480 | # 85: | |
481 | # added: 2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py'] |
|
481 | # added: 2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py'] | |
482 | # changed: 4 ['vcs/web/simplevcs/models.py', ...] |
|
482 | # changed: 4 ['vcs/web/simplevcs/models.py', ...] | |
483 | # removed: 1 ['vcs/utils/web.py'] |
|
483 | # removed: 1 ['vcs/utils/web.py'] | |
484 | chset85 = self.repo.get_changeset(85) |
|
484 | chset85 = self.repo.get_changeset(85) | |
485 | assert set((node.path for node in chset85.added)) == set([ |
|
485 | assert set((node.path for node in chset85.added)) == set([ | |
486 | 'vcs/utils/diffs.py', |
|
486 | 'vcs/utils/diffs.py', | |
487 | 'vcs/web/simplevcs/views/diffs.py' |
|
487 | 'vcs/web/simplevcs/views/diffs.py' | |
488 | ]) |
|
488 | ]) | |
489 |
|
489 | |||
490 | assert set((node.path for node in chset85.changed)) == set([ |
|
490 | assert set((node.path for node in chset85.changed)) == set([ | |
491 | 'vcs/web/simplevcs/models.py', |
|
491 | 'vcs/web/simplevcs/models.py', | |
492 | 'vcs/web/simplevcs/utils.py', |
|
492 | 'vcs/web/simplevcs/utils.py', | |
493 | 'vcs/web/simplevcs/views/__init__.py', |
|
493 | 'vcs/web/simplevcs/views/__init__.py', | |
494 | 'vcs/web/simplevcs/views/repository.py', |
|
494 | 'vcs/web/simplevcs/views/repository.py', | |
495 | ]) |
|
495 | ]) | |
496 |
|
496 | |||
497 | assert set((node.path for node in chset85.removed)) == set([ |
|
497 | assert set((node.path for node in chset85.removed)) == set([ | |
498 | 'vcs/utils/web.py' |
|
498 | 'vcs/utils/web.py' | |
499 | ]) |
|
499 | ]) | |
500 |
|
500 | |||
501 |
|
501 | |||
502 | def test_files_state(self): |
|
502 | def test_files_state(self): | |
503 | """ |
|
503 | """ | |
504 | Tests state of FileNodes. |
|
504 | Tests state of FileNodes. | |
505 | """ |
|
505 | """ | |
506 | chset = self.repo.get_changeset(85) |
|
506 | chset = self.repo.get_changeset(85) | |
507 | node = chset.get_node('vcs/utils/diffs.py') |
|
507 | node = chset.get_node('vcs/utils/diffs.py') | |
508 | assert node.state, NodeState.ADDED |
|
508 | assert node.state, NodeState.ADDED | |
509 | assert node.added |
|
509 | assert node.added | |
510 | assert not node.changed |
|
510 | assert not node.changed | |
511 | assert not node.not_changed |
|
511 | assert not node.not_changed | |
512 | assert not node.removed |
|
512 | assert not node.removed | |
513 |
|
513 | |||
514 | chset = self.repo.get_changeset(88) |
|
514 | chset = self.repo.get_changeset(88) | |
515 | node = chset.get_node('.hgignore') |
|
515 | node = chset.get_node('.hgignore') | |
516 | assert node.state, NodeState.CHANGED |
|
516 | assert node.state, NodeState.CHANGED | |
517 | assert not node.added |
|
517 | assert not node.added | |
518 | assert node.changed |
|
518 | assert node.changed | |
519 | assert not node.not_changed |
|
519 | assert not node.not_changed | |
520 | assert not node.removed |
|
520 | assert not node.removed | |
521 |
|
521 | |||
522 | chset = self.repo.get_changeset(85) |
|
522 | chset = self.repo.get_changeset(85) | |
523 | node = chset.get_node('setup.py') |
|
523 | node = chset.get_node('setup.py') | |
524 | assert node.state, NodeState.NOT_CHANGED |
|
524 | assert node.state, NodeState.NOT_CHANGED | |
525 | assert not node.added |
|
525 | assert not node.added | |
526 | assert not node.changed |
|
526 | assert not node.changed | |
527 | assert node.not_changed |
|
527 | assert node.not_changed | |
528 | assert not node.removed |
|
528 | assert not node.removed | |
529 |
|
529 | |||
530 | # If node has REMOVED state then trying to fetch it would raise |
|
530 | # If node has REMOVED state then trying to fetch it would raise | |
531 | # ChangesetError exception |
|
531 | # ChangesetError exception | |
532 | chset = self.repo.get_changeset(2) |
|
532 | chset = self.repo.get_changeset(2) | |
533 | path = 'vcs/backends/BaseRepository.py' |
|
533 | path = 'vcs/backends/BaseRepository.py' | |
534 | with pytest.raises(NodeDoesNotExistError): |
|
534 | with pytest.raises(NodeDoesNotExistError): | |
535 | chset.get_node(path) |
|
535 | chset.get_node(path) | |
536 | # but it would be one of ``removed`` (changeset's attribute) |
|
536 | # but it would be one of ``removed`` (changeset's attribute) | |
537 | assert path in [rf.path for rf in chset.removed] |
|
537 | assert path in [rf.path for rf in chset.removed] | |
538 |
|
538 | |||
539 | def test_commit_message_is_unicode(self): |
|
539 | def test_commit_message_is_unicode(self): | |
540 | for cm in self.repo: |
|
540 | for cm in self.repo: | |
541 | assert isinstance(cm.message, unicode) |
|
541 | assert isinstance(cm.message, unicode) | |
542 |
|
542 | |||
543 | def test_changeset_author_is_unicode(self): |
|
543 | def test_changeset_author_is_unicode(self): | |
544 | for cm in self.repo: |
|
544 | for cm in self.repo: | |
545 | assert isinstance(cm.author, unicode) |
|
545 | assert isinstance(cm.author, unicode) | |
546 |
|
546 | |||
547 | def test_repo_files_content_is_bytes(self): |
|
547 | def test_repo_files_content_is_bytes(self): | |
548 | test_changeset = self.repo.get_changeset(100) |
|
548 | test_changeset = self.repo.get_changeset(100) | |
549 | for node in test_changeset.get_node('/'): |
|
549 | for node in test_changeset.get_node('/'): | |
550 | if node.is_file(): |
|
550 | if node.is_file(): | |
551 | assert isinstance(node.content, bytes) |
|
551 | assert isinstance(node.content, bytes) | |
552 |
|
552 | |||
553 | def test_wrong_path(self): |
|
553 | def test_wrong_path(self): | |
554 | # There is 'setup.py' in the root dir but not there: |
|
554 | # There is 'setup.py' in the root dir but not there: | |
555 | path = 'foo/bar/setup.py' |
|
555 | path = 'foo/bar/setup.py' | |
556 | with pytest.raises(VCSError): |
|
556 | with pytest.raises(VCSError): | |
557 | self.repo.get_changeset().get_node(path) |
|
557 | self.repo.get_changeset().get_node(path) | |
558 |
|
558 | |||
559 | def test_archival_file(self): |
|
559 | def test_archival_file(self): | |
560 | # TODO: |
|
560 | # TODO: | |
561 | pass |
|
561 | pass | |
562 |
|
562 | |||
563 | def test_archival_as_generator(self): |
|
563 | def test_archival_as_generator(self): | |
564 | # TODO: |
|
564 | # TODO: | |
565 | pass |
|
565 | pass | |
566 |
|
566 | |||
567 | def test_archival_wrong_kind(self): |
|
567 | def test_archival_wrong_kind(self): | |
568 | tip = self.repo.get_changeset() |
|
568 | tip = self.repo.get_changeset() | |
569 | with pytest.raises(VCSError): |
|
569 | with pytest.raises(VCSError): | |
570 | tip.fill_archive(kind='error') |
|
570 | tip.fill_archive(kind='error') | |
571 |
|
571 | |||
572 | def test_archival_empty_prefix(self): |
|
572 | def test_archival_empty_prefix(self): | |
573 | # TODO: |
|
573 | # TODO: | |
574 | pass |
|
574 | pass | |
575 |
|
575 | |||
576 | def test_author_email(self): |
|
576 | def test_author_email(self): | |
577 | assert 'marcin@python-blog.com' == self.repo.get_changeset('b986218ba1c9').author_email |
|
577 | assert 'marcin@python-blog.com' == self.repo.get_changeset('b986218ba1c9').author_email | |
578 | assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('3803844fdbd3').author_email |
|
578 | assert 'lukasz.balcerzak@python-center.pl' == self.repo.get_changeset('3803844fdbd3').author_email | |
579 | assert '' == self.repo.get_changeset('84478366594b').author_email |
|
579 | assert '' == self.repo.get_changeset('84478366594b').author_email | |
580 |
|
580 | |||
581 | def test_author_username(self): |
|
581 | def test_author_username(self): | |
582 | assert 'Marcin Kuzminski' == self.repo.get_changeset('b986218ba1c9').author_name |
|
582 | assert 'Marcin Kuzminski' == self.repo.get_changeset('b986218ba1c9').author_name | |
583 | assert 'Lukasz Balcerzak' == self.repo.get_changeset('3803844fdbd3').author_name |
|
583 | assert 'Lukasz Balcerzak' == self.repo.get_changeset('3803844fdbd3').author_name | |
584 | assert 'marcink' == self.repo.get_changeset('84478366594b').author_name |
|
584 | assert 'marcink' == self.repo.get_changeset('84478366594b').author_name | |
585 |
|
585 | |||
586 | def test_successors(self): |
|
586 | def test_successors(self): | |
587 | init_chset = self.repo.get_changeset(0) |
|
587 | init_chset = self.repo.get_changeset(0) | |
588 | assert init_chset.successors == [] |
|
588 | assert init_chset.successors == [] | |
589 |
|
589 | |||
590 | def test_predecessors(self): |
|
590 | def test_predecessors(self): | |
591 | init_chset = self.repo.get_changeset(0) |
|
591 | init_chset = self.repo.get_changeset(0) | |
592 | assert len(init_chset.predecessors) == 0 |
|
592 | assert len(init_chset.predecessors) == 0 |
General Comments 0
You need to be logged in to leave comments.
Login now