Show More
@@ -1,214 +1,214 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.changelog |
|
3 | rhodecode.controllers.changelog | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | changelog controller for rhodecode |
|
6 | changelog controller for rhodecode | |
7 |
|
7 | |||
8 | :created_on: Apr 21, 2010 |
|
8 | :created_on: Apr 21, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 |
|
28 | |||
29 | from pylons import request, url, session, tmpl_context as c |
|
29 | from pylons import request, url, session, tmpl_context as c | |
30 | from pylons.controllers.util import redirect |
|
30 | from pylons.controllers.util import redirect | |
31 | from pylons.i18n.translation import _ |
|
31 | from pylons.i18n.translation import _ | |
32 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
32 | from webob.exc import HTTPNotFound, HTTPBadRequest | |
33 |
|
33 | |||
34 | import rhodecode.lib.helpers as h |
|
34 | import rhodecode.lib.helpers as h | |
35 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
35 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
36 | from rhodecode.lib.base import BaseRepoController, render |
|
36 | from rhodecode.lib.base import BaseRepoController, render | |
37 | from rhodecode.lib.helpers import RepoPage |
|
37 | from rhodecode.lib.helpers import RepoPage | |
38 | from rhodecode.lib.compat import json |
|
38 | from rhodecode.lib.compat import json | |
39 | from rhodecode.lib.graphmod import _colored, _dagwalker |
|
39 | from rhodecode.lib.graphmod import _colored, _dagwalker | |
40 | from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError,\ |
|
40 | from rhodecode.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError,\ | |
41 | ChangesetError, NodeDoesNotExistError, EmptyRepositoryError |
|
41 | ChangesetError, NodeDoesNotExistError, EmptyRepositoryError | |
42 | from rhodecode.lib.utils2 import safe_int |
|
42 | from rhodecode.lib.utils2 import safe_int, safe_str | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 |
|
47 | |||
def _load_changelog_summary():
    """Populate the template context with one page of changesets.

    Reads ``page`` and ``size`` from the request GET arguments, pages
    through the repository's changesets and stores the resulting page,
    its comments and its commit statuses on ``c`` for rendering.
    """
    page_num = safe_int(request.GET.get('page'), 1)
    page_size = safe_int(request.GET.get('size'), 10)

    def _page_url(**kw):
        # keep the chosen page size when generating pager links
        return url('changelog_summary_home',
                   repo_name=c.rhodecode_db_repo.repo_name,
                   size=page_size, **kw)

    c.repo_changesets = RepoPage(c.rhodecode_repo, page=page_num,
                                 items_per_page=page_size,
                                 url=_page_url)
    revisions = [cs.raw_id for cs in c.repo_changesets]
    c.comments = c.rhodecode_db_repo.get_comments(revisions)
    c.statuses = c.rhodecode_db_repo.statuses(revisions)
64 |
|
64 | |||
65 |
|
65 | |||
class ChangelogController(BaseRepoController):
    """Renders the repository changelog: full view, per-file history,
    single-changeset details and the summary fragment."""

    def __before__(self):
        super(ChangelogController, self).__before__()
        # max number of changed files shown per changeset before cutting off
        c.affected_files_cut_off = 60

    def __get_cs_or_redirect(self, rev, repo, redirect_after=True,
                             partial=False):
        """
        Safe way to get a changeset. If an error occurs it redirects to
        the changelog with a proper message; if ``partial`` is set it
        raises an HTTP error instead of redirecting.

        :param rev: revision to fetch
        :param repo: repo instance
        :param redirect_after: when False, return None on an empty repo
            instead of redirecting
        :param partial: request is a partial-XHR fragment; never redirect
        """

        try:
            return c.rhodecode_repo.get_changeset(rev)
        except EmptyRepositoryError as e:
            if not redirect_after:
                return None
            h.flash(h.literal(_('There are no changesets yet')),
                    category='warning')
            redirect(url('changelog_home', repo_name=repo.repo_name))

        except RepositoryError as e:
            log.error(traceback.format_exc())
            h.flash(safe_str(e), category='warning')
            if not partial:
                redirect(h.url('changelog_home', repo_name=repo.repo_name))
            # partial request - signal the error to the client instead
            raise HTTPBadRequest()

    def _graph(self, repo, revs_int, repo_size, size, p):
        """
        Generates a DAG graph for repo and stores it JSON-encoded on
        ``c.jsdata`` for the client-side renderer.

        :param repo: repo instance
        :param revs_int: list of revision numbers to draw
        :param repo_size: total number of changesets in the repo
        :param size: page size
        :param p: page number
        """
        if not revs_int:
            c.jsdata = json.dumps([])
            return

        data = []
        revs = revs_int

        dag = _dagwalker(repo, revs, repo.alias)
        dag = _colored(dag)
        for (_id, _type, ctx, vtx, edges) in dag:
            # only the vertex/edge layout is needed client side
            data.append(['', vtx, edges])

        c.jsdata = json.dumps(data)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def index(self, repo_name, revision=None, f_path=None):
        """Render the changelog, optionally restricted to a branch or to
        the history of a single file (``f_path``)."""
        limit = 100
        default = 20
        if request.GET.get('size'):
            c.size = max(min(safe_int(request.GET.get('size')), limit), 1)
            session['changelog_size'] = c.size
            session.save()
        else:
            c.size = int(session.get('changelog_size', default))
        # min size must be 1
        c.size = max(c.size, 1)
        p = safe_int(request.GET.get('page', 1), 1)
        branch_name = request.GET.get('branch', None)
        if (branch_name and
            branch_name not in c.rhodecode_repo.branches and
            branch_name not in c.rhodecode_repo.closed_branches and
            not revision):
            # unknown branch name - treat it as a revision of a file view
            return redirect(url('changelog_file_home', repo_name=c.repo_name,
                                revision=branch_name, f_path=f_path or ''))

        c.changelog_for_path = f_path
        try:
            if f_path:
                # lazy %-args keep formatting off the hot path when the
                # debug level is disabled
                log.debug('generating changelog for path %s', f_path)
                # get the history for the file !
                tip_cs = c.rhodecode_repo.get_changeset()
                try:
                    collection = tip_cs.get_file_history(f_path)
                except (NodeDoesNotExistError, ChangesetError):
                    # this node is not present at tip !
                    try:
                        cs = self.__get_cs_or_redirect(revision, repo_name)
                        collection = cs.get_file_history(f_path)
                    except RepositoryError as e:
                        h.flash(safe_str(e), category='warning')
                        redirect(h.url('changelog_home', repo_name=repo_name))
                collection = list(reversed(collection))
            else:
                collection = c.rhodecode_repo.get_changesets(start=0,
                                                    branch_name=branch_name)
            c.total_cs = len(collection)

            c.pagination = RepoPage(collection, page=p, item_count=c.total_cs,
                                    items_per_page=c.size, branch=branch_name,)
            collection = list(c.pagination)
            page_revisions = [x.raw_id for x in c.pagination]
            c.comments = c.rhodecode_db_repo.get_comments(page_revisions)
            c.statuses = c.rhodecode_db_repo.statuses(page_revisions)
        except EmptyRepositoryError as e:
            h.flash(safe_str(e), category='warning')
            return redirect(url('summary_home', repo_name=c.repo_name))
        except Exception as e:
            # RepositoryError, ChangesetDoesNotExistError and anything else
            # raised while building the changelog ends up back at the index
            # (the original tuple was redundant - Exception subsumes it)
            log.error(traceback.format_exc())
            h.flash(safe_str(e), category='error')
            return redirect(url('changelog_home', repo_name=c.repo_name))

        c.branch_name = branch_name
        c.branch_filters = [('', _('All Branches'))] + \
            [(k, k) for k in c.rhodecode_repo.branches.keys()]
        if c.rhodecode_repo.closed_branches:
            prefix = _('(closed)') + ' '
            c.branch_filters += [('-', '-')] + \
                [(k, prefix + k)
                 for k in c.rhodecode_repo.closed_branches.keys()]
        _revs = []
        if not f_path:
            _revs = [x.revision for x in c.pagination]
        self._graph(c.rhodecode_repo, _revs, c.total_cs, c.size, p)

        return render('changelog/changelog.html')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def changelog_details(self, cs):
        """Render details for a single changeset; partial-XHR requests only."""
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
            c.cs = c.rhodecode_repo.get_changeset(cs)
            return render('changelog/changelog_details.html')
        raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def changelog_summary(self, repo_name):
        """Render the changelog summary fragment; partial-XHR requests only."""
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
            _load_changelog_summary()

            return render('changelog/changelog_summary_data.html')
        raise HTTPNotFound()
@@ -1,439 +1,439 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.changeset |
|
3 | rhodecode.controllers.changeset | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | changeset controller for pylons showing changes between |

6 | changeset controller for pylons showing changes between | |
7 | revisions |
|
7 | revisions | |
8 |
|
8 | |||
9 | :created_on: Apr 25, 2010 |
|
9 | :created_on: Apr 25, 2010 | |
10 | :author: marcink |
|
10 | :author: marcink | |
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
12 | :license: GPLv3, see COPYING for more details. |
|
12 | :license: GPLv3, see COPYING for more details. | |
13 | """ |
|
13 | """ | |
14 | # This program is free software: you can redistribute it and/or modify |
|
14 | # This program is free software: you can redistribute it and/or modify | |
15 | # it under the terms of the GNU General Public License as published by |
|
15 | # it under the terms of the GNU General Public License as published by | |
16 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | # the Free Software Foundation, either version 3 of the License, or | |
17 | # (at your option) any later version. |
|
17 | # (at your option) any later version. | |
18 | # |
|
18 | # | |
19 | # This program is distributed in the hope that it will be useful, |
|
19 | # This program is distributed in the hope that it will be useful, | |
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
22 | # GNU General Public License for more details. |
|
22 | # GNU General Public License for more details. | |
23 | # |
|
23 | # | |
24 | # You should have received a copy of the GNU General Public License |
|
24 | # You should have received a copy of the GNU General Public License | |
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 | from collections import defaultdict |
|
28 | from collections import defaultdict | |
29 | from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound |
|
29 | from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound | |
30 |
|
30 | |||
31 | from pylons import tmpl_context as c, url, request, response |
|
31 | from pylons import tmpl_context as c, url, request, response | |
32 | from pylons.i18n.translation import _ |
|
32 | from pylons.i18n.translation import _ | |
33 | from pylons.controllers.util import redirect |
|
33 | from pylons.controllers.util import redirect | |
34 | from rhodecode.lib.utils import jsonify |
|
34 | from rhodecode.lib.utils import jsonify | |
35 |
|
35 | |||
36 | from rhodecode.lib.vcs.exceptions import RepositoryError, \ |
|
36 | from rhodecode.lib.vcs.exceptions import RepositoryError, \ | |
37 | ChangesetDoesNotExistError |
|
37 | ChangesetDoesNotExistError | |
38 |
|
38 | |||
39 | import rhodecode.lib.helpers as h |
|
39 | import rhodecode.lib.helpers as h | |
40 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\ |
|
40 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\ | |
41 | NotAnonymous |
|
41 | NotAnonymous | |
42 | from rhodecode.lib.base import BaseRepoController, render |
|
42 | from rhodecode.lib.base import BaseRepoController, render | |
43 | from rhodecode.lib.utils import action_logger |
|
43 | from rhodecode.lib.utils import action_logger | |
44 | from rhodecode.lib.compat import OrderedDict |
|
44 | from rhodecode.lib.compat import OrderedDict | |
45 | from rhodecode.lib import diffs |
|
45 | from rhodecode.lib import diffs | |
46 | from rhodecode.model.db import ChangesetComment, ChangesetStatus |
|
46 | from rhodecode.model.db import ChangesetComment, ChangesetStatus | |
47 | from rhodecode.model.comment import ChangesetCommentsModel |
|
47 | from rhodecode.model.comment import ChangesetCommentsModel | |
48 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
48 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
49 | from rhodecode.model.meta import Session |
|
49 | from rhodecode.model.meta import Session | |
50 | from rhodecode.model.repo import RepoModel |
|
50 | from rhodecode.model.repo import RepoModel | |
51 | from rhodecode.lib.diffs import LimitedDiffContainer |
|
51 | from rhodecode.lib.diffs import LimitedDiffContainer | |
52 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
52 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError | |
53 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
53 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
54 | from rhodecode.lib.utils2 import safe_unicode |
|
54 | from rhodecode.lib.utils2 import safe_unicode, safe_str | |
55 |
|
55 | |||
56 | log = logging.getLogger(__name__) |
|
56 | log = logging.getLogger(__name__) | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | def _update_with_GET(params, GET): |
|
59 | def _update_with_GET(params, GET): | |
60 | for k in ['diff1', 'diff2', 'diff']: |
|
60 | for k in ['diff1', 'diff2', 'diff']: | |
61 | params[k] += GET.getall(k) |
|
61 | params[k] += GET.getall(k) | |
62 |
|
62 | |||
63 |
|
63 | |||
def anchor_url(revision, path, GET):
    """Return the current url anchored at the file id of *revision*/*path*,
    preserving every existing GET argument."""
    anchor = h.FID(revision, path)
    return h.url.current(anchor=anchor, **dict(GET))
67 |
|
67 | |||
68 |
|
68 | |||
def get_ignore_ws(fid, GET):
    """Resolve the effective whitespace-ignore setting for one diff file.

    A per-file value is encoded in GET as ``WS:<num>`` under the file id
    key and wins over the global ``ignorews`` argument.

    :param fid: file id key to look up (may be None)
    :param GET: request multidict (needs ``get`` and ``getall``)
    :return: int per-file value if present and parseable, otherwise the
        raw global ``ignorews`` value (may be None)
    """
    ig_ws_global = GET.get('ignorews')
    # list comprehension instead of lazy filter(): under Python 3 a
    # filter object is always truthy and not subscriptable, which broke
    # both the emptiness check and ig_ws[0] below
    ig_ws = [k for k in GET.getall(fid) if k.startswith('WS')]
    if ig_ws:
        try:
            return int(ig_ws[0].split(':')[-1])
        except Exception:
            # malformed per-file value - fall back to the global setting
            pass
    return ig_ws_global
78 |
|
78 | |||
79 |
|
79 | |||
def _ignorews_url(GET, fileid=None):
    """Build the toggle link for ignoring whitespace in a diff.

    When whitespace is currently shown the generated link enables
    ignoring it (globally, or per file when *fileid* is given) and
    vice versa; the current line-context setting is carried along.
    """
    fileid = str(fileid) if fileid else None
    params = defaultdict(list)
    _update_with_GET(params, GET)

    ig_ws = get_ignore_ws(fileid, GET)
    ln_ctx = get_line_ctx(fileid, GET)
    label = _('Show white space')

    if fileid is None:
        # global option
        if ig_ws is None:
            params['ignorews'] += [1]
            label = _('Ignore white space')
        ctx_key, ctx_val = 'context', ln_ctx
    else:
        # per file options
        if ig_ws is None:
            params[fileid] += ['WS:1']
            label = _('Ignore white space')
        ctx_key, ctx_val = fileid, 'C:%s' % ln_ctx

    # if we have passed in ln_ctx pass it along to our params
    if ln_ctx:
        params[ctx_key] += [ctx_val]

    params['anchor'] = fileid
    img = h.image(h.url('/images/icons/text_strikethrough.png'), label,
                  class_='icon')
    return h.link_to(img, h.url.current(**params), title=label,
                     class_='tooltip')
109 |
|
109 | |||
110 |
|
110 | |||
def get_line_ctx(fid, GET):
    """Resolve the diff line-context (number of unified context lines).

    Per-file values are encoded as ``C:<num>`` under the file id key and
    override the global ``context`` argument.

    :param fid: file id key to look up, or None for the global setting
    :param GET: request multidict (needs ``get``/``getall``; iterating
        it yields its keys)
    :return: int number of context lines; 3 when the value is missing
        or not an integer
    """
    ln_ctx_global = GET.get('context')
    if fid:
        # list comprehensions instead of lazy filter() so truthiness and
        # indexing behave identically under Python 2 and 3
        ln_ctx = [k for k in GET.getall(fid) if k.startswith('C')]
    else:
        _ln_ctx = [k for k in GET if k.startswith('C')]
        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
        if ln_ctx:
            # wrap so both branches yield a list for the shared code below
            ln_ctx = [ln_ctx]

    if ln_ctx:
        retval = ln_ctx[0].split(':')[-1]
    else:
        retval = ln_ctx_global

    try:
        return int(retval)
    except Exception:
        return 3
130 |
|
130 | |||
131 |
|
131 | |||
def _context_url(GET, fileid=None):
    """Build the link that doubles the number of diff context lines.

    :param GET: request multidict
    :param fileid: file id for a per-file link, or None for the global one
    """
    fileid = str(fileid) if fileid else None
    ig_ws = get_ignore_ws(fileid, GET)
    # each click doubles the currently effective context (default 3)
    ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2

    params = defaultdict(list)
    _update_with_GET(params, GET)

    if fileid is None:
        # global option
        if ln_ctx > 0:
            params['context'] += [ln_ctx]
        if ig_ws:
            ig_ws_key, ig_ws_val = 'ignorews', 1
    else:
        # per file option
        params[fileid] += ['C:%s' % ln_ctx]
        ig_ws_key, ig_ws_val = fileid, 'WS:%s' % 1

    # preserve the whitespace setting in the generated url
    if ig_ws:
        params[ig_ws_key] += [ig_ws_val]

    label = _('%s line context') % ln_ctx

    params['anchor'] = fileid
    img = h.image(h.url('/images/icons/table_add.png'), label, class_='icon')
    return h.link_to(img, h.url.current(**params), title=label,
                     class_='tooltip')
169 |
|
169 | |||
170 |
|
170 | |||
171 | class ChangesetController(BaseRepoController): |
|
171 | class ChangesetController(BaseRepoController): | |
172 |
|
172 | |||
173 | def __before__(self): |
|
173 | def __before__(self): | |
174 | super(ChangesetController, self).__before__() |
|
174 | super(ChangesetController, self).__before__() | |
175 | c.affected_files_cut_off = 60 |
|
175 | c.affected_files_cut_off = 60 | |
176 | repo_model = RepoModel() |
|
176 | repo_model = RepoModel() | |
177 | c.users_array = repo_model.get_users_js() |
|
177 | c.users_array = repo_model.get_users_js() | |
178 | c.users_groups_array = repo_model.get_users_groups_js() |
|
178 | c.users_groups_array = repo_model.get_users_groups_js() | |
179 |
|
179 | |||
180 | def _index(self, revision, method): |
|
180 | def _index(self, revision, method): | |
181 | c.anchor_url = anchor_url |
|
181 | c.anchor_url = anchor_url | |
182 | c.ignorews_url = _ignorews_url |
|
182 | c.ignorews_url = _ignorews_url | |
183 | c.context_url = _context_url |
|
183 | c.context_url = _context_url | |
184 | c.fulldiff = fulldiff = request.GET.get('fulldiff') |
|
184 | c.fulldiff = fulldiff = request.GET.get('fulldiff') | |
185 | #get ranges of revisions if preset |
|
185 | #get ranges of revisions if preset | |
186 | rev_range = revision.split('...')[:2] |
|
186 | rev_range = revision.split('...')[:2] | |
187 | enable_comments = True |
|
187 | enable_comments = True | |
188 | try: |
|
188 | try: | |
189 | if len(rev_range) == 2: |
|
189 | if len(rev_range) == 2: | |
190 | enable_comments = False |
|
190 | enable_comments = False | |
191 | rev_start = rev_range[0] |
|
191 | rev_start = rev_range[0] | |
192 | rev_end = rev_range[1] |
|
192 | rev_end = rev_range[1] | |
193 | rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start, |
|
193 | rev_ranges = c.rhodecode_repo.get_changesets(start=rev_start, | |
194 | end=rev_end) |
|
194 | end=rev_end) | |
195 | else: |
|
195 | else: | |
196 | rev_ranges = [c.rhodecode_repo.get_changeset(revision)] |
|
196 | rev_ranges = [c.rhodecode_repo.get_changeset(revision)] | |
197 |
|
197 | |||
198 | c.cs_ranges = list(rev_ranges) |
|
198 | c.cs_ranges = list(rev_ranges) | |
199 | if not c.cs_ranges: |
|
199 | if not c.cs_ranges: | |
200 | raise RepositoryError('Changeset range returned empty result') |
|
200 | raise RepositoryError('Changeset range returned empty result') | |
201 |
|
201 | |||
202 | except (RepositoryError, ChangesetDoesNotExistError, Exception), e: |
|
202 | except (RepositoryError, ChangesetDoesNotExistError, Exception), e: | |
203 | log.error(traceback.format_exc()) |
|
203 | log.error(traceback.format_exc()) | |
204 | h.flash(str(e), category='error') |
|
204 | h.flash(safe_str(e), category='error') | |
205 | raise HTTPNotFound() |
|
205 | raise HTTPNotFound() | |
206 |
|
206 | |||
207 | c.changes = OrderedDict() |
|
207 | c.changes = OrderedDict() | |
208 |
|
208 | |||
209 | c.lines_added = 0 # count of lines added |
|
209 | c.lines_added = 0 # count of lines added | |
210 | c.lines_deleted = 0 # count of lines removes |
|
210 | c.lines_deleted = 0 # count of lines removes | |
211 |
|
211 | |||
212 | c.changeset_statuses = ChangesetStatus.STATUSES |
|
212 | c.changeset_statuses = ChangesetStatus.STATUSES | |
213 | c.comments = [] |
|
213 | c.comments = [] | |
214 | c.statuses = [] |
|
214 | c.statuses = [] | |
215 | c.inline_comments = [] |
|
215 | c.inline_comments = [] | |
216 | c.inline_cnt = 0 |
|
216 | c.inline_cnt = 0 | |
217 |
|
217 | |||
218 | # Iterate over ranges (default changeset view is always one changeset) |
|
218 | # Iterate over ranges (default changeset view is always one changeset) | |
219 | for changeset in c.cs_ranges: |
|
219 | for changeset in c.cs_ranges: | |
220 | inlines = [] |
|
220 | inlines = [] | |
221 | if method == 'show': |
|
221 | if method == 'show': | |
222 | c.statuses.extend([ChangesetStatusModel().get_status( |
|
222 | c.statuses.extend([ChangesetStatusModel().get_status( | |
223 | c.rhodecode_db_repo.repo_id, changeset.raw_id)]) |
|
223 | c.rhodecode_db_repo.repo_id, changeset.raw_id)]) | |
224 |
|
224 | |||
225 | c.comments.extend(ChangesetCommentsModel()\ |
|
225 | c.comments.extend(ChangesetCommentsModel()\ | |
226 | .get_comments(c.rhodecode_db_repo.repo_id, |
|
226 | .get_comments(c.rhodecode_db_repo.repo_id, | |
227 | revision=changeset.raw_id)) |
|
227 | revision=changeset.raw_id)) | |
228 |
|
228 | |||
229 | #comments from PR |
|
229 | #comments from PR | |
230 | st = ChangesetStatusModel().get_statuses( |
|
230 | st = ChangesetStatusModel().get_statuses( | |
231 | c.rhodecode_db_repo.repo_id, changeset.raw_id, |
|
231 | c.rhodecode_db_repo.repo_id, changeset.raw_id, | |
232 | with_revisions=True) |
|
232 | with_revisions=True) | |
233 | # from associated statuses, check the pull requests, and |
|
233 | # from associated statuses, check the pull requests, and | |
234 | # show comments from them |
|
234 | # show comments from them | |
235 |
|
235 | |||
236 | prs = set([x.pull_request for x in |
|
236 | prs = set([x.pull_request for x in | |
237 | filter(lambda x: x.pull_request is not None, st)]) |
|
237 | filter(lambda x: x.pull_request is not None, st)]) | |
238 |
|
238 | |||
239 | for pr in prs: |
|
239 | for pr in prs: | |
240 | c.comments.extend(pr.comments) |
|
240 | c.comments.extend(pr.comments) | |
241 | inlines = ChangesetCommentsModel()\ |
|
241 | inlines = ChangesetCommentsModel()\ | |
242 | .get_inline_comments(c.rhodecode_db_repo.repo_id, |
|
242 | .get_inline_comments(c.rhodecode_db_repo.repo_id, | |
243 | revision=changeset.raw_id) |
|
243 | revision=changeset.raw_id) | |
244 | c.inline_comments.extend(inlines) |
|
244 | c.inline_comments.extend(inlines) | |
245 |
|
245 | |||
246 | c.changes[changeset.raw_id] = [] |
|
246 | c.changes[changeset.raw_id] = [] | |
247 |
|
247 | |||
248 | cs2 = changeset.raw_id |
|
248 | cs2 = changeset.raw_id | |
249 | cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id |
|
249 | cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id | |
250 | context_lcl = get_line_ctx('', request.GET) |
|
250 | context_lcl = get_line_ctx('', request.GET) | |
251 | ign_whitespace_lcl = ign_whitespace_lcl = get_ignore_ws('', request.GET) |
|
251 | ign_whitespace_lcl = ign_whitespace_lcl = get_ignore_ws('', request.GET) | |
252 |
|
252 | |||
253 | _diff = c.rhodecode_repo.get_diff(cs1, cs2, |
|
253 | _diff = c.rhodecode_repo.get_diff(cs1, cs2, | |
254 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) |
|
254 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) | |
255 | diff_limit = self.cut_off_limit if not fulldiff else None |
|
255 | diff_limit = self.cut_off_limit if not fulldiff else None | |
256 | diff_processor = diffs.DiffProcessor(_diff, |
|
256 | diff_processor = diffs.DiffProcessor(_diff, | |
257 | vcs=c.rhodecode_repo.alias, |
|
257 | vcs=c.rhodecode_repo.alias, | |
258 | format='gitdiff', |
|
258 | format='gitdiff', | |
259 | diff_limit=diff_limit) |
|
259 | diff_limit=diff_limit) | |
260 | cs_changes = OrderedDict() |
|
260 | cs_changes = OrderedDict() | |
261 | if method == 'show': |
|
261 | if method == 'show': | |
262 | _parsed = diff_processor.prepare() |
|
262 | _parsed = diff_processor.prepare() | |
263 | c.limited_diff = False |
|
263 | c.limited_diff = False | |
264 | if isinstance(_parsed, LimitedDiffContainer): |
|
264 | if isinstance(_parsed, LimitedDiffContainer): | |
265 | c.limited_diff = True |
|
265 | c.limited_diff = True | |
266 | for f in _parsed: |
|
266 | for f in _parsed: | |
267 | st = f['stats'] |
|
267 | st = f['stats'] | |
268 | c.lines_added += st['added'] |
|
268 | c.lines_added += st['added'] | |
269 | c.lines_deleted += st['deleted'] |
|
269 | c.lines_deleted += st['deleted'] | |
270 | fid = h.FID(changeset.raw_id, f['filename']) |
|
270 | fid = h.FID(changeset.raw_id, f['filename']) | |
271 | diff = diff_processor.as_html(enable_comments=enable_comments, |
|
271 | diff = diff_processor.as_html(enable_comments=enable_comments, | |
272 | parsed_lines=[f]) |
|
272 | parsed_lines=[f]) | |
273 | cs_changes[fid] = [cs1, cs2, f['operation'], f['filename'], |
|
273 | cs_changes[fid] = [cs1, cs2, f['operation'], f['filename'], | |
274 | diff, st] |
|
274 | diff, st] | |
275 | else: |
|
275 | else: | |
276 | # downloads/raw we only need RAW diff nothing else |
|
276 | # downloads/raw we only need RAW diff nothing else | |
277 | diff = diff_processor.as_raw() |
|
277 | diff = diff_processor.as_raw() | |
278 | cs_changes[''] = [None, None, None, None, diff, None] |
|
278 | cs_changes[''] = [None, None, None, None, diff, None] | |
279 | c.changes[changeset.raw_id] = cs_changes |
|
279 | c.changes[changeset.raw_id] = cs_changes | |
280 |
|
280 | |||
281 | #sort comments by how they were generated |
|
281 | #sort comments by how they were generated | |
282 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
282 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) | |
283 |
|
283 | |||
284 | # count inline comments |
|
284 | # count inline comments | |
285 | for __, lines in c.inline_comments: |
|
285 | for __, lines in c.inline_comments: | |
286 | for comments in lines.values(): |
|
286 | for comments in lines.values(): | |
287 | c.inline_cnt += len(comments) |
|
287 | c.inline_cnt += len(comments) | |
288 |
|
288 | |||
289 | if len(c.cs_ranges) == 1: |
|
289 | if len(c.cs_ranges) == 1: | |
290 | c.changeset = c.cs_ranges[0] |
|
290 | c.changeset = c.cs_ranges[0] | |
291 | c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id |
|
291 | c.parent_tmpl = ''.join(['# Parent %s\n' % x.raw_id | |
292 | for x in c.changeset.parents]) |
|
292 | for x in c.changeset.parents]) | |
293 | if method == 'download': |
|
293 | if method == 'download': | |
294 | response.content_type = 'text/plain' |
|
294 | response.content_type = 'text/plain' | |
295 | response.content_disposition = 'attachment; filename=%s.diff' \ |
|
295 | response.content_disposition = 'attachment; filename=%s.diff' \ | |
296 | % revision[:12] |
|
296 | % revision[:12] | |
297 | return diff |
|
297 | return diff | |
298 | elif method == 'patch': |
|
298 | elif method == 'patch': | |
299 | response.content_type = 'text/plain' |
|
299 | response.content_type = 'text/plain' | |
300 | c.diff = safe_unicode(diff) |
|
300 | c.diff = safe_unicode(diff) | |
301 | return render('changeset/patch_changeset.html') |
|
301 | return render('changeset/patch_changeset.html') | |
302 | elif method == 'raw': |
|
302 | elif method == 'raw': | |
303 | response.content_type = 'text/plain' |
|
303 | response.content_type = 'text/plain' | |
304 | return diff |
|
304 | return diff | |
305 | elif method == 'show': |
|
305 | elif method == 'show': | |
306 | if len(c.cs_ranges) == 1: |
|
306 | if len(c.cs_ranges) == 1: | |
307 | return render('changeset/changeset.html') |
|
307 | return render('changeset/changeset.html') | |
308 | else: |
|
308 | else: | |
309 | return render('changeset/changeset_range.html') |
|
309 | return render('changeset/changeset_range.html') | |
310 |
|
310 | |||
311 | @LoginRequired() |
|
311 | @LoginRequired() | |
312 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
312 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
313 | 'repository.admin') |
|
313 | 'repository.admin') | |
314 | def index(self, revision, method='show'): |
|
314 | def index(self, revision, method='show'): | |
315 | return self._index(revision, method=method) |
|
315 | return self._index(revision, method=method) | |
316 |
|
316 | |||
317 | @LoginRequired() |
|
317 | @LoginRequired() | |
318 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
318 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
319 | 'repository.admin') |
|
319 | 'repository.admin') | |
320 | def changeset_raw(self, revision): |
|
320 | def changeset_raw(self, revision): | |
321 | return self._index(revision, method='raw') |
|
321 | return self._index(revision, method='raw') | |
322 |
|
322 | |||
323 | @LoginRequired() |
|
323 | @LoginRequired() | |
324 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
324 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
325 | 'repository.admin') |
|
325 | 'repository.admin') | |
326 | def changeset_patch(self, revision): |
|
326 | def changeset_patch(self, revision): | |
327 | return self._index(revision, method='patch') |
|
327 | return self._index(revision, method='patch') | |
328 |
|
328 | |||
329 | @LoginRequired() |
|
329 | @LoginRequired() | |
330 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
330 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
331 | 'repository.admin') |
|
331 | 'repository.admin') | |
332 | def changeset_download(self, revision): |
|
332 | def changeset_download(self, revision): | |
333 | return self._index(revision, method='download') |
|
333 | return self._index(revision, method='download') | |
334 |
|
334 | |||
335 | @LoginRequired() |
|
335 | @LoginRequired() | |
336 | @NotAnonymous() |
|
336 | @NotAnonymous() | |
337 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
337 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
338 | 'repository.admin') |
|
338 | 'repository.admin') | |
339 | @jsonify |
|
339 | @jsonify | |
340 | def comment(self, repo_name, revision): |
|
340 | def comment(self, repo_name, revision): | |
341 | status = request.POST.get('changeset_status') |
|
341 | status = request.POST.get('changeset_status') | |
342 | change_status = request.POST.get('change_changeset_status') |
|
342 | change_status = request.POST.get('change_changeset_status') | |
343 | text = request.POST.get('text') |
|
343 | text = request.POST.get('text') | |
344 | if status and change_status: |
|
344 | if status and change_status: | |
345 | text = text or (_('Status change -> %s') |
|
345 | text = text or (_('Status change -> %s') | |
346 | % ChangesetStatus.get_status_lbl(status)) |
|
346 | % ChangesetStatus.get_status_lbl(status)) | |
347 |
|
347 | |||
348 | c.co = comm = ChangesetCommentsModel().create( |
|
348 | c.co = comm = ChangesetCommentsModel().create( | |
349 | text=text, |
|
349 | text=text, | |
350 | repo=c.rhodecode_db_repo.repo_id, |
|
350 | repo=c.rhodecode_db_repo.repo_id, | |
351 | user=c.rhodecode_user.user_id, |
|
351 | user=c.rhodecode_user.user_id, | |
352 | revision=revision, |
|
352 | revision=revision, | |
353 | f_path=request.POST.get('f_path'), |
|
353 | f_path=request.POST.get('f_path'), | |
354 | line_no=request.POST.get('line'), |
|
354 | line_no=request.POST.get('line'), | |
355 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
355 | status_change=(ChangesetStatus.get_status_lbl(status) | |
356 | if status and change_status else None) |
|
356 | if status and change_status else None) | |
357 | ) |
|
357 | ) | |
358 |
|
358 | |||
359 | # get status if set ! |
|
359 | # get status if set ! | |
360 | if status and change_status: |
|
360 | if status and change_status: | |
361 | # if latest status was from pull request and it's closed |
|
361 | # if latest status was from pull request and it's closed | |
362 | # disallow changing status ! |
|
362 | # disallow changing status ! | |
363 | # dont_allow_on_closed_pull_request = True ! |
|
363 | # dont_allow_on_closed_pull_request = True ! | |
364 |
|
364 | |||
365 | try: |
|
365 | try: | |
366 | ChangesetStatusModel().set_status( |
|
366 | ChangesetStatusModel().set_status( | |
367 | c.rhodecode_db_repo.repo_id, |
|
367 | c.rhodecode_db_repo.repo_id, | |
368 | status, |
|
368 | status, | |
369 | c.rhodecode_user.user_id, |
|
369 | c.rhodecode_user.user_id, | |
370 | comm, |
|
370 | comm, | |
371 | revision=revision, |
|
371 | revision=revision, | |
372 | dont_allow_on_closed_pull_request=True |
|
372 | dont_allow_on_closed_pull_request=True | |
373 | ) |
|
373 | ) | |
374 | except StatusChangeOnClosedPullRequestError: |
|
374 | except StatusChangeOnClosedPullRequestError: | |
375 | log.error(traceback.format_exc()) |
|
375 | log.error(traceback.format_exc()) | |
376 | msg = _('Changing status on a changeset associated with ' |
|
376 | msg = _('Changing status on a changeset associated with ' | |
377 | 'a closed pull request is not allowed') |
|
377 | 'a closed pull request is not allowed') | |
378 | h.flash(msg, category='warning') |
|
378 | h.flash(msg, category='warning') | |
379 | return redirect(h.url('changeset_home', repo_name=repo_name, |
|
379 | return redirect(h.url('changeset_home', repo_name=repo_name, | |
380 | revision=revision)) |
|
380 | revision=revision)) | |
381 | action_logger(self.rhodecode_user, |
|
381 | action_logger(self.rhodecode_user, | |
382 | 'user_commented_revision:%s' % revision, |
|
382 | 'user_commented_revision:%s' % revision, | |
383 | c.rhodecode_db_repo, self.ip_addr, self.sa) |
|
383 | c.rhodecode_db_repo, self.ip_addr, self.sa) | |
384 |
|
384 | |||
385 | Session().commit() |
|
385 | Session().commit() | |
386 |
|
386 | |||
387 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
387 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): | |
388 | return redirect(h.url('changeset_home', repo_name=repo_name, |
|
388 | return redirect(h.url('changeset_home', repo_name=repo_name, | |
389 | revision=revision)) |
|
389 | revision=revision)) | |
390 | #only ajax below |
|
390 | #only ajax below | |
391 | data = { |
|
391 | data = { | |
392 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
392 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), | |
393 | } |
|
393 | } | |
394 | if comm: |
|
394 | if comm: | |
395 | data.update(comm.get_dict()) |
|
395 | data.update(comm.get_dict()) | |
396 | data.update({'rendered_text': |
|
396 | data.update({'rendered_text': | |
397 | render('changeset/changeset_comment_block.html')}) |
|
397 | render('changeset/changeset_comment_block.html')}) | |
398 |
|
398 | |||
399 | return data |
|
399 | return data | |
400 |
|
400 | |||
401 | @LoginRequired() |
|
401 | @LoginRequired() | |
402 | @NotAnonymous() |
|
402 | @NotAnonymous() | |
403 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
403 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
404 | 'repository.admin') |
|
404 | 'repository.admin') | |
405 | def preview_comment(self): |
|
405 | def preview_comment(self): | |
406 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
406 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): | |
407 | raise HTTPBadRequest() |
|
407 | raise HTTPBadRequest() | |
408 | text = request.POST.get('text') |
|
408 | text = request.POST.get('text') | |
409 | if text: |
|
409 | if text: | |
410 | return h.rst_w_mentions(text) |
|
410 | return h.rst_w_mentions(text) | |
411 | return '' |
|
411 | return '' | |
412 |
|
412 | |||
413 | @LoginRequired() |
|
413 | @LoginRequired() | |
414 | @NotAnonymous() |
|
414 | @NotAnonymous() | |
415 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
415 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
416 | 'repository.admin') |
|
416 | 'repository.admin') | |
417 | @jsonify |
|
417 | @jsonify | |
418 | def delete_comment(self, repo_name, comment_id): |
|
418 | def delete_comment(self, repo_name, comment_id): | |
419 | co = ChangesetComment.get(comment_id) |
|
419 | co = ChangesetComment.get(comment_id) | |
420 | owner = co.author.user_id == c.rhodecode_user.user_id |
|
420 | owner = co.author.user_id == c.rhodecode_user.user_id | |
421 | if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner: |
|
421 | if h.HasPermissionAny('hg.admin', 'repository.admin')() or owner: | |
422 | ChangesetCommentsModel().delete(comment=co) |
|
422 | ChangesetCommentsModel().delete(comment=co) | |
423 | Session().commit() |
|
423 | Session().commit() | |
424 | return True |
|
424 | return True | |
425 | else: |
|
425 | else: | |
426 | raise HTTPForbidden() |
|
426 | raise HTTPForbidden() | |
427 |
|
427 | |||
428 | @LoginRequired() |
|
428 | @LoginRequired() | |
429 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
429 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
430 | 'repository.admin') |
|
430 | 'repository.admin') | |
431 | @jsonify |
|
431 | @jsonify | |
432 | def changeset_info(self, repo_name, revision): |
|
432 | def changeset_info(self, repo_name, revision): | |
433 | if request.is_xhr: |
|
433 | if request.is_xhr: | |
434 | try: |
|
434 | try: | |
435 | return c.rhodecode_repo.get_changeset(revision) |
|
435 | return c.rhodecode_repo.get_changeset(revision) | |
436 | except ChangesetDoesNotExistError, e: |
|
436 | except ChangesetDoesNotExistError, e: | |
437 | return EmptyChangeset(message=str(e)) |
|
437 | return EmptyChangeset(message=str(e)) | |
438 | else: |
|
438 | else: | |
439 | raise HTTPBadRequest() |
|
439 | raise HTTPBadRequest() |
@@ -1,266 +1,265 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.compare |
|
3 | rhodecode.controllers.compare | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | compare controller for pylons showing differences between two |
|
6 | compare controller for pylons showing differences between two | |
7 | repos, branches, bookmarks or tips |
|
7 | repos, branches, bookmarks or tips | |
8 |
|
8 | |||
9 | :created_on: May 6, 2012 |
|
9 | :created_on: May 6, 2012 | |
10 | :author: marcink |
|
10 | :author: marcink | |
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
11 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
12 | :license: GPLv3, see COPYING for more details. |
|
12 | :license: GPLv3, see COPYING for more details. | |
13 | """ |
|
13 | """ | |
14 | # This program is free software: you can redistribute it and/or modify |
|
14 | # This program is free software: you can redistribute it and/or modify | |
15 | # it under the terms of the GNU General Public License as published by |
|
15 | # it under the terms of the GNU General Public License as published by | |
16 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | # the Free Software Foundation, either version 3 of the License, or | |
17 | # (at your option) any later version. |
|
17 | # (at your option) any later version. | |
18 | # |
|
18 | # | |
19 | # This program is distributed in the hope that it will be useful, |
|
19 | # This program is distributed in the hope that it will be useful, | |
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
22 | # GNU General Public License for more details. |
|
22 | # GNU General Public License for more details. | |
23 | # |
|
23 | # | |
24 | # You should have received a copy of the GNU General Public License |
|
24 | # You should have received a copy of the GNU General Public License | |
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
26 |
|
26 | |||
27 | import logging |
|
27 | import logging | |
28 | import traceback |
|
28 | import traceback | |
29 | import re |
|
29 | import re | |
30 |
|
30 | |||
31 | from webob.exc import HTTPNotFound |
|
31 | from webob.exc import HTTPNotFound, HTTPBadRequest | |
32 | from pylons import request, response, session, tmpl_context as c, url |
|
32 | from pylons import request, response, session, tmpl_context as c, url | |
33 | from pylons.controllers.util import abort, redirect |
|
33 | from pylons.controllers.util import abort, redirect | |
34 | from pylons.i18n.translation import _ |
|
34 | from pylons.i18n.translation import _ | |
35 |
|
35 | |||
36 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError, RepositoryError |
|
36 | from rhodecode.lib.vcs.exceptions import EmptyRepositoryError, RepositoryError | |
37 | from rhodecode.lib.vcs.utils import safe_str |
|
37 | from rhodecode.lib.vcs.utils import safe_str | |
38 | from rhodecode.lib.vcs.utils.hgcompat import scmutil, unionrepo |
|
38 | from rhodecode.lib.vcs.utils.hgcompat import scmutil, unionrepo | |
39 | from rhodecode.lib import helpers as h |
|
39 | from rhodecode.lib import helpers as h | |
40 | from rhodecode.lib.base import BaseRepoController, render |
|
40 | from rhodecode.lib.base import BaseRepoController, render | |
41 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
41 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
42 | from rhodecode.lib import diffs |
|
42 | from rhodecode.lib import diffs | |
43 |
|
43 | from rhodecode.lib.utils2 import safe_str | ||
44 | from rhodecode.model.db import Repository |
|
44 | from rhodecode.model.db import Repository | |
45 | from webob.exc import HTTPBadRequest |
|
|||
46 | from rhodecode.lib.diffs import LimitedDiffContainer |
|
45 | from rhodecode.lib.diffs import LimitedDiffContainer | |
47 |
|
46 | |||
48 |
|
47 | |||
49 | log = logging.getLogger(__name__) |
|
48 | log = logging.getLogger(__name__) | |
50 |
|
49 | |||
51 |
|
50 | |||
52 | class CompareController(BaseRepoController): |
|
51 | class CompareController(BaseRepoController): | |
53 |
|
52 | |||
54 | def __before__(self): |
|
53 | def __before__(self): | |
55 | super(CompareController, self).__before__() |
|
54 | super(CompareController, self).__before__() | |
56 |
|
55 | |||
57 | def __get_rev_or_redirect(self, ref, repo, redirect_after=True, |
|
56 | def __get_rev_or_redirect(self, ref, repo, redirect_after=True, | |
58 | partial=False): |
|
57 | partial=False): | |
59 | """ |
|
58 | """ | |
60 | Safe way to get changeset if error occur it redirects to changeset with |
|
59 | Safe way to get changeset if error occur it redirects to changeset with | |
61 | proper message. If partial is set then don't do redirect raise Exception |
|
60 | proper message. If partial is set then don't do redirect raise Exception | |
62 | instead |
|
61 | instead | |
63 |
|
62 | |||
64 | :param rev: revision to fetch |
|
63 | :param rev: revision to fetch | |
65 | :param repo: repo instance |
|
64 | :param repo: repo instance | |
66 | """ |
|
65 | """ | |
67 |
|
66 | |||
68 | rev = ref[1] # default and used for git |
|
67 | rev = ref[1] # default and used for git | |
69 | if repo.scm_instance.alias == 'hg': |
|
68 | if repo.scm_instance.alias == 'hg': | |
70 | # lookup up the exact node id |
|
69 | # lookup up the exact node id | |
71 | _revset_predicates = { |
|
70 | _revset_predicates = { | |
72 | 'branch': 'branch', |
|
71 | 'branch': 'branch', | |
73 | 'book': 'bookmark', |
|
72 | 'book': 'bookmark', | |
74 | 'tag': 'tag', |
|
73 | 'tag': 'tag', | |
75 | 'rev': 'id', |
|
74 | 'rev': 'id', | |
76 | } |
|
75 | } | |
77 | rev_spec = "max(%s(%%s))" % _revset_predicates[ref[0]] |
|
76 | rev_spec = "max(%s(%%s))" % _revset_predicates[ref[0]] | |
78 | revs = repo.scm_instance._repo.revs(rev_spec, safe_str(ref[1])) |
|
77 | revs = repo.scm_instance._repo.revs(rev_spec, safe_str(ref[1])) | |
79 | if revs: |
|
78 | if revs: | |
80 | rev = revs[-1] |
|
79 | rev = revs[-1] | |
81 | # else: TODO: just report 'not found' |
|
80 | # else: TODO: just report 'not found' | |
82 |
|
81 | |||
83 | try: |
|
82 | try: | |
84 | return repo.scm_instance.get_changeset(rev).raw_id |
|
83 | return repo.scm_instance.get_changeset(rev).raw_id | |
85 | except EmptyRepositoryError, e: |
|
84 | except EmptyRepositoryError, e: | |
86 | if not redirect_after: |
|
85 | if not redirect_after: | |
87 | return None |
|
86 | return None | |
88 | h.flash(h.literal(_('There are no changesets yet')), |
|
87 | h.flash(h.literal(_('There are no changesets yet')), | |
89 | category='warning') |
|
88 | category='warning') | |
90 | redirect(url('summary_home', repo_name=repo.repo_name)) |
|
89 | redirect(url('summary_home', repo_name=repo.repo_name)) | |
91 |
|
90 | |||
92 | except RepositoryError, e: |
|
91 | except RepositoryError, e: | |
93 | log.error(traceback.format_exc()) |
|
92 | log.error(traceback.format_exc()) | |
94 | h.flash(str(e), category='warning') |
|
93 | h.flash(safe_str(e), category='warning') | |
95 | if not partial: |
|
94 | if not partial: | |
96 | redirect(h.url('summary_home', repo_name=repo.repo_name)) |
|
95 | redirect(h.url('summary_home', repo_name=repo.repo_name)) | |
97 | raise HTTPBadRequest() |
|
96 | raise HTTPBadRequest() | |
98 |
|
97 | |||
99 | def _get_changesets(self, alias, org_repo, org_rev, other_repo, other_rev, merge): |
|
98 | def _get_changesets(self, alias, org_repo, org_rev, other_repo, other_rev, merge): | |
100 | """ |
|
99 | """ | |
101 | Returns a list of changesets that can be merged from org_repo@org_rev |
|
100 | Returns a list of changesets that can be merged from org_repo@org_rev | |
102 | to other_repo@other_rev ... and the ancestor that would be used for merge |
|
101 | to other_repo@other_rev ... and the ancestor that would be used for merge | |
103 | """ |
|
102 | """ | |
104 |
|
103 | |||
105 | ancestor = None |
|
104 | ancestor = None | |
106 |
|
105 | |||
107 | if org_rev == other_rev: |
|
106 | if org_rev == other_rev: | |
108 | changesets = [] |
|
107 | changesets = [] | |
109 | if merge: |
|
108 | if merge: | |
110 | ancestor = org_rev |
|
109 | ancestor = org_rev | |
111 |
|
110 | |||
112 | elif alias == 'hg': |
|
111 | elif alias == 'hg': | |
113 | #case two independent repos |
|
112 | #case two independent repos | |
114 | if org_repo != other_repo: |
|
113 | if org_repo != other_repo: | |
115 | hgrepo = unionrepo.unionrepository(other_repo.baseui, |
|
114 | hgrepo = unionrepo.unionrepository(other_repo.baseui, | |
116 | other_repo.path, |
|
115 | other_repo.path, | |
117 | org_repo.path) |
|
116 | org_repo.path) | |
118 | # all the changesets we are looking for will be in other_repo, |
|
117 | # all the changesets we are looking for will be in other_repo, | |
119 | # so rev numbers from hgrepo can be used in other_repo |
|
118 | # so rev numbers from hgrepo can be used in other_repo | |
120 |
|
119 | |||
121 | #no remote compare do it on the same repository |
|
120 | #no remote compare do it on the same repository | |
122 | else: |
|
121 | else: | |
123 | hgrepo = other_repo._repo |
|
122 | hgrepo = other_repo._repo | |
124 |
|
123 | |||
125 | if merge: |
|
124 | if merge: | |
126 | revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
125 | revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", | |
127 | other_rev, org_rev, org_rev) |
|
126 | other_rev, org_rev, org_rev) | |
128 |
|
127 | |||
129 | ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev) |
|
128 | ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev) | |
130 | if ancestors: |
|
129 | if ancestors: | |
131 | # pick arbitrary ancestor - but there is usually only one |
|
130 | # pick arbitrary ancestor - but there is usually only one | |
132 | ancestor = hgrepo[ancestors[0]].hex() |
|
131 | ancestor = hgrepo[ancestors[0]].hex() | |
133 | else: |
|
132 | else: | |
134 | # TODO: have both + and - changesets |
|
133 | # TODO: have both + and - changesets | |
135 | revs = hgrepo.revs("id(%s) :: id(%s) - id(%s)", |
|
134 | revs = hgrepo.revs("id(%s) :: id(%s) - id(%s)", | |
136 | org_rev, other_rev, org_rev) |
|
135 | org_rev, other_rev, org_rev) | |
137 |
|
136 | |||
138 | changesets = [other_repo.get_changeset(rev) for rev in revs] |
|
137 | changesets = [other_repo.get_changeset(rev) for rev in revs] | |
139 |
|
138 | |||
140 | elif alias == 'git': |
|
139 | elif alias == 'git': | |
141 | if org_repo != other_repo: |
|
140 | if org_repo != other_repo: | |
142 | raise Exception('Comparing of different GIT repositories is not' |
|
141 | raise Exception('Comparing of different GIT repositories is not' | |
143 | 'allowed. Got %s != %s' % (org_repo, other_repo)) |
|
142 | 'allowed. Got %s != %s' % (org_repo, other_repo)) | |
144 |
|
143 | |||
145 | so, se = org_repo.run_git_command( |
|
144 | so, se = org_repo.run_git_command( | |
146 | 'log --reverse --pretty="format: %%H" -s -p %s..%s' |
|
145 | 'log --reverse --pretty="format: %%H" -s -p %s..%s' | |
147 | % (org_rev, other_rev) |
|
146 | % (org_rev, other_rev) | |
148 | ) |
|
147 | ) | |
149 | changesets = [org_repo.get_changeset(cs) |
|
148 | changesets = [org_repo.get_changeset(cs) | |
150 | for cs in re.findall(r'[0-9a-fA-F]{40}', so)] |
|
149 | for cs in re.findall(r'[0-9a-fA-F]{40}', so)] | |
151 |
|
150 | |||
152 | return changesets, ancestor |
|
151 | return changesets, ancestor | |
153 |
|
152 | |||
154 | @LoginRequired() |
|
153 | @LoginRequired() | |
155 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
154 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
156 | 'repository.admin') |
|
155 | 'repository.admin') | |
157 | def index(self, org_ref_type, org_ref, other_ref_type, other_ref): |
|
156 | def index(self, org_ref_type, org_ref, other_ref_type, other_ref): | |
158 | # org_ref will be evaluated in org_repo |
|
157 | # org_ref will be evaluated in org_repo | |
159 | org_repo = c.rhodecode_db_repo.repo_name |
|
158 | org_repo = c.rhodecode_db_repo.repo_name | |
160 | org_ref = (org_ref_type, org_ref) |
|
159 | org_ref = (org_ref_type, org_ref) | |
161 | # other_ref will be evaluated in other_repo |
|
160 | # other_ref will be evaluated in other_repo | |
162 | other_ref = (other_ref_type, other_ref) |
|
161 | other_ref = (other_ref_type, other_ref) | |
163 | other_repo = request.GET.get('other_repo', org_repo) |
|
162 | other_repo = request.GET.get('other_repo', org_repo) | |
164 | # If merge is True: |
|
163 | # If merge is True: | |
165 | # Show what org would get if merged with other: |
|
164 | # Show what org would get if merged with other: | |
166 | # List changesets that are ancestors of other but not of org. |
|
165 | # List changesets that are ancestors of other but not of org. | |
167 | # New changesets in org is thus ignored. |
|
166 | # New changesets in org is thus ignored. | |
168 | # Diff will be from common ancestor, and merges of org to other will thus be ignored. |
|
167 | # Diff will be from common ancestor, and merges of org to other will thus be ignored. | |
169 | # If merge is False: |
|
168 | # If merge is False: | |
170 | # Make a raw diff from org to other, no matter if related or not. |
|
169 | # Make a raw diff from org to other, no matter if related or not. | |
171 | # Changesets in one and not in the other will be ignored |
|
170 | # Changesets in one and not in the other will be ignored | |
172 | merge = bool(request.GET.get('merge')) |
|
171 | merge = bool(request.GET.get('merge')) | |
173 | # fulldiff disables cut_off_limit |
|
172 | # fulldiff disables cut_off_limit | |
174 | c.fulldiff = request.GET.get('fulldiff') |
|
173 | c.fulldiff = request.GET.get('fulldiff') | |
175 | # partial uses compare_cs.html template directly |
|
174 | # partial uses compare_cs.html template directly | |
176 | partial = request.environ.get('HTTP_X_PARTIAL_XHR') |
|
175 | partial = request.environ.get('HTTP_X_PARTIAL_XHR') | |
177 | # as_form puts hidden input field with changeset revisions |
|
176 | # as_form puts hidden input field with changeset revisions | |
178 | c.as_form = partial and request.GET.get('as_form') |
|
177 | c.as_form = partial and request.GET.get('as_form') | |
179 | # swap url for compare_diff page - never partial and never as_form |
|
178 | # swap url for compare_diff page - never partial and never as_form | |
180 | c.swap_url = h.url('compare_url', |
|
179 | c.swap_url = h.url('compare_url', | |
181 | repo_name=other_repo, |
|
180 | repo_name=other_repo, | |
182 | org_ref_type=other_ref[0], org_ref=other_ref[1], |
|
181 | org_ref_type=other_ref[0], org_ref=other_ref[1], | |
183 | other_repo=org_repo, |
|
182 | other_repo=org_repo, | |
184 | other_ref_type=org_ref[0], other_ref=org_ref[1], |
|
183 | other_ref_type=org_ref[0], other_ref=org_ref[1], | |
185 | merge=merge or '') |
|
184 | merge=merge or '') | |
186 |
|
185 | |||
187 | org_repo = Repository.get_by_repo_name(org_repo) |
|
186 | org_repo = Repository.get_by_repo_name(org_repo) | |
188 | other_repo = Repository.get_by_repo_name(other_repo) |
|
187 | other_repo = Repository.get_by_repo_name(other_repo) | |
189 |
|
188 | |||
190 | if org_repo is None: |
|
189 | if org_repo is None: | |
191 | log.error('Could not find org repo %s' % org_repo) |
|
190 | log.error('Could not find org repo %s' % org_repo) | |
192 | raise HTTPNotFound |
|
191 | raise HTTPNotFound | |
193 | if other_repo is None: |
|
192 | if other_repo is None: | |
194 | log.error('Could not find other repo %s' % other_repo) |
|
193 | log.error('Could not find other repo %s' % other_repo) | |
195 | raise HTTPNotFound |
|
194 | raise HTTPNotFound | |
196 |
|
195 | |||
197 | if org_repo != other_repo and h.is_git(org_repo): |
|
196 | if org_repo != other_repo and h.is_git(org_repo): | |
198 | log.error('compare of two remote repos not available for GIT REPOS') |
|
197 | log.error('compare of two remote repos not available for GIT REPOS') | |
199 | raise HTTPNotFound |
|
198 | raise HTTPNotFound | |
200 |
|
199 | |||
201 | if org_repo.scm_instance.alias != other_repo.scm_instance.alias: |
|
200 | if org_repo.scm_instance.alias != other_repo.scm_instance.alias: | |
202 | log.error('compare of two different kind of remote repos not available') |
|
201 | log.error('compare of two different kind of remote repos not available') | |
203 | raise HTTPNotFound |
|
202 | raise HTTPNotFound | |
204 |
|
203 | |||
205 | org_rev = self.__get_rev_or_redirect(ref=org_ref, repo=org_repo, partial=partial) |
|
204 | org_rev = self.__get_rev_or_redirect(ref=org_ref, repo=org_repo, partial=partial) | |
206 | other_rev = self.__get_rev_or_redirect(ref=other_ref, repo=other_repo, partial=partial) |
|
205 | other_rev = self.__get_rev_or_redirect(ref=other_ref, repo=other_repo, partial=partial) | |
207 |
|
206 | |||
208 | c.org_repo = org_repo |
|
207 | c.org_repo = org_repo | |
209 | c.other_repo = other_repo |
|
208 | c.other_repo = other_repo | |
210 | c.org_ref = org_ref[1] |
|
209 | c.org_ref = org_ref[1] | |
211 | c.other_ref = other_ref[1] |
|
210 | c.other_ref = other_ref[1] | |
212 | c.org_ref_type = org_ref[0] |
|
211 | c.org_ref_type = org_ref[0] | |
213 | c.other_ref_type = other_ref[0] |
|
212 | c.other_ref_type = other_ref[0] | |
214 |
|
213 | |||
215 | c.cs_ranges, c.ancestor = self._get_changesets(org_repo.scm_instance.alias, |
|
214 | c.cs_ranges, c.ancestor = self._get_changesets(org_repo.scm_instance.alias, | |
216 | org_repo.scm_instance, org_rev, |
|
215 | org_repo.scm_instance, org_rev, | |
217 | other_repo.scm_instance, other_rev, |
|
216 | other_repo.scm_instance, other_rev, | |
218 | merge) |
|
217 | merge) | |
219 |
|
218 | |||
220 | c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in |
|
219 | c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in | |
221 | c.cs_ranges]) |
|
220 | c.cs_ranges]) | |
222 | if merge and not c.ancestor: |
|
221 | if merge and not c.ancestor: | |
223 | log.error('Unable to find ancestor revision') |
|
222 | log.error('Unable to find ancestor revision') | |
224 |
|
223 | |||
225 | if partial: |
|
224 | if partial: | |
226 | return render('compare/compare_cs.html') |
|
225 | return render('compare/compare_cs.html') | |
227 |
|
226 | |||
228 | if c.ancestor: |
|
227 | if c.ancestor: | |
229 | assert merge |
|
228 | assert merge | |
230 | # case we want a simple diff without incoming changesets, |
|
229 | # case we want a simple diff without incoming changesets, | |
231 | # previewing what will be merged. |
|
230 | # previewing what will be merged. | |
232 | # Make the diff on the other repo (which is known to have other_ref) |
|
231 | # Make the diff on the other repo (which is known to have other_ref) | |
233 | log.debug('Using ancestor %s as org_ref instead of %s' |
|
232 | log.debug('Using ancestor %s as org_ref instead of %s' | |
234 | % (c.ancestor, org_ref)) |
|
233 | % (c.ancestor, org_ref)) | |
235 | org_rev = c.ancestor |
|
234 | org_rev = c.ancestor | |
236 | org_repo = other_repo |
|
235 | org_repo = other_repo | |
237 |
|
236 | |||
238 | diff_limit = self.cut_off_limit if not c.fulldiff else None |
|
237 | diff_limit = self.cut_off_limit if not c.fulldiff else None | |
239 |
|
238 | |||
240 | log.debug('running diff between %s and %s in %s' |
|
239 | log.debug('running diff between %s and %s in %s' | |
241 | % (org_rev, other_rev, org_repo.scm_instance.path)) |
|
240 | % (org_rev, other_rev, org_repo.scm_instance.path)) | |
242 | txtdiff = org_repo.scm_instance.get_diff(rev1=org_rev, rev2=other_rev) |
|
241 | txtdiff = org_repo.scm_instance.get_diff(rev1=org_rev, rev2=other_rev) | |
243 |
|
242 | |||
244 | diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff', |
|
243 | diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff', | |
245 | diff_limit=diff_limit) |
|
244 | diff_limit=diff_limit) | |
246 | _parsed = diff_processor.prepare() |
|
245 | _parsed = diff_processor.prepare() | |
247 |
|
246 | |||
248 | c.limited_diff = False |
|
247 | c.limited_diff = False | |
249 | if isinstance(_parsed, LimitedDiffContainer): |
|
248 | if isinstance(_parsed, LimitedDiffContainer): | |
250 | c.limited_diff = True |
|
249 | c.limited_diff = True | |
251 |
|
250 | |||
252 | c.files = [] |
|
251 | c.files = [] | |
253 | c.changes = {} |
|
252 | c.changes = {} | |
254 | c.lines_added = 0 |
|
253 | c.lines_added = 0 | |
255 | c.lines_deleted = 0 |
|
254 | c.lines_deleted = 0 | |
256 | for f in _parsed: |
|
255 | for f in _parsed: | |
257 | st = f['stats'] |
|
256 | st = f['stats'] | |
258 | if not st['binary']: |
|
257 | if not st['binary']: | |
259 | c.lines_added += st['added'] |
|
258 | c.lines_added += st['added'] | |
260 | c.lines_deleted += st['deleted'] |
|
259 | c.lines_deleted += st['deleted'] | |
261 | fid = h.FID('', f['filename']) |
|
260 | fid = h.FID('', f['filename']) | |
262 | c.files.append([fid, f['operation'], f['filename'], f['stats']]) |
|
261 | c.files.append([fid, f['operation'], f['filename'], f['stats']]) | |
263 | htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f]) |
|
262 | htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f]) | |
264 | c.changes[fid] = [f['operation'], f['filename'], htmldiff] |
|
263 | c.changes[fid] = [f['operation'], f['filename'], htmldiff] | |
265 |
|
264 | |||
266 | return render('compare/compare_diff.html') |
|
265 | return render('compare/compare_diff.html') |
@@ -1,735 +1,735 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | rhodecode.controllers.files |
|
3 | rhodecode.controllers.files | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Files controller for RhodeCode |
|
6 | Files controller for RhodeCode | |
7 |
|
7 | |||
8 | :created_on: Apr 21, 2010 |
|
8 | :created_on: Apr 21, 2010 | |
9 | :author: marcink |
|
9 | :author: marcink | |
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> |
|
10 | :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com> | |
11 | :license: GPLv3, see COPYING for more details. |
|
11 | :license: GPLv3, see COPYING for more details. | |
12 | """ |
|
12 | """ | |
13 | # This program is free software: you can redistribute it and/or modify |
|
13 | # This program is free software: you can redistribute it and/or modify | |
14 | # it under the terms of the GNU General Public License as published by |
|
14 | # it under the terms of the GNU General Public License as published by | |
15 | # the Free Software Foundation, either version 3 of the License, or |
|
15 | # the Free Software Foundation, either version 3 of the License, or | |
16 | # (at your option) any later version. |
|
16 | # (at your option) any later version. | |
17 | # |
|
17 | # | |
18 | # This program is distributed in the hope that it will be useful, |
|
18 | # This program is distributed in the hope that it will be useful, | |
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
21 | # GNU General Public License for more details. |
|
21 | # GNU General Public License for more details. | |
22 | # |
|
22 | # | |
23 | # You should have received a copy of the GNU General Public License |
|
23 | # You should have received a copy of the GNU General Public License | |
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
25 | from __future__ import with_statement |
|
25 | from __future__ import with_statement | |
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 | import traceback |
|
28 | import traceback | |
29 | import tempfile |
|
29 | import tempfile | |
30 | import shutil |
|
30 | import shutil | |
31 |
|
31 | |||
32 | from pylons import request, response, tmpl_context as c, url |
|
32 | from pylons import request, response, tmpl_context as c, url | |
33 | from pylons.i18n.translation import _ |
|
33 | from pylons.i18n.translation import _ | |
34 | from pylons.controllers.util import redirect |
|
34 | from pylons.controllers.util import redirect | |
35 | from rhodecode.lib.utils import jsonify, action_logger |
|
35 | from rhodecode.lib.utils import jsonify, action_logger | |
36 |
|
36 | |||
37 | from rhodecode.lib import diffs |
|
37 | from rhodecode.lib import diffs | |
38 | from rhodecode.lib import helpers as h |
|
38 | from rhodecode.lib import helpers as h | |
39 |
|
39 | |||
40 | from rhodecode.lib.compat import OrderedDict |
|
40 | from rhodecode.lib.compat import OrderedDict | |
41 | from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str,\ |
|
41 | from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str,\ | |
42 | str2bool |
|
42 | str2bool | |
43 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
43 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
44 | from rhodecode.lib.base import BaseRepoController, render |
|
44 | from rhodecode.lib.base import BaseRepoController, render | |
45 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
45 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
46 | from rhodecode.lib.vcs.conf import settings |
|
46 | from rhodecode.lib.vcs.conf import settings | |
47 | from rhodecode.lib.vcs.exceptions import RepositoryError, \ |
|
47 | from rhodecode.lib.vcs.exceptions import RepositoryError, \ | |
48 | ChangesetDoesNotExistError, EmptyRepositoryError, \ |
|
48 | ChangesetDoesNotExistError, EmptyRepositoryError, \ | |
49 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,\ |
|
49 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,\ | |
50 | NodeDoesNotExistError, ChangesetError, NodeError |
|
50 | NodeDoesNotExistError, ChangesetError, NodeError | |
51 | from rhodecode.lib.vcs.nodes import FileNode |
|
51 | from rhodecode.lib.vcs.nodes import FileNode | |
52 |
|
52 | |||
53 | from rhodecode.model.repo import RepoModel |
|
53 | from rhodecode.model.repo import RepoModel | |
54 | from rhodecode.model.scm import ScmModel |
|
54 | from rhodecode.model.scm import ScmModel | |
55 | from rhodecode.model.db import Repository |
|
55 | from rhodecode.model.db import Repository | |
56 |
|
56 | |||
57 | from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\ |
|
57 | from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\ | |
58 | _context_url, get_line_ctx, get_ignore_ws |
|
58 | _context_url, get_line_ctx, get_ignore_ws | |
59 | from webob.exc import HTTPNotFound |
|
59 | from webob.exc import HTTPNotFound | |
60 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
60 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError | |
61 |
|
61 | |||
62 |
|
62 | |||
63 | log = logging.getLogger(__name__) |
|
63 | log = logging.getLogger(__name__) | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | class FilesController(BaseRepoController): |
|
66 | class FilesController(BaseRepoController): | |
67 |
|
67 | |||
68 | def __before__(self): |
|
68 | def __before__(self): | |
69 | super(FilesController, self).__before__() |
|
69 | super(FilesController, self).__before__() | |
70 | c.cut_off_limit = self.cut_off_limit |
|
70 | c.cut_off_limit = self.cut_off_limit | |
71 |
|
71 | |||
72 | def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True): |
|
72 | def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True): | |
73 | """ |
|
73 | """ | |
74 | Safe way to get changeset if error occur it redirects to tip with |
|
74 | Safe way to get changeset if error occur it redirects to tip with | |
75 | proper message |
|
75 | proper message | |
76 |
|
76 | |||
77 | :param rev: revision to fetch |
|
77 | :param rev: revision to fetch | |
78 | :param repo_name: repo name to redirect after |
|
78 | :param repo_name: repo name to redirect after | |
79 | """ |
|
79 | """ | |
80 |
|
80 | |||
81 | try: |
|
81 | try: | |
82 | return c.rhodecode_repo.get_changeset(rev) |
|
82 | return c.rhodecode_repo.get_changeset(rev) | |
83 | except EmptyRepositoryError, e: |
|
83 | except EmptyRepositoryError, e: | |
84 | if not redirect_after: |
|
84 | if not redirect_after: | |
85 | return None |
|
85 | return None | |
86 | url_ = url('files_add_home', |
|
86 | url_ = url('files_add_home', | |
87 | repo_name=c.repo_name, |
|
87 | repo_name=c.repo_name, | |
88 | revision=0, f_path='') |
|
88 | revision=0, f_path='') | |
89 | add_new = h.link_to(_('Click here to add new file'), url_) |
|
89 | add_new = h.link_to(_('Click here to add new file'), url_) | |
90 | h.flash(h.literal(_('There are no files yet %s') % add_new), |
|
90 | h.flash(h.literal(_('There are no files yet %s') % add_new), | |
91 | category='warning') |
|
91 | category='warning') | |
92 | redirect(h.url('summary_home', repo_name=repo_name)) |
|
92 | redirect(h.url('summary_home', repo_name=repo_name)) | |
93 |
|
93 | |||
94 | except RepositoryError, e: # including ChangesetDoesNotExistError |
|
94 | except RepositoryError, e: # including ChangesetDoesNotExistError | |
95 | h.flash(str(e), category='error') |
|
95 | h.flash(safe_str(e), category='error') | |
96 | raise HTTPNotFound() |
|
96 | raise HTTPNotFound() | |
97 |
|
97 | |||
98 | def __get_filenode_or_redirect(self, repo_name, cs, path): |
|
98 | def __get_filenode_or_redirect(self, repo_name, cs, path): | |
99 | """ |
|
99 | """ | |
100 | Returns file_node, if error occurs or given path is directory, |
|
100 | Returns file_node, if error occurs or given path is directory, | |
101 | it'll redirect to top level path |
|
101 | it'll redirect to top level path | |
102 |
|
102 | |||
103 | :param repo_name: repo_name |
|
103 | :param repo_name: repo_name | |
104 | :param cs: given changeset |
|
104 | :param cs: given changeset | |
105 | :param path: path to lookup |
|
105 | :param path: path to lookup | |
106 | """ |
|
106 | """ | |
107 |
|
107 | |||
108 | try: |
|
108 | try: | |
109 | file_node = cs.get_node(path) |
|
109 | file_node = cs.get_node(path) | |
110 | if file_node.is_dir(): |
|
110 | if file_node.is_dir(): | |
111 | raise RepositoryError('given path is a directory') |
|
111 | raise RepositoryError('given path is a directory') | |
112 | except RepositoryError, e: |
|
112 | except RepositoryError, e: | |
113 | h.flash(str(e), category='error') |
|
113 | h.flash(safe_str(e), category='error') | |
114 | raise HTTPNotFound() |
|
114 | raise HTTPNotFound() | |
115 |
|
115 | |||
116 | return file_node |
|
116 | return file_node | |
117 |
|
117 | |||
118 | @LoginRequired() |
|
118 | @LoginRequired() | |
119 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
119 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
120 | 'repository.admin') |
|
120 | 'repository.admin') | |
121 | def index(self, repo_name, revision, f_path, annotate=False): |
|
121 | def index(self, repo_name, revision, f_path, annotate=False): | |
122 | # redirect to given revision from form if given |
|
122 | # redirect to given revision from form if given | |
123 | post_revision = request.POST.get('at_rev', None) |
|
123 | post_revision = request.POST.get('at_rev', None) | |
124 | if post_revision: |
|
124 | if post_revision: | |
125 | cs = self.__get_cs_or_redirect(post_revision, repo_name) |
|
125 | cs = self.__get_cs_or_redirect(post_revision, repo_name) | |
126 |
|
126 | |||
127 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) |
|
127 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) | |
128 | c.branch = request.GET.get('branch', None) |
|
128 | c.branch = request.GET.get('branch', None) | |
129 | c.f_path = f_path |
|
129 | c.f_path = f_path | |
130 | c.annotate = annotate |
|
130 | c.annotate = annotate | |
131 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) |
|
131 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) | |
132 | cur_rev = c.changeset.revision |
|
132 | cur_rev = c.changeset.revision | |
133 |
|
133 | |||
134 | # prev link |
|
134 | # prev link | |
135 | try: |
|
135 | try: | |
136 | prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch) |
|
136 | prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch) | |
137 | c.url_prev = url('files_home', repo_name=c.repo_name, |
|
137 | c.url_prev = url('files_home', repo_name=c.repo_name, | |
138 | revision=prev_rev.raw_id, f_path=f_path) |
|
138 | revision=prev_rev.raw_id, f_path=f_path) | |
139 | if c.branch: |
|
139 | if c.branch: | |
140 | c.url_prev += '?branch=%s' % c.branch |
|
140 | c.url_prev += '?branch=%s' % c.branch | |
141 | except (ChangesetDoesNotExistError, VCSError): |
|
141 | except (ChangesetDoesNotExistError, VCSError): | |
142 | c.url_prev = '#' |
|
142 | c.url_prev = '#' | |
143 |
|
143 | |||
144 | # next link |
|
144 | # next link | |
145 | try: |
|
145 | try: | |
146 | next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch) |
|
146 | next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch) | |
147 | c.url_next = url('files_home', repo_name=c.repo_name, |
|
147 | c.url_next = url('files_home', repo_name=c.repo_name, | |
148 | revision=next_rev.raw_id, f_path=f_path) |
|
148 | revision=next_rev.raw_id, f_path=f_path) | |
149 | if c.branch: |
|
149 | if c.branch: | |
150 | c.url_next += '?branch=%s' % c.branch |
|
150 | c.url_next += '?branch=%s' % c.branch | |
151 | except (ChangesetDoesNotExistError, VCSError): |
|
151 | except (ChangesetDoesNotExistError, VCSError): | |
152 | c.url_next = '#' |
|
152 | c.url_next = '#' | |
153 |
|
153 | |||
154 | # files or dirs |
|
154 | # files or dirs | |
155 | try: |
|
155 | try: | |
156 | c.file = c.changeset.get_node(f_path) |
|
156 | c.file = c.changeset.get_node(f_path) | |
157 |
|
157 | |||
158 | if c.file.is_file(): |
|
158 | if c.file.is_file(): | |
159 | c.load_full_history = False |
|
159 | c.load_full_history = False | |
160 | file_last_cs = c.file.last_changeset |
|
160 | file_last_cs = c.file.last_changeset | |
161 | c.file_changeset = (c.changeset |
|
161 | c.file_changeset = (c.changeset | |
162 | if c.changeset.revision < file_last_cs.revision |
|
162 | if c.changeset.revision < file_last_cs.revision | |
163 | else file_last_cs) |
|
163 | else file_last_cs) | |
164 | #determine if we're on branch head |
|
164 | #determine if we're on branch head | |
165 | _branches = c.rhodecode_repo.branches |
|
165 | _branches = c.rhodecode_repo.branches | |
166 | c.on_branch_head = revision in _branches.keys() + _branches.values() |
|
166 | c.on_branch_head = revision in _branches.keys() + _branches.values() | |
167 | _hist = [] |
|
167 | _hist = [] | |
168 | c.file_history = [] |
|
168 | c.file_history = [] | |
169 | if c.load_full_history: |
|
169 | if c.load_full_history: | |
170 | c.file_history, _hist = self._get_node_history(c.changeset, f_path) |
|
170 | c.file_history, _hist = self._get_node_history(c.changeset, f_path) | |
171 |
|
171 | |||
172 | c.authors = [] |
|
172 | c.authors = [] | |
173 | for a in set([x.author for x in _hist]): |
|
173 | for a in set([x.author for x in _hist]): | |
174 | c.authors.append((h.email(a), h.person(a))) |
|
174 | c.authors.append((h.email(a), h.person(a))) | |
175 | else: |
|
175 | else: | |
176 | c.authors = c.file_history = [] |
|
176 | c.authors = c.file_history = [] | |
177 | except RepositoryError, e: |
|
177 | except RepositoryError, e: | |
178 | h.flash(str(e), category='error') |
|
178 | h.flash(safe_str(e), category='error') | |
179 | raise HTTPNotFound() |
|
179 | raise HTTPNotFound() | |
180 |
|
180 | |||
181 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
181 | if request.environ.get('HTTP_X_PARTIAL_XHR'): | |
182 | return render('files/files_ypjax.html') |
|
182 | return render('files/files_ypjax.html') | |
183 |
|
183 | |||
184 | return render('files/files.html') |
|
184 | return render('files/files.html') | |
185 |
|
185 | |||
186 | @LoginRequired() |
|
186 | @LoginRequired() | |
187 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
187 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
188 | 'repository.admin') |
|
188 | 'repository.admin') | |
189 | def history(self, repo_name, revision, f_path, annotate=False): |
|
189 | def history(self, repo_name, revision, f_path, annotate=False): | |
190 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
190 | if request.environ.get('HTTP_X_PARTIAL_XHR'): | |
191 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) |
|
191 | c.changeset = self.__get_cs_or_redirect(revision, repo_name) | |
192 | c.f_path = f_path |
|
192 | c.f_path = f_path | |
193 | c.annotate = annotate |
|
193 | c.annotate = annotate | |
194 | c.file = c.changeset.get_node(f_path) |
|
194 | c.file = c.changeset.get_node(f_path) | |
195 | if c.file.is_file(): |
|
195 | if c.file.is_file(): | |
196 | file_last_cs = c.file.last_changeset |
|
196 | file_last_cs = c.file.last_changeset | |
197 | c.file_changeset = (c.changeset |
|
197 | c.file_changeset = (c.changeset | |
198 | if c.changeset.revision < file_last_cs.revision |
|
198 | if c.changeset.revision < file_last_cs.revision | |
199 | else file_last_cs) |
|
199 | else file_last_cs) | |
200 | c.file_history, _hist = self._get_node_history(c.changeset, f_path) |
|
200 | c.file_history, _hist = self._get_node_history(c.changeset, f_path) | |
201 | c.authors = [] |
|
201 | c.authors = [] | |
202 | for a in set([x.author for x in _hist]): |
|
202 | for a in set([x.author for x in _hist]): | |
203 | c.authors.append((h.email(a), h.person(a))) |
|
203 | c.authors.append((h.email(a), h.person(a))) | |
204 | return render('files/files_history_box.html') |
|
204 | return render('files/files_history_box.html') | |
205 |
|
205 | |||
206 | @LoginRequired() |
|
206 | @LoginRequired() | |
207 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
207 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
208 | 'repository.admin') |
|
208 | 'repository.admin') | |
209 | def rawfile(self, repo_name, revision, f_path): |
|
209 | def rawfile(self, repo_name, revision, f_path): | |
210 | cs = self.__get_cs_or_redirect(revision, repo_name) |
|
210 | cs = self.__get_cs_or_redirect(revision, repo_name) | |
211 | file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path) |
|
211 | file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path) | |
212 |
|
212 | |||
213 | response.content_disposition = 'attachment; filename=%s' % \ |
|
213 | response.content_disposition = 'attachment; filename=%s' % \ | |
214 | safe_str(f_path.split(Repository.url_sep())[-1]) |
|
214 | safe_str(f_path.split(Repository.url_sep())[-1]) | |
215 |
|
215 | |||
216 | response.content_type = file_node.mimetype |
|
216 | response.content_type = file_node.mimetype | |
217 | return file_node.content |
|
217 | return file_node.content | |
218 |
|
218 | |||
219 | @LoginRequired() |
|
219 | @LoginRequired() | |
220 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
220 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
221 | 'repository.admin') |
|
221 | 'repository.admin') | |
222 | def raw(self, repo_name, revision, f_path): |
|
222 | def raw(self, repo_name, revision, f_path): | |
223 | cs = self.__get_cs_or_redirect(revision, repo_name) |
|
223 | cs = self.__get_cs_or_redirect(revision, repo_name) | |
224 | file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path) |
|
224 | file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path) | |
225 |
|
225 | |||
226 | raw_mimetype_mapping = { |
|
226 | raw_mimetype_mapping = { | |
227 | # map original mimetype to a mimetype used for "show as raw" |
|
227 | # map original mimetype to a mimetype used for "show as raw" | |
228 | # you can also provide a content-disposition to override the |
|
228 | # you can also provide a content-disposition to override the | |
229 | # default "attachment" disposition. |
|
229 | # default "attachment" disposition. | |
230 | # orig_type: (new_type, new_dispo) |
|
230 | # orig_type: (new_type, new_dispo) | |
231 |
|
231 | |||
232 | # show images inline: |
|
232 | # show images inline: | |
233 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
233 | 'image/x-icon': ('image/x-icon', 'inline'), | |
234 | 'image/png': ('image/png', 'inline'), |
|
234 | 'image/png': ('image/png', 'inline'), | |
235 | 'image/gif': ('image/gif', 'inline'), |
|
235 | 'image/gif': ('image/gif', 'inline'), | |
236 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
236 | 'image/jpeg': ('image/jpeg', 'inline'), | |
237 | 'image/svg+xml': ('image/svg+xml', 'inline'), |
|
237 | 'image/svg+xml': ('image/svg+xml', 'inline'), | |
238 | } |
|
238 | } | |
239 |
|
239 | |||
240 | mimetype = file_node.mimetype |
|
240 | mimetype = file_node.mimetype | |
241 | try: |
|
241 | try: | |
242 | mimetype, dispo = raw_mimetype_mapping[mimetype] |
|
242 | mimetype, dispo = raw_mimetype_mapping[mimetype] | |
243 | except KeyError: |
|
243 | except KeyError: | |
244 | # we don't know anything special about this, handle it safely |
|
244 | # we don't know anything special about this, handle it safely | |
245 | if file_node.is_binary: |
|
245 | if file_node.is_binary: | |
246 | # do same as download raw for binary files |
|
246 | # do same as download raw for binary files | |
247 | mimetype, dispo = 'application/octet-stream', 'attachment' |
|
247 | mimetype, dispo = 'application/octet-stream', 'attachment' | |
248 | else: |
|
248 | else: | |
249 | # do not just use the original mimetype, but force text/plain, |
|
249 | # do not just use the original mimetype, but force text/plain, | |
250 | # otherwise it would serve text/html and that might be unsafe. |
|
250 | # otherwise it would serve text/html and that might be unsafe. | |
251 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
251 | # Note: underlying vcs library fakes text/plain mimetype if the | |
252 | # mimetype can not be determined and it thinks it is not |
|
252 | # mimetype can not be determined and it thinks it is not | |
253 | # binary.This might lead to erroneous text display in some |
|
253 | # binary.This might lead to erroneous text display in some | |
254 | # cases, but helps in other cases, like with text files |
|
254 | # cases, but helps in other cases, like with text files | |
255 | # without extension. |
|
255 | # without extension. | |
256 | mimetype, dispo = 'text/plain', 'inline' |
|
256 | mimetype, dispo = 'text/plain', 'inline' | |
257 |
|
257 | |||
258 | if dispo == 'attachment': |
|
258 | if dispo == 'attachment': | |
259 | dispo = 'attachment; filename=%s' % \ |
|
259 | dispo = 'attachment; filename=%s' % \ | |
260 | safe_str(f_path.split(os.sep)[-1]) |
|
260 | safe_str(f_path.split(os.sep)[-1]) | |
261 |
|
261 | |||
262 | response.content_disposition = dispo |
|
262 | response.content_disposition = dispo | |
263 | response.content_type = mimetype |
|
263 | response.content_type = mimetype | |
264 | return file_node.content |
|
264 | return file_node.content | |
265 |
|
265 | |||
266 | @LoginRequired() |
|
266 | @LoginRequired() | |
267 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
267 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
268 | def edit(self, repo_name, revision, f_path): |
|
268 | def edit(self, repo_name, revision, f_path): | |
269 | repo = c.rhodecode_db_repo |
|
269 | repo = c.rhodecode_db_repo | |
270 | if repo.enable_locking and repo.locked[0]: |
|
270 | if repo.enable_locking and repo.locked[0]: | |
271 | h.flash(_('This repository is has been locked by %s on %s') |
|
271 | h.flash(_('This repository is has been locked by %s on %s') | |
272 | % (h.person_by_id(repo.locked[0]), |
|
272 | % (h.person_by_id(repo.locked[0]), | |
273 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), |
|
273 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), | |
274 | 'warning') |
|
274 | 'warning') | |
275 | return redirect(h.url('files_home', |
|
275 | return redirect(h.url('files_home', | |
276 | repo_name=repo_name, revision='tip')) |
|
276 | repo_name=repo_name, revision='tip')) | |
277 |
|
277 | |||
278 | # check if revision is a branch identifier- basically we cannot |
|
278 | # check if revision is a branch identifier- basically we cannot | |
279 | # create multiple heads via file editing |
|
279 | # create multiple heads via file editing | |
280 | _branches = repo.scm_instance.branches |
|
280 | _branches = repo.scm_instance.branches | |
281 | # check if revision is a branch name or branch hash |
|
281 | # check if revision is a branch name or branch hash | |
282 | if revision not in _branches.keys() + _branches.values(): |
|
282 | if revision not in _branches.keys() + _branches.values(): | |
283 | h.flash(_('You can only edit files with revision ' |
|
283 | h.flash(_('You can only edit files with revision ' | |
284 | 'being a valid branch '), category='warning') |
|
284 | 'being a valid branch '), category='warning') | |
285 | return redirect(h.url('files_home', |
|
285 | return redirect(h.url('files_home', | |
286 | repo_name=repo_name, revision='tip', |
|
286 | repo_name=repo_name, revision='tip', | |
287 | f_path=f_path)) |
|
287 | f_path=f_path)) | |
288 |
|
288 | |||
289 | r_post = request.POST |
|
289 | r_post = request.POST | |
290 |
|
290 | |||
291 | c.cs = self.__get_cs_or_redirect(revision, repo_name) |
|
291 | c.cs = self.__get_cs_or_redirect(revision, repo_name) | |
292 | c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path) |
|
292 | c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path) | |
293 |
|
293 | |||
294 | if c.file.is_binary: |
|
294 | if c.file.is_binary: | |
295 | return redirect(url('files_home', repo_name=c.repo_name, |
|
295 | return redirect(url('files_home', repo_name=c.repo_name, | |
296 | revision=c.cs.raw_id, f_path=f_path)) |
|
296 | revision=c.cs.raw_id, f_path=f_path)) | |
297 | c.default_message = _('Edited file %s via RhodeCode') % (f_path) |
|
297 | c.default_message = _('Edited file %s via RhodeCode') % (f_path) | |
298 | c.f_path = f_path |
|
298 | c.f_path = f_path | |
299 |
|
299 | |||
300 | if r_post: |
|
300 | if r_post: | |
301 |
|
301 | |||
302 | old_content = c.file.content |
|
302 | old_content = c.file.content | |
303 | sl = old_content.splitlines(1) |
|
303 | sl = old_content.splitlines(1) | |
304 | first_line = sl[0] if sl else '' |
|
304 | first_line = sl[0] if sl else '' | |
305 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
305 | # modes: 0 - Unix, 1 - Mac, 2 - DOS | |
306 | mode = detect_mode(first_line, 0) |
|
306 | mode = detect_mode(first_line, 0) | |
307 | content = convert_line_endings(r_post.get('content', ''), mode) |
|
307 | content = convert_line_endings(r_post.get('content', ''), mode) | |
308 |
|
308 | |||
309 | message = r_post.get('message') or c.default_message |
|
309 | message = r_post.get('message') or c.default_message | |
310 | author = self.rhodecode_user.full_contact |
|
310 | author = self.rhodecode_user.full_contact | |
311 |
|
311 | |||
312 | if content == old_content: |
|
312 | if content == old_content: | |
313 | h.flash(_('No changes'), category='warning') |
|
313 | h.flash(_('No changes'), category='warning') | |
314 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
314 | return redirect(url('changeset_home', repo_name=c.repo_name, | |
315 | revision='tip')) |
|
315 | revision='tip')) | |
316 | try: |
|
316 | try: | |
317 | self.scm_model.commit_change(repo=c.rhodecode_repo, |
|
317 | self.scm_model.commit_change(repo=c.rhodecode_repo, | |
318 | repo_name=repo_name, cs=c.cs, |
|
318 | repo_name=repo_name, cs=c.cs, | |
319 | user=self.rhodecode_user.user_id, |
|
319 | user=self.rhodecode_user.user_id, | |
320 | author=author, message=message, |
|
320 | author=author, message=message, | |
321 | content=content, f_path=f_path) |
|
321 | content=content, f_path=f_path) | |
322 | h.flash(_('Successfully committed to %s') % f_path, |
|
322 | h.flash(_('Successfully committed to %s') % f_path, | |
323 | category='success') |
|
323 | category='success') | |
324 | except Exception: |
|
324 | except Exception: | |
325 | log.error(traceback.format_exc()) |
|
325 | log.error(traceback.format_exc()) | |
326 | h.flash(_('Error occurred during commit'), category='error') |
|
326 | h.flash(_('Error occurred during commit'), category='error') | |
327 | return redirect(url('changeset_home', |
|
327 | return redirect(url('changeset_home', | |
328 | repo_name=c.repo_name, revision='tip')) |
|
328 | repo_name=c.repo_name, revision='tip')) | |
329 |
|
329 | |||
330 | return render('files/files_edit.html') |
|
330 | return render('files/files_edit.html') | |
331 |
|
331 | |||
332 | @LoginRequired() |
|
332 | @LoginRequired() | |
333 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
333 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
334 | def add(self, repo_name, revision, f_path): |
|
334 | def add(self, repo_name, revision, f_path): | |
335 |
|
335 | |||
336 | repo = Repository.get_by_repo_name(repo_name) |
|
336 | repo = Repository.get_by_repo_name(repo_name) | |
337 | if repo.enable_locking and repo.locked[0]: |
|
337 | if repo.enable_locking and repo.locked[0]: | |
338 | h.flash(_('This repository is has been locked by %s on %s') |
|
338 | h.flash(_('This repository is has been locked by %s on %s') | |
339 | % (h.person_by_id(repo.locked[0]), |
|
339 | % (h.person_by_id(repo.locked[0]), | |
340 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), |
|
340 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), | |
341 | 'warning') |
|
341 | 'warning') | |
342 | return redirect(h.url('files_home', |
|
342 | return redirect(h.url('files_home', | |
343 | repo_name=repo_name, revision='tip')) |
|
343 | repo_name=repo_name, revision='tip')) | |
344 |
|
344 | |||
345 | r_post = request.POST |
|
345 | r_post = request.POST | |
346 | c.cs = self.__get_cs_or_redirect(revision, repo_name, |
|
346 | c.cs = self.__get_cs_or_redirect(revision, repo_name, | |
347 | redirect_after=False) |
|
347 | redirect_after=False) | |
348 | if c.cs is None: |
|
348 | if c.cs is None: | |
349 | c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias) |
|
349 | c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias) | |
350 | c.default_message = (_('Added file via RhodeCode')) |
|
350 | c.default_message = (_('Added file via RhodeCode')) | |
351 | c.f_path = f_path |
|
351 | c.f_path = f_path | |
352 |
|
352 | |||
353 | if r_post: |
|
353 | if r_post: | |
354 | unix_mode = 0 |
|
354 | unix_mode = 0 | |
355 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
355 | content = convert_line_endings(r_post.get('content', ''), unix_mode) | |
356 |
|
356 | |||
357 | message = r_post.get('message') or c.default_message |
|
357 | message = r_post.get('message') or c.default_message | |
358 | filename = r_post.get('filename') |
|
358 | filename = r_post.get('filename') | |
359 | location = r_post.get('location', '') |
|
359 | location = r_post.get('location', '') | |
360 | file_obj = r_post.get('upload_file', None) |
|
360 | file_obj = r_post.get('upload_file', None) | |
361 |
|
361 | |||
362 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
362 | if file_obj is not None and hasattr(file_obj, 'filename'): | |
363 | filename = file_obj.filename |
|
363 | filename = file_obj.filename | |
364 | content = file_obj.file |
|
364 | content = file_obj.file | |
365 |
|
365 | |||
366 | if hasattr(content, 'file'): |
|
366 | if hasattr(content, 'file'): | |
367 | # non posix systems store real file under file attr |
|
367 | # non posix systems store real file under file attr | |
368 | content = content.file |
|
368 | content = content.file | |
369 |
|
369 | |||
370 | if not content: |
|
370 | if not content: | |
371 | h.flash(_('No content'), category='warning') |
|
371 | h.flash(_('No content'), category='warning') | |
372 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
372 | return redirect(url('changeset_home', repo_name=c.repo_name, | |
373 | revision='tip')) |
|
373 | revision='tip')) | |
374 | if not filename: |
|
374 | if not filename: | |
375 | h.flash(_('No filename'), category='warning') |
|
375 | h.flash(_('No filename'), category='warning') | |
376 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
376 | return redirect(url('changeset_home', repo_name=c.repo_name, | |
377 | revision='tip')) |
|
377 | revision='tip')) | |
378 | #strip all crap out of file, just leave the basename |
|
378 | #strip all crap out of file, just leave the basename | |
379 | filename = os.path.basename(filename) |
|
379 | filename = os.path.basename(filename) | |
380 | node_path = os.path.join(location, filename) |
|
380 | node_path = os.path.join(location, filename) | |
381 | author = self.rhodecode_user.full_contact |
|
381 | author = self.rhodecode_user.full_contact | |
382 |
|
382 | |||
383 | try: |
|
383 | try: | |
384 | nodes = { |
|
384 | nodes = { | |
385 | node_path: { |
|
385 | node_path: { | |
386 | 'content': content |
|
386 | 'content': content | |
387 | } |
|
387 | } | |
388 | } |
|
388 | } | |
389 | self.scm_model.create_nodes( |
|
389 | self.scm_model.create_nodes( | |
390 | user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo, |
|
390 | user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo, | |
391 | message=message, |
|
391 | message=message, | |
392 | nodes=nodes, |
|
392 | nodes=nodes, | |
393 | parent_cs=c.cs, |
|
393 | parent_cs=c.cs, | |
394 | author=author, |
|
394 | author=author, | |
395 | ) |
|
395 | ) | |
396 |
|
396 | |||
397 | h.flash(_('Successfully committed to %s') % node_path, |
|
397 | h.flash(_('Successfully committed to %s') % node_path, | |
398 | category='success') |
|
398 | category='success') | |
399 | except NonRelativePathError, e: |
|
399 | except NonRelativePathError, e: | |
400 | h.flash(_('Location must be relative path and must not ' |
|
400 | h.flash(_('Location must be relative path and must not ' | |
401 | 'contain .. in path'), category='warning') |
|
401 | 'contain .. in path'), category='warning') | |
402 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
402 | return redirect(url('changeset_home', repo_name=c.repo_name, | |
403 | revision='tip')) |
|
403 | revision='tip')) | |
404 | except (NodeError, NodeAlreadyExistsError), e: |
|
404 | except (NodeError, NodeAlreadyExistsError), e: | |
405 | h.flash(_(e), category='error') |
|
405 | h.flash(_(e), category='error') | |
406 | except Exception: |
|
406 | except Exception: | |
407 | log.error(traceback.format_exc()) |
|
407 | log.error(traceback.format_exc()) | |
408 | h.flash(_('Error occurred during commit'), category='error') |
|
408 | h.flash(_('Error occurred during commit'), category='error') | |
409 | return redirect(url('changeset_home', |
|
409 | return redirect(url('changeset_home', | |
410 | repo_name=c.repo_name, revision='tip')) |
|
410 | repo_name=c.repo_name, revision='tip')) | |
411 |
|
411 | |||
412 | return render('files/files_add.html') |
|
412 | return render('files/files_add.html') | |
413 |
|
413 | |||
414 | @LoginRequired() |
|
414 | @LoginRequired() | |
415 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
415 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
416 | 'repository.admin') |
|
416 | 'repository.admin') | |
417 | def archivefile(self, repo_name, fname): |
|
417 | def archivefile(self, repo_name, fname): | |
418 |
|
418 | |||
419 | fileformat = None |
|
419 | fileformat = None | |
420 | revision = None |
|
420 | revision = None | |
421 | ext = None |
|
421 | ext = None | |
422 | subrepos = request.GET.get('subrepos') == 'true' |
|
422 | subrepos = request.GET.get('subrepos') == 'true' | |
423 |
|
423 | |||
424 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
424 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): | |
425 | archive_spec = fname.split(ext_data[1]) |
|
425 | archive_spec = fname.split(ext_data[1]) | |
426 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
426 | if len(archive_spec) == 2 and archive_spec[1] == '': | |
427 | fileformat = a_type or ext_data[1] |
|
427 | fileformat = a_type or ext_data[1] | |
428 | revision = archive_spec[0] |
|
428 | revision = archive_spec[0] | |
429 | ext = ext_data[1] |
|
429 | ext = ext_data[1] | |
430 |
|
430 | |||
431 | try: |
|
431 | try: | |
432 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
432 | dbrepo = RepoModel().get_by_repo_name(repo_name) | |
433 | if not dbrepo.enable_downloads: |
|
433 | if not dbrepo.enable_downloads: | |
434 | return _('Downloads disabled') |
|
434 | return _('Downloads disabled') | |
435 |
|
435 | |||
436 | if c.rhodecode_repo.alias == 'hg': |
|
436 | if c.rhodecode_repo.alias == 'hg': | |
437 | # patch and reset hooks section of UI config to not run any |
|
437 | # patch and reset hooks section of UI config to not run any | |
438 | # hooks on fetching archives with subrepos |
|
438 | # hooks on fetching archives with subrepos | |
439 | for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'): |
|
439 | for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'): | |
440 | c.rhodecode_repo._repo.ui.setconfig('hooks', k, None) |
|
440 | c.rhodecode_repo._repo.ui.setconfig('hooks', k, None) | |
441 |
|
441 | |||
442 | cs = c.rhodecode_repo.get_changeset(revision) |
|
442 | cs = c.rhodecode_repo.get_changeset(revision) | |
443 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
443 | content_type = settings.ARCHIVE_SPECS[fileformat][0] | |
444 | except ChangesetDoesNotExistError: |
|
444 | except ChangesetDoesNotExistError: | |
445 | return _('Unknown revision %s') % revision |
|
445 | return _('Unknown revision %s') % revision | |
446 | except EmptyRepositoryError: |
|
446 | except EmptyRepositoryError: | |
447 | return _('Empty repository') |
|
447 | return _('Empty repository') | |
448 | except (ImproperArchiveTypeError, KeyError): |
|
448 | except (ImproperArchiveTypeError, KeyError): | |
449 | return _('Unknown archive type') |
|
449 | return _('Unknown archive type') | |
450 | # archive cache |
|
450 | # archive cache | |
451 | from rhodecode import CONFIG |
|
451 | from rhodecode import CONFIG | |
452 | rev_name = cs.raw_id[:12] |
|
452 | rev_name = cs.raw_id[:12] | |
453 | archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')), |
|
453 | archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')), | |
454 | safe_str(rev_name), ext) |
|
454 | safe_str(rev_name), ext) | |
455 |
|
455 | |||
456 | use_cached_archive = False # defines if we use cached version of archive |
|
456 | use_cached_archive = False # defines if we use cached version of archive | |
457 | archive_cache_enabled = CONFIG.get('archive_cache_dir') |
|
457 | archive_cache_enabled = CONFIG.get('archive_cache_dir') | |
458 | if not subrepos and archive_cache_enabled: |
|
458 | if not subrepos and archive_cache_enabled: | |
459 | #check if we it's ok to write |
|
459 | #check if we it's ok to write | |
460 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
460 | if not os.path.isdir(CONFIG['archive_cache_dir']): | |
461 | os.makedirs(CONFIG['archive_cache_dir']) |
|
461 | os.makedirs(CONFIG['archive_cache_dir']) | |
462 | cached_archive_path = os.path.join(CONFIG['archive_cache_dir'], archive_name) |
|
462 | cached_archive_path = os.path.join(CONFIG['archive_cache_dir'], archive_name) | |
463 | if os.path.isfile(cached_archive_path): |
|
463 | if os.path.isfile(cached_archive_path): | |
464 | log.debug('Found cached archive in %s' % cached_archive_path) |
|
464 | log.debug('Found cached archive in %s' % cached_archive_path) | |
465 | fd, archive = None, cached_archive_path |
|
465 | fd, archive = None, cached_archive_path | |
466 | use_cached_archive = True |
|
466 | use_cached_archive = True | |
467 | else: |
|
467 | else: | |
468 | log.debug('Archive %s is not yet cached' % (archive_name)) |
|
468 | log.debug('Archive %s is not yet cached' % (archive_name)) | |
469 |
|
469 | |||
470 | if not use_cached_archive: |
|
470 | if not use_cached_archive: | |
471 | #generate new archive |
|
471 | #generate new archive | |
472 | try: |
|
472 | try: | |
473 | fd, archive = tempfile.mkstemp() |
|
473 | fd, archive = tempfile.mkstemp() | |
474 | t = open(archive, 'wb') |
|
474 | t = open(archive, 'wb') | |
475 | log.debug('Creating new temp archive in %s' % archive) |
|
475 | log.debug('Creating new temp archive in %s' % archive) | |
476 | cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos) |
|
476 | cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos) | |
477 | if archive_cache_enabled: |
|
477 | if archive_cache_enabled: | |
478 | #if we generated the archive and use cache rename that |
|
478 | #if we generated the archive and use cache rename that | |
479 | log.debug('Storing new archive in %s' % cached_archive_path) |
|
479 | log.debug('Storing new archive in %s' % cached_archive_path) | |
480 | shutil.move(archive, cached_archive_path) |
|
480 | shutil.move(archive, cached_archive_path) | |
481 | archive = cached_archive_path |
|
481 | archive = cached_archive_path | |
482 | finally: |
|
482 | finally: | |
483 | t.close() |
|
483 | t.close() | |
484 |
|
484 | |||
485 | def get_chunked_archive(archive): |
|
485 | def get_chunked_archive(archive): | |
486 | stream = open(archive, 'rb') |
|
486 | stream = open(archive, 'rb') | |
487 | while True: |
|
487 | while True: | |
488 | data = stream.read(16 * 1024) |
|
488 | data = stream.read(16 * 1024) | |
489 | if not data: |
|
489 | if not data: | |
490 | stream.close() |
|
490 | stream.close() | |
491 | if fd: # fd means we used temporary file |
|
491 | if fd: # fd means we used temporary file | |
492 | os.close(fd) |
|
492 | os.close(fd) | |
493 | if not archive_cache_enabled: |
|
493 | if not archive_cache_enabled: | |
494 | log.debug('Destroing temp archive %s' % archive) |
|
494 | log.debug('Destroing temp archive %s' % archive) | |
495 | os.remove(archive) |
|
495 | os.remove(archive) | |
496 | break |
|
496 | break | |
497 | yield data |
|
497 | yield data | |
498 | # store download action |
|
498 | # store download action | |
499 | action_logger(user=c.rhodecode_user, |
|
499 | action_logger(user=c.rhodecode_user, | |
500 | action='user_downloaded_archive:%s' % (archive_name), |
|
500 | action='user_downloaded_archive:%s' % (archive_name), | |
501 | repo=repo_name, ipaddr=self.ip_addr, commit=True) |
|
501 | repo=repo_name, ipaddr=self.ip_addr, commit=True) | |
502 | response.content_disposition = str('attachment; filename=%s' % (archive_name)) |
|
502 | response.content_disposition = str('attachment; filename=%s' % (archive_name)) | |
503 | response.content_type = str(content_type) |
|
503 | response.content_type = str(content_type) | |
504 | return get_chunked_archive(archive) |
|
504 | return get_chunked_archive(archive) | |
505 |
|
505 | |||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def diff(self, repo_name, f_path):
        """
        Show or download the diff of a single file between two revisions.

        Revisions come from the ``diff1``/``diff2`` GET parameters; the
        ``diff`` GET parameter selects the action: 'download' (attachment),
        'raw' (plain text), or anything else (rendered HTML diff page).
        A ``show_rev`` GET parameter short-circuits into a redirect to the
        plain file (or annotate) view at ``diff1`` instead of diffing.

        :param repo_name: name of the repository being diffed
        :param f_path: path of the file within the repository
        """
        ignore_whitespace = request.GET.get('ignorews') == '1'
        # NOTE(review): taken straight from GET, so this may be a *string*
        # when the parameter is present — presumably downstream accepts that
        line_context = request.GET.get('context', 3)
        diff1 = request.GET.get('diff1', '')
        diff2 = request.GET.get('diff2', '')
        c.action = request.GET.get('diff')
        c.no_changes = diff1 == diff2
        c.f_path = f_path
        c.big_diff = False
        # URL-builder helpers exposed to the template
        c.anchor_url = anchor_url
        c.ignorews_url = _ignorews_url
        c.context_url = _context_url
        c.changes = OrderedDict()
        c.changes[diff2] = []

        #special case if we want a show rev only, it's impl here
        #to reduce JS and callbacks

        if request.GET.get('show_rev'):
            if str2bool(request.GET.get('annotate', 'False')):
                _url = url('files_annotate_home', repo_name=c.repo_name,
                           revision=diff1, f_path=c.f_path)
            else:
                _url = url('files_home', repo_name=c.repo_name,
                           revision=diff1, f_path=c.f_path)

            return redirect(_url)
        try:
            # resolve diff1 side; empty/null-ish revisions become an
            # EmptyChangeset with an empty FileNode so the diff still works
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
                try:
                    node1 = c.changeset_1.get_node(f_path)
                    if node1.is_dir():
                        # directories cannot be diffed as files
                        raise NodeError('%s path is a %s not a file'
                                        % (node1, type(node1)))
                except NodeDoesNotExistError:
                    # file absent at this revision: diff against empty content
                    c.changeset_1 = EmptyChangeset(cs=diff1,
                                                   revision=c.changeset_1.revision,
                                                   repo=c.rhodecode_repo)
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
            else:
                c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
                node1 = FileNode(f_path, '', changeset=c.changeset_1)

            # resolve diff2 side, mirroring the diff1 handling above
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
                try:
                    node2 = c.changeset_2.get_node(f_path)
                    if node2.is_dir():
                        raise NodeError('%s path is a %s not a file'
                                        % (node2, type(node2)))
                except NodeDoesNotExistError:
                    c.changeset_2 = EmptyChangeset(cs=diff2,
                                                   revision=c.changeset_2.revision,
                                                   repo=c.rhodecode_repo)
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
            else:
                c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
        except (RepositoryError, NodeError):
            log.error(traceback.format_exc())
            return redirect(url('files_home', repo_name=c.repo_name,
                                f_path=f_path))

        if c.action == 'download':
            # serve the raw git-style diff as a file attachment
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            diff_name = '%s_vs_%s.diff' % (diff1, diff2)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s' % diff_name
            )
            return diff.as_raw()

        elif c.action == 'raw':
            # serve the raw git-style diff inline as plain text
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
            response.content_type = 'text/plain'
            return diff.as_raw()

        else:
            # default: render HTML diff, honoring per-file whitespace/context
            # overrides keyed by the file id (FID)
            fid = h.FID(diff2, node2.path)
            line_context_lcl = get_line_ctx(fid, request.GET)
            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)

            # 'fulldiff' disables the size cutoff for large diffs
            lim = request.GET.get('fulldiff') or self.cut_off_limit
            _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
                                         filenode_new=node2,
                                         cut_off_limit=lim,
                                         ignore_whitespace=ign_whitespace_lcl,
                                         line_context=line_context_lcl,
                                         enable_comments=False)
            op = ''
            filename = node1.path
            cs_changes = {
                'fid': [cs1, cs2, op, filename, diff, st]
            }
            c.changes = cs_changes

        return render('files/file_diff.html')
614 |
|
614 | |||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def diff_2way(self, repo_name, f_path):
        """
        Render the side-by-side (2-way) diff view for a single file.

        The two revisions come from the ``diff1``/``diff2`` GET parameters.
        Both file contents are escaped and exposed to the template, which
        presumably feeds them to a client-side diff widget — TODO confirm.

        :param repo_name: name of the repository being diffed
        :param f_path: path of the file within the repository
        """
        diff1 = request.GET.get('diff1', '')
        diff2 = request.GET.get('diff2', '')
        try:
            # resolve diff1 side; empty/null-ish revisions become an
            # EmptyChangeset with an empty FileNode
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
                try:
                    node1 = c.changeset_1.get_node(f_path)
                    if node1.is_dir():
                        # directories cannot be diffed as files
                        raise NodeError('%s path is a %s not a file'
                                        % (node1, type(node1)))
                except NodeDoesNotExistError:
                    # file absent at this revision: compare against empty content
                    c.changeset_1 = EmptyChangeset(cs=diff1,
                                                   revision=c.changeset_1.revision,
                                                   repo=c.rhodecode_repo)
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
            else:
                c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
                node1 = FileNode(f_path, '', changeset=c.changeset_1)

            # resolve diff2 side, mirroring the diff1 handling above
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
                try:
                    node2 = c.changeset_2.get_node(f_path)
                    if node2.is_dir():
                        raise NodeError('%s path is a %s not a file'
                                        % (node2, type(node2)))
                except NodeDoesNotExistError:
                    c.changeset_2 = EmptyChangeset(cs=diff2,
                                                   revision=c.changeset_2.revision,
                                                   repo=c.rhodecode_repo)
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
            else:
                c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
        except (RepositoryError, NodeError):
            log.error(traceback.format_exc())
            return redirect(url('files_home', repo_name=c.repo_name,
                                f_path=f_path))

        # binary content is replaced by a placeholder string on either side
        if node2.is_binary:
            node2_content = 'binary file'
        else:
            node2_content = node2.content

        if node1.is_binary:
            node1_content = 'binary file'
        else:
            node1_content = node1.content

        # NOTE: in this Python 2 source these are literal backslash-u
        # sequences (str does not interpret \u), i.e. JavaScript-style
        # escapes for embedding the content in the template — do not
        # convert them to real unicode escapes
        html_escape_table = {
            "&": "\u0026",
            '"': "\u0022",
            "'": "\u0027",
            ">": "\u003e",
            "<": "\u003c",
            '\\': "\u005c",
            '\n': '\\n'
        }

        c.orig1 = h.html_escape((node1_content), html_escape_table)
        c.orig2 = h.html_escape((node2_content), html_escape_table)
        c.node1 = node1
        c.node2 = node2
        c.cs1 = c.changeset_1
        c.cs2 = c.changeset_2

        return render('files/diff_2way.html')
685 |
|
685 | |||
686 | def _get_node_history(self, cs, f_path, changesets=None): |
|
686 | def _get_node_history(self, cs, f_path, changesets=None): | |
687 | """ |
|
687 | """ | |
688 | get changesets history for given node |
|
688 | get changesets history for given node | |
689 |
|
689 | |||
690 | :param cs: changeset to calculate history |
|
690 | :param cs: changeset to calculate history | |
691 | :param f_path: path for node to calculate history for |
|
691 | :param f_path: path for node to calculate history for | |
692 | :param changesets: if passed don't calculate history and take |
|
692 | :param changesets: if passed don't calculate history and take | |
693 | changesets defined in this list |
|
693 | changesets defined in this list | |
694 | """ |
|
694 | """ | |
695 | # calculate history based on tip |
|
695 | # calculate history based on tip | |
696 | tip_cs = c.rhodecode_repo.get_changeset() |
|
696 | tip_cs = c.rhodecode_repo.get_changeset() | |
697 | if changesets is None: |
|
697 | if changesets is None: | |
698 | try: |
|
698 | try: | |
699 | changesets = tip_cs.get_file_history(f_path) |
|
699 | changesets = tip_cs.get_file_history(f_path) | |
700 | except (NodeDoesNotExistError, ChangesetError): |
|
700 | except (NodeDoesNotExistError, ChangesetError): | |
701 | #this node is not present at tip ! |
|
701 | #this node is not present at tip ! | |
702 | changesets = cs.get_file_history(f_path) |
|
702 | changesets = cs.get_file_history(f_path) | |
703 | hist_l = [] |
|
703 | hist_l = [] | |
704 |
|
704 | |||
705 | changesets_group = ([], _("Changesets")) |
|
705 | changesets_group = ([], _("Changesets")) | |
706 | branches_group = ([], _("Branches")) |
|
706 | branches_group = ([], _("Branches")) | |
707 | tags_group = ([], _("Tags")) |
|
707 | tags_group = ([], _("Tags")) | |
708 | _hg = cs.repository.alias == 'hg' |
|
708 | _hg = cs.repository.alias == 'hg' | |
709 | for chs in changesets: |
|
709 | for chs in changesets: | |
710 | #_branch = '(%s)' % chs.branch if _hg else '' |
|
710 | #_branch = '(%s)' % chs.branch if _hg else '' | |
711 | _branch = chs.branch |
|
711 | _branch = chs.branch | |
712 | n_desc = 'r%s:%s (%s)' % (chs.revision, chs.short_id, _branch) |
|
712 | n_desc = 'r%s:%s (%s)' % (chs.revision, chs.short_id, _branch) | |
713 | changesets_group[0].append((chs.raw_id, n_desc,)) |
|
713 | changesets_group[0].append((chs.raw_id, n_desc,)) | |
714 | hist_l.append(changesets_group) |
|
714 | hist_l.append(changesets_group) | |
715 |
|
715 | |||
716 | for name, chs in c.rhodecode_repo.branches.items(): |
|
716 | for name, chs in c.rhodecode_repo.branches.items(): | |
717 | branches_group[0].append((chs, name),) |
|
717 | branches_group[0].append((chs, name),) | |
718 | hist_l.append(branches_group) |
|
718 | hist_l.append(branches_group) | |
719 |
|
719 | |||
720 | for name, chs in c.rhodecode_repo.tags.items(): |
|
720 | for name, chs in c.rhodecode_repo.tags.items(): | |
721 | tags_group[0].append((chs, name),) |
|
721 | tags_group[0].append((chs, name),) | |
722 | hist_l.append(tags_group) |
|
722 | hist_l.append(tags_group) | |
723 |
|
723 | |||
724 | return hist_l, changesets |
|
724 | return hist_l, changesets | |
725 |
|
725 | |||
726 | @LoginRequired() |
|
726 | @LoginRequired() | |
727 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
727 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
728 | 'repository.admin') |
|
728 | 'repository.admin') | |
729 | @jsonify |
|
729 | @jsonify | |
730 | def nodelist(self, repo_name, revision, f_path): |
|
730 | def nodelist(self, repo_name, revision, f_path): | |
731 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
731 | if request.environ.get('HTTP_X_PARTIAL_XHR'): | |
732 | cs = self.__get_cs_or_redirect(revision, repo_name) |
|
732 | cs = self.__get_cs_or_redirect(revision, repo_name) | |
733 | _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path, |
|
733 | _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path, | |
734 | flat=False) |
|
734 | flat=False) | |
735 | return {'nodes': _d + _f} |
|
735 | return {'nodes': _d + _f} |
General Comments 0
You need to be logged in to leave comments.
Login now