Show More
The requested changes are too big and content was truncated. Show full diff
@@ -1,749 +1,749 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | import logging |
|
23 | 23 | import collections |
|
24 | 24 | |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import BaseAppView |
|
28 | 28 | from rhodecode.lib import helpers as h |
|
29 | 29 | from rhodecode.lib.auth import ( |
|
30 | 30 | LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, |
|
31 | 31 | CSRFRequired) |
|
32 | 32 | from rhodecode.lib.index import searcher_from_config |
|
33 | 33 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
34 | 34 | from rhodecode.lib.ext_json import json |
|
35 | 35 | from rhodecode.model.db import ( |
|
36 | 36 | func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup) |
|
37 | 37 | from rhodecode.model.repo import RepoModel |
|
38 | 38 | from rhodecode.model.repo_group import RepoGroupModel |
|
39 | 39 | from rhodecode.model.scm import RepoGroupList, RepoList |
|
40 | 40 | from rhodecode.model.user import UserModel |
|
41 | 41 | from rhodecode.model.user_group import UserGroupModel |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class HomeView(BaseAppView): |
|
47 | 47 | |
|
48 | 48 | def load_default_context(self): |
|
49 | 49 | c = self._get_local_tmpl_context() |
|
50 | 50 | c.user = c.auth_user.get_instance() |
|
51 | 51 | |
|
52 | 52 | return c |
|
53 | 53 | |
|
54 | 54 | @LoginRequired() |
|
55 | 55 | @view_config( |
|
56 | 56 | route_name='user_autocomplete_data', request_method='GET', |
|
57 | 57 | renderer='json_ext', xhr=True) |
|
58 | 58 | def user_autocomplete_data(self): |
|
59 | 59 | self.load_default_context() |
|
60 | 60 | query = self.request.GET.get('query') |
|
61 | 61 | active = str2bool(self.request.GET.get('active') or True) |
|
62 | 62 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
63 | 63 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
64 | 64 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
65 | 65 | |
|
66 | 66 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
67 | 67 | query, active, include_groups) |
|
68 | 68 | |
|
69 | 69 | _users = UserModel().get_users( |
|
70 | 70 | name_contains=query, only_active=active) |
|
71 | 71 | |
|
72 | 72 | def maybe_skip_default_user(usr): |
|
73 | 73 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
74 | 74 | return False |
|
75 | 75 | return True |
|
76 | 76 | _users = filter(maybe_skip_default_user, _users) |
|
77 | 77 | |
|
78 | 78 | if include_groups: |
|
79 | 79 | # extend with user groups |
|
80 | 80 | _user_groups = UserGroupModel().get_user_groups( |
|
81 | 81 | name_contains=query, only_active=active, |
|
82 | 82 | expand_groups=expand_groups) |
|
83 | 83 | _users = _users + _user_groups |
|
84 | 84 | |
|
85 | 85 | return {'suggestions': _users} |
|
86 | 86 | |
|
87 | 87 | @LoginRequired() |
|
88 | 88 | @NotAnonymous() |
|
89 | 89 | @view_config( |
|
90 | 90 | route_name='user_group_autocomplete_data', request_method='GET', |
|
91 | 91 | renderer='json_ext', xhr=True) |
|
92 | 92 | def user_group_autocomplete_data(self): |
|
93 | 93 | self.load_default_context() |
|
94 | 94 | query = self.request.GET.get('query') |
|
95 | 95 | active = str2bool(self.request.GET.get('active') or True) |
|
96 | 96 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
97 | 97 | |
|
98 | 98 | log.debug('generating user group list, query:%s, active:%s', |
|
99 | 99 | query, active) |
|
100 | 100 | |
|
101 | 101 | _user_groups = UserGroupModel().get_user_groups( |
|
102 | 102 | name_contains=query, only_active=active, |
|
103 | 103 | expand_groups=expand_groups) |
|
104 | 104 | _user_groups = _user_groups |
|
105 | 105 | |
|
106 | 106 | return {'suggestions': _user_groups} |
|
107 | 107 | |
|
108 | 108 | def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20): |
|
109 | 109 | org_query = name_contains |
|
110 | 110 | allowed_ids = self._rhodecode_user.repo_acl_ids( |
|
111 | 111 | ['repository.read', 'repository.write', 'repository.admin'], |
|
112 | 112 | cache=False, name_filter=name_contains) or [-1] |
|
113 | 113 | |
|
114 | 114 | query = Repository.query()\ |
|
115 | 115 | .filter(Repository.archived.isnot(true()))\ |
|
116 | 116 | .filter(or_( |
|
117 | 117 | # generate multiple IN to fix limitation problems |
|
118 | 118 | *in_filter_generator(Repository.repo_id, allowed_ids) |
|
119 | 119 | )) |
|
120 | 120 | |
|
121 | 121 | query = query.order_by(case( |
|
122 | 122 | [ |
|
123 | 123 | (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'), |
|
124 | 124 | ], |
|
125 | 125 | )) |
|
126 | 126 | query = query.order_by(func.length(Repository.repo_name)) |
|
127 | 127 | query = query.order_by(Repository.repo_name) |
|
128 | 128 | |
|
129 | 129 | if repo_type: |
|
130 | 130 | query = query.filter(Repository.repo_type == repo_type) |
|
131 | 131 | |
|
132 | 132 | if name_contains: |
|
133 | 133 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
134 | 134 | query = query.filter( |
|
135 | 135 | Repository.repo_name.ilike(ilike_expression)) |
|
136 | 136 | query = query.limit(limit) |
|
137 | 137 | |
|
138 | 138 | acl_iter = query |
|
139 | 139 | |
|
140 | 140 | return [ |
|
141 | 141 | { |
|
142 | 142 | 'id': obj.repo_name, |
|
143 | 143 | 'value': org_query, |
|
144 | 144 | 'value_display': obj.repo_name, |
|
145 | 145 | 'text': obj.repo_name, |
|
146 | 146 | 'type': 'repo', |
|
147 | 147 | 'repo_id': obj.repo_id, |
|
148 | 148 | 'repo_type': obj.repo_type, |
|
149 | 149 | 'private': obj.private, |
|
150 | 150 | 'url': h.route_path('repo_summary', repo_name=obj.repo_name) |
|
151 | 151 | } |
|
152 | 152 | for obj in acl_iter] |
|
153 | 153 | |
|
154 | 154 | def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20): |
|
155 | 155 | org_query = name_contains |
|
156 | 156 | allowed_ids = self._rhodecode_user.repo_group_acl_ids( |
|
157 | 157 | ['group.read', 'group.write', 'group.admin'], |
|
158 | 158 | cache=False, name_filter=name_contains) or [-1] |
|
159 | 159 | |
|
160 | 160 | query = RepoGroup.query()\ |
|
161 | 161 | .filter(or_( |
|
162 | 162 | # generate multiple IN to fix limitation problems |
|
163 | 163 | *in_filter_generator(RepoGroup.group_id, allowed_ids) |
|
164 | 164 | )) |
|
165 | 165 | |
|
166 | 166 | query = query.order_by(case( |
|
167 | 167 | [ |
|
168 | 168 | (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'), |
|
169 | 169 | ], |
|
170 | 170 | )) |
|
171 | 171 | query = query.order_by(func.length(RepoGroup.group_name)) |
|
172 | 172 | query = query.order_by(RepoGroup.group_name) |
|
173 | 173 | |
|
174 | 174 | if name_contains: |
|
175 | 175 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
176 | 176 | query = query.filter( |
|
177 | 177 | RepoGroup.group_name.ilike(ilike_expression)) |
|
178 | 178 | query = query.limit(limit) |
|
179 | 179 | |
|
180 | 180 | acl_iter = query |
|
181 | 181 | |
|
182 | 182 | return [ |
|
183 | 183 | { |
|
184 | 184 | 'id': obj.group_name, |
|
185 | 185 | 'value': org_query, |
|
186 | 186 | 'value_display': obj.group_name, |
|
187 | 187 | 'text': obj.group_name, |
|
188 | 188 | 'type': 'repo_group', |
|
189 | 189 | 'repo_group_id': obj.group_id, |
|
190 | 190 | 'url': h.route_path( |
|
191 | 191 | 'repo_group_home', repo_group_name=obj.group_name) |
|
192 | 192 | } |
|
193 | 193 | for obj in acl_iter] |
|
194 | 194 | |
|
195 | 195 | def _get_user_list(self, name_contains=None, limit=20): |
|
196 | 196 | org_query = name_contains |
|
197 | 197 | if not name_contains: |
|
198 | 198 | return [], False |
|
199 | 199 | |
|
200 | 200 | # TODO(marcink): should all logged in users be allowed to search others? |
|
201 | 201 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
202 | 202 | if not allowed_user_search: |
|
203 | 203 | return [], False |
|
204 | 204 | |
|
205 | 205 | name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains) |
|
206 | 206 | if len(name_contains) != 1: |
|
207 | 207 | return [], False |
|
208 | 208 | |
|
209 | 209 | name_contains = name_contains[0] |
|
210 | 210 | |
|
211 | 211 | query = User.query()\ |
|
212 | 212 | .order_by(func.length(User.username))\ |
|
213 | 213 | .order_by(User.username) \ |
|
214 | 214 | .filter(User.username != User.DEFAULT_USER) |
|
215 | 215 | |
|
216 | 216 | if name_contains: |
|
217 | 217 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
218 | 218 | query = query.filter( |
|
219 | 219 | User.username.ilike(ilike_expression)) |
|
220 | 220 | query = query.limit(limit) |
|
221 | 221 | |
|
222 | 222 | acl_iter = query |
|
223 | 223 | |
|
224 | 224 | return [ |
|
225 | 225 | { |
|
226 | 226 | 'id': obj.user_id, |
|
227 | 227 | 'value': org_query, |
|
228 | 228 | 'value_display': 'user: `{}`'.format(obj.username), |
|
229 | 229 | 'type': 'user', |
|
230 | 230 | 'icon_link': h.gravatar_url(obj.email, 30), |
|
231 | 231 | 'url': h.route_path( |
|
232 | 232 | 'user_profile', username=obj.username) |
|
233 | 233 | } |
|
234 | 234 | for obj in acl_iter], True |
|
235 | 235 | |
|
236 | 236 | def _get_user_groups_list(self, name_contains=None, limit=20): |
|
237 | 237 | org_query = name_contains |
|
238 | 238 | if not name_contains: |
|
239 | 239 | return [], False |
|
240 | 240 | |
|
241 | 241 | # TODO(marcink): should all logged in users be allowed to search others? |
|
242 | 242 | allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER |
|
243 | 243 | if not allowed_user_search: |
|
244 | 244 | return [], False |
|
245 | 245 | |
|
246 | 246 | name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains) |
|
247 | 247 | if len(name_contains) != 1: |
|
248 | 248 | return [], False |
|
249 | 249 | |
|
250 | 250 | name_contains = name_contains[0] |
|
251 | 251 | |
|
252 | 252 | query = UserGroup.query()\ |
|
253 | 253 | .order_by(func.length(UserGroup.users_group_name))\ |
|
254 | 254 | .order_by(UserGroup.users_group_name) |
|
255 | 255 | |
|
256 | 256 | if name_contains: |
|
257 | 257 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
258 | 258 | query = query.filter( |
|
259 | 259 | UserGroup.users_group_name.ilike(ilike_expression)) |
|
260 | 260 | query = query.limit(limit) |
|
261 | 261 | |
|
262 | 262 | acl_iter = query |
|
263 | 263 | |
|
264 | 264 | return [ |
|
265 | 265 | { |
|
266 | 266 | 'id': obj.users_group_id, |
|
267 | 267 | 'value': org_query, |
|
268 | 268 | 'value_display': 'user_group: `{}`'.format(obj.users_group_name), |
|
269 | 269 | 'type': 'user_group', |
|
270 | 270 | 'url': h.route_path( |
|
271 | 271 | 'user_group_profile', user_group_name=obj.users_group_name) |
|
272 | 272 | } |
|
273 | 273 | for obj in acl_iter], True |
|
274 | 274 | |
|
275 | 275 | def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
276 | 276 | repo_name = repo_group_name = None |
|
277 | 277 | if repo: |
|
278 | 278 | repo_name = repo.repo_name |
|
279 | 279 | if repo_group: |
|
280 | 280 | repo_group_name = repo_group.group_name |
|
281 | 281 | |
|
282 | 282 | org_query = query |
|
283 | 283 | if not query or len(query) < 3 or not searcher: |
|
284 | 284 | return [], False |
|
285 | 285 | |
|
286 | 286 | commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query) |
|
287 | 287 | |
|
288 | 288 | if len(commit_hashes) != 1: |
|
289 | 289 | return [], False |
|
290 | 290 | |
|
291 | 291 | commit_hash = commit_hashes[0] |
|
292 | 292 | |
|
293 | 293 | result = searcher.search( |
|
294 | 294 | 'commit_id:{}*'.format(commit_hash), 'commit', auth_user, |
|
295 | 295 | repo_name, repo_group_name, raise_on_exc=False) |
|
296 | 296 | |
|
297 | 297 | commits = [] |
|
298 | 298 | for entry in result['results']: |
|
299 | 299 | repo_data = { |
|
300 | 300 | 'repository_id': entry.get('repository_id'), |
|
301 | 301 | 'repository_type': entry.get('repo_type'), |
|
302 | 302 | 'repository_name': entry.get('repository'), |
|
303 | 303 | } |
|
304 | 304 | |
|
305 | 305 | commit_entry = { |
|
306 | 306 | 'id': entry['commit_id'], |
|
307 | 307 | 'value': org_query, |
|
308 | 308 | 'value_display': '`{}` commit: {}'.format( |
|
309 | 309 | entry['repository'], entry['commit_id']), |
|
310 | 310 | 'type': 'commit', |
|
311 | 311 | 'repo': entry['repository'], |
|
312 | 312 | 'repo_data': repo_data, |
|
313 | 313 | |
|
314 | 314 | 'url': h.route_path( |
|
315 | 315 | 'repo_commit', |
|
316 | 316 | repo_name=entry['repository'], commit_id=entry['commit_id']) |
|
317 | 317 | } |
|
318 | 318 | |
|
319 | 319 | commits.append(commit_entry) |
|
320 | 320 | return commits, True |
|
321 | 321 | |
|
322 | 322 | def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None): |
|
323 | 323 | repo_name = repo_group_name = None |
|
324 | 324 | if repo: |
|
325 | 325 | repo_name = repo.repo_name |
|
326 | 326 | if repo_group: |
|
327 | 327 | repo_group_name = repo_group.group_name |
|
328 | 328 | |
|
329 | 329 | org_query = query |
|
330 | 330 | if not query or len(query) < 3 or not searcher: |
|
331 | 331 | return [], False |
|
332 | 332 | |
|
333 | 333 | paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query) |
|
334 | 334 | if len(paths_re) != 1: |
|
335 | 335 | return [], False |
|
336 | 336 | |
|
337 | 337 | file_path = paths_re[0] |
|
338 | 338 | |
|
339 | 339 | search_path = searcher.escape_specials(file_path) |
|
340 | 340 | result = searcher.search( |
|
341 | 341 | 'file.raw:*{}*'.format(search_path), 'path', auth_user, |
|
342 | 342 | repo_name, repo_group_name, raise_on_exc=False) |
|
343 | 343 | |
|
344 | 344 | files = [] |
|
345 | 345 | for entry in result['results']: |
|
346 | 346 | repo_data = { |
|
347 | 347 | 'repository_id': entry.get('repository_id'), |
|
348 | 348 | 'repository_type': entry.get('repo_type'), |
|
349 | 349 | 'repository_name': entry.get('repository'), |
|
350 | 350 | } |
|
351 | 351 | |
|
352 | 352 | file_entry = { |
|
353 | 353 | 'id': entry['commit_id'], |
|
354 | 354 | 'value': org_query, |
|
355 | 355 | 'value_display': '`{}` file: {}'.format( |
|
356 | 356 | entry['repository'], entry['file']), |
|
357 | 357 | 'type': 'file', |
|
358 | 358 | 'repo': entry['repository'], |
|
359 | 359 | 'repo_data': repo_data, |
|
360 | 360 | |
|
361 | 361 | 'url': h.route_path( |
|
362 | 362 | 'repo_files', |
|
363 | 363 | repo_name=entry['repository'], commit_id=entry['commit_id'], |
|
364 | 364 | f_path=entry['file']) |
|
365 | 365 | } |
|
366 | 366 | |
|
367 | 367 | files.append(file_entry) |
|
368 | 368 | return files, True |
|
369 | 369 | |
|
370 | 370 | @LoginRequired() |
|
371 | 371 | @view_config( |
|
372 | 372 | route_name='repo_list_data', request_method='GET', |
|
373 | 373 | renderer='json_ext', xhr=True) |
|
374 | 374 | def repo_list_data(self): |
|
375 | 375 | _ = self.request.translate |
|
376 | 376 | self.load_default_context() |
|
377 | 377 | |
|
378 | 378 | query = self.request.GET.get('query') |
|
379 | 379 | repo_type = self.request.GET.get('repo_type') |
|
380 | 380 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
381 | 381 | query, repo_type) |
|
382 | 382 | |
|
383 | 383 | res = [] |
|
384 | 384 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
385 | 385 | if repos: |
|
386 | 386 | res.append({ |
|
387 | 387 | 'text': _('Repositories'), |
|
388 | 388 | 'children': repos |
|
389 | 389 | }) |
|
390 | 390 | |
|
391 | 391 | data = { |
|
392 | 392 | 'more': False, |
|
393 | 393 | 'results': res |
|
394 | 394 | } |
|
395 | 395 | return data |
|
396 | 396 | |
|
397 | 397 | @LoginRequired() |
|
398 | 398 | @view_config( |
|
399 | 399 | route_name='repo_group_list_data', request_method='GET', |
|
400 | 400 | renderer='json_ext', xhr=True) |
|
401 | 401 | def repo_group_list_data(self): |
|
402 | 402 | _ = self.request.translate |
|
403 | 403 | self.load_default_context() |
|
404 | 404 | |
|
405 | 405 | query = self.request.GET.get('query') |
|
406 | 406 | |
|
407 | 407 | log.debug('generating repo group list, query:%s', |
|
408 | 408 | query) |
|
409 | 409 | |
|
410 | 410 | res = [] |
|
411 | 411 | repo_groups = self._get_repo_group_list(query) |
|
412 | 412 | if repo_groups: |
|
413 | 413 | res.append({ |
|
414 | 414 | 'text': _('Repository Groups'), |
|
415 | 415 | 'children': repo_groups |
|
416 | 416 | }) |
|
417 | 417 | |
|
418 | 418 | data = { |
|
419 | 419 | 'more': False, |
|
420 | 420 | 'results': res |
|
421 | 421 | } |
|
422 | 422 | return data |
|
423 | 423 | |
|
424 | 424 | def _get_default_search_queries(self, search_context, searcher, query): |
|
425 | 425 | if not searcher: |
|
426 | 426 | return [] |
|
427 | 427 | |
|
428 | 428 | is_es_6 = searcher.is_es_6 |
|
429 | 429 | |
|
430 | 430 | queries = [] |
|
431 | 431 | repo_group_name, repo_name, repo_context = None, None, None |
|
432 | 432 | |
|
433 | 433 | # repo group context |
|
434 | 434 | if search_context.get('search_context[repo_group_name]'): |
|
435 | 435 | repo_group_name = search_context.get('search_context[repo_group_name]') |
|
436 | 436 | if search_context.get('search_context[repo_name]'): |
|
437 | 437 | repo_name = search_context.get('search_context[repo_name]') |
|
438 | 438 | repo_context = search_context.get('search_context[repo_view_type]') |
|
439 | 439 | |
|
440 | 440 | if is_es_6 and repo_name: |
|
441 | 441 | # files |
|
442 | 442 | def query_modifier(): |
|
443 | 443 | qry = query |
|
444 | 444 | return {'q': qry, 'type': 'content'} |
|
445 | 445 | label = u'File search for `{}` in this repository.'.format(query) |
|
446 | 446 | file_qry = { |
|
447 | 447 | 'id': -10, |
|
448 | 448 | 'value': query, |
|
449 | 449 | 'value_display': label, |
|
450 | 450 | 'type': 'search', |
|
451 | 451 | 'url': h.route_path('search_repo', |
|
452 | 452 | repo_name=repo_name, |
|
453 | 453 | _query=query_modifier()) |
|
454 | 454 | } |
|
455 | 455 | |
|
456 | 456 | # commits |
|
457 | 457 | def query_modifier(): |
|
458 | 458 | qry = query |
|
459 | 459 | return {'q': qry, 'type': 'commit'} |
|
460 | 460 | |
|
461 | 461 | label = u'Commit search for `{}` in this repository.'.format(query) |
|
462 | 462 | commit_qry = { |
|
463 | 463 | 'id': -20, |
|
464 | 464 | 'value': query, |
|
465 | 465 | 'value_display': label, |
|
466 | 466 | 'type': 'search', |
|
467 | 467 | 'url': h.route_path('search_repo', |
|
468 | 468 | repo_name=repo_name, |
|
469 | 469 | _query=query_modifier()) |
|
470 | 470 | } |
|
471 | 471 | |
|
472 |
if repo_context in ['commit', 'c |
|
|
472 | if repo_context in ['commit', 'commits']: | |
|
473 | 473 | queries.extend([commit_qry, file_qry]) |
|
474 | 474 | elif repo_context in ['files', 'summary']: |
|
475 | 475 | queries.extend([file_qry, commit_qry]) |
|
476 | 476 | else: |
|
477 | 477 | queries.extend([commit_qry, file_qry]) |
|
478 | 478 | |
|
479 | 479 | elif is_es_6 and repo_group_name: |
|
480 | 480 | # files |
|
481 | 481 | def query_modifier(): |
|
482 | 482 | qry = query |
|
483 | 483 | return {'q': qry, 'type': 'content'} |
|
484 | 484 | |
|
485 | 485 | label = u'File search for `{}` in this repository group'.format(query) |
|
486 | 486 | file_qry = { |
|
487 | 487 | 'id': -30, |
|
488 | 488 | 'value': query, |
|
489 | 489 | 'value_display': label, |
|
490 | 490 | 'type': 'search', |
|
491 | 491 | 'url': h.route_path('search_repo_group', |
|
492 | 492 | repo_group_name=repo_group_name, |
|
493 | 493 | _query=query_modifier()) |
|
494 | 494 | } |
|
495 | 495 | |
|
496 | 496 | # commits |
|
497 | 497 | def query_modifier(): |
|
498 | 498 | qry = query |
|
499 | 499 | return {'q': qry, 'type': 'commit'} |
|
500 | 500 | |
|
501 | 501 | label = u'Commit search for `{}` in this repository group'.format(query) |
|
502 | 502 | commit_qry = { |
|
503 | 503 | 'id': -40, |
|
504 | 504 | 'value': query, |
|
505 | 505 | 'value_display': label, |
|
506 | 506 | 'type': 'search', |
|
507 | 507 | 'url': h.route_path('search_repo_group', |
|
508 | 508 | repo_group_name=repo_group_name, |
|
509 | 509 | _query=query_modifier()) |
|
510 | 510 | } |
|
511 | 511 | |
|
512 |
if repo_context in ['commit', 'c |
|
|
512 | if repo_context in ['commit', 'commits']: | |
|
513 | 513 | queries.extend([commit_qry, file_qry]) |
|
514 | 514 | elif repo_context in ['files', 'summary']: |
|
515 | 515 | queries.extend([file_qry, commit_qry]) |
|
516 | 516 | else: |
|
517 | 517 | queries.extend([commit_qry, file_qry]) |
|
518 | 518 | |
|
519 | 519 | # Global, not scoped |
|
520 | 520 | if not queries: |
|
521 | 521 | queries.append( |
|
522 | 522 | { |
|
523 | 523 | 'id': -1, |
|
524 | 524 | 'value': query, |
|
525 | 525 | 'value_display': u'File search for: `{}`'.format(query), |
|
526 | 526 | 'type': 'search', |
|
527 | 527 | 'url': h.route_path('search', |
|
528 | 528 | _query={'q': query, 'type': 'content'}) |
|
529 | 529 | }) |
|
530 | 530 | queries.append( |
|
531 | 531 | { |
|
532 | 532 | 'id': -2, |
|
533 | 533 | 'value': query, |
|
534 | 534 | 'value_display': u'Commit search for: `{}`'.format(query), |
|
535 | 535 | 'type': 'search', |
|
536 | 536 | 'url': h.route_path('search', |
|
537 | 537 | _query={'q': query, 'type': 'commit'}) |
|
538 | 538 | }) |
|
539 | 539 | |
|
540 | 540 | return queries |
|
541 | 541 | |
|
542 | 542 | @LoginRequired() |
|
543 | 543 | @view_config( |
|
544 | 544 | route_name='goto_switcher_data', request_method='GET', |
|
545 | 545 | renderer='json_ext', xhr=True) |
|
546 | 546 | def goto_switcher_data(self): |
|
547 | 547 | c = self.load_default_context() |
|
548 | 548 | |
|
549 | 549 | _ = self.request.translate |
|
550 | 550 | |
|
551 | 551 | query = self.request.GET.get('query') |
|
552 | 552 | log.debug('generating main filter data, query %s', query) |
|
553 | 553 | |
|
554 | 554 | res = [] |
|
555 | 555 | if not query: |
|
556 | 556 | return {'suggestions': res} |
|
557 | 557 | |
|
558 | 558 | def no_match(name): |
|
559 | 559 | return { |
|
560 | 560 | 'id': -1, |
|
561 | 561 | 'value': "", |
|
562 | 562 | 'value_display': name, |
|
563 | 563 | 'type': 'text', |
|
564 | 564 | 'url': "" |
|
565 | 565 | } |
|
566 | 566 | searcher = searcher_from_config(self.request.registry.settings) |
|
567 | 567 | has_specialized_search = False |
|
568 | 568 | |
|
569 | 569 | # set repo context |
|
570 | 570 | repo = None |
|
571 | 571 | repo_id = safe_int(self.request.GET.get('search_context[repo_id]')) |
|
572 | 572 | if repo_id: |
|
573 | 573 | repo = Repository.get(repo_id) |
|
574 | 574 | |
|
575 | 575 | # set group context |
|
576 | 576 | repo_group = None |
|
577 | 577 | repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]')) |
|
578 | 578 | if repo_group_id: |
|
579 | 579 | repo_group = RepoGroup.get(repo_group_id) |
|
580 | 580 | prefix_match = False |
|
581 | 581 | |
|
582 | 582 | # user: type search |
|
583 | 583 | if not prefix_match: |
|
584 | 584 | users, prefix_match = self._get_user_list(query) |
|
585 | 585 | if users: |
|
586 | 586 | has_specialized_search = True |
|
587 | 587 | for serialized_user in users: |
|
588 | 588 | res.append(serialized_user) |
|
589 | 589 | elif prefix_match: |
|
590 | 590 | has_specialized_search = True |
|
591 | 591 | res.append(no_match('No matching users found')) |
|
592 | 592 | |
|
593 | 593 | # user_group: type search |
|
594 | 594 | if not prefix_match: |
|
595 | 595 | user_groups, prefix_match = self._get_user_groups_list(query) |
|
596 | 596 | if user_groups: |
|
597 | 597 | has_specialized_search = True |
|
598 | 598 | for serialized_user_group in user_groups: |
|
599 | 599 | res.append(serialized_user_group) |
|
600 | 600 | elif prefix_match: |
|
601 | 601 | has_specialized_search = True |
|
602 | 602 | res.append(no_match('No matching user groups found')) |
|
603 | 603 | |
|
604 | 604 | # FTS commit: type search |
|
605 | 605 | if not prefix_match: |
|
606 | 606 | commits, prefix_match = self._get_hash_commit_list( |
|
607 | 607 | c.auth_user, searcher, query, repo, repo_group) |
|
608 | 608 | if commits: |
|
609 | 609 | has_specialized_search = True |
|
610 | 610 | unique_repos = collections.OrderedDict() |
|
611 | 611 | for commit in commits: |
|
612 | 612 | repo_name = commit['repo'] |
|
613 | 613 | unique_repos.setdefault(repo_name, []).append(commit) |
|
614 | 614 | |
|
615 | 615 | for _repo, commits in unique_repos.items(): |
|
616 | 616 | for commit in commits: |
|
617 | 617 | res.append(commit) |
|
618 | 618 | elif prefix_match: |
|
619 | 619 | has_specialized_search = True |
|
620 | 620 | res.append(no_match('No matching commits found')) |
|
621 | 621 | |
|
622 | 622 | # FTS file: type search |
|
623 | 623 | if not prefix_match: |
|
624 | 624 | paths, prefix_match = self._get_path_list( |
|
625 | 625 | c.auth_user, searcher, query, repo, repo_group) |
|
626 | 626 | if paths: |
|
627 | 627 | has_specialized_search = True |
|
628 | 628 | unique_repos = collections.OrderedDict() |
|
629 | 629 | for path in paths: |
|
630 | 630 | repo_name = path['repo'] |
|
631 | 631 | unique_repos.setdefault(repo_name, []).append(path) |
|
632 | 632 | |
|
633 | 633 | for repo, paths in unique_repos.items(): |
|
634 | 634 | for path in paths: |
|
635 | 635 | res.append(path) |
|
636 | 636 | elif prefix_match: |
|
637 | 637 | has_specialized_search = True |
|
638 | 638 | res.append(no_match('No matching files found')) |
|
639 | 639 | |
|
640 | 640 | # main suggestions |
|
641 | 641 | if not has_specialized_search: |
|
642 | 642 | repo_group_name = '' |
|
643 | 643 | if repo_group: |
|
644 | 644 | repo_group_name = repo_group.group_name |
|
645 | 645 | |
|
646 | 646 | for _q in self._get_default_search_queries(self.request.GET, searcher, query): |
|
647 | 647 | res.append(_q) |
|
648 | 648 | |
|
649 | 649 | repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name) |
|
650 | 650 | for serialized_repo_group in repo_groups: |
|
651 | 651 | res.append(serialized_repo_group) |
|
652 | 652 | |
|
653 | 653 | repos = self._get_repo_list(query, repo_group_name=repo_group_name) |
|
654 | 654 | for serialized_repo in repos: |
|
655 | 655 | res.append(serialized_repo) |
|
656 | 656 | |
|
657 | 657 | if not repos and not repo_groups: |
|
658 | 658 | res.append(no_match('No matches found')) |
|
659 | 659 | |
|
660 | 660 | return {'suggestions': res} |
|
661 | 661 | |
|
662 | 662 | def _get_groups_and_repos(self, repo_group_id=None): |
|
663 | 663 | # repo groups groups |
|
664 | 664 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
665 | 665 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
666 | 666 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
667 | 667 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
668 | 668 | repo_group_list=repo_group_list_acl, admin=False) |
|
669 | 669 | |
|
670 | 670 | # repositories |
|
671 | 671 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
672 | 672 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
673 | 673 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
674 | 674 | repo_data = RepoModel().get_repos_as_dict( |
|
675 | 675 | repo_list=repo_list_acl, admin=False) |
|
676 | 676 | |
|
677 | 677 | return repo_data, repo_group_data |
|
678 | 678 | |
|
679 | 679 | @LoginRequired() |
|
680 | 680 | @view_config( |
|
681 | 681 | route_name='home', request_method='GET', |
|
682 | 682 | renderer='rhodecode:templates/index.mako') |
|
683 | 683 | def main_page(self): |
|
684 | 684 | c = self.load_default_context() |
|
685 | 685 | c.repo_group = None |
|
686 | 686 | |
|
687 | 687 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
688 | 688 | # json used to render the grids |
|
689 | 689 | c.repos_data = json.dumps(repo_data) |
|
690 | 690 | c.repo_groups_data = json.dumps(repo_group_data) |
|
691 | 691 | |
|
692 | 692 | return self._get_template_context(c) |
|
693 | 693 | |
|
694 | 694 | @LoginRequired() |
|
695 | 695 | @HasRepoGroupPermissionAnyDecorator( |
|
696 | 696 | 'group.read', 'group.write', 'group.admin') |
|
697 | 697 | @view_config( |
|
698 | 698 | route_name='repo_group_home', request_method='GET', |
|
699 | 699 | renderer='rhodecode:templates/index_repo_group.mako') |
|
700 | 700 | @view_config( |
|
701 | 701 | route_name='repo_group_home_slash', request_method='GET', |
|
702 | 702 | renderer='rhodecode:templates/index_repo_group.mako') |
|
703 | 703 | def repo_group_main_page(self): |
|
704 | 704 | c = self.load_default_context() |
|
705 | 705 | c.repo_group = self.request.db_repo_group |
|
706 | 706 | repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) |
|
707 | 707 | |
|
708 | 708 | # update every 5 min |
|
709 | 709 | if self.request.db_repo_group.last_commit_cache_update_diff > 60 * 5: |
|
710 | 710 | self.request.db_repo_group.update_commit_cache() |
|
711 | 711 | |
|
712 | 712 | # json used to render the grids |
|
713 | 713 | c.repos_data = json.dumps(repo_data) |
|
714 | 714 | c.repo_groups_data = json.dumps(repo_group_data) |
|
715 | 715 | |
|
716 | 716 | return self._get_template_context(c) |
|
717 | 717 | |
|
718 | 718 | @LoginRequired() |
|
719 | 719 | @CSRFRequired() |
|
720 | 720 | @view_config( |
|
721 | 721 | route_name='markup_preview', request_method='POST', |
|
722 | 722 | renderer='string', xhr=True) |
|
723 | 723 | def markup_preview(self): |
|
724 | 724 | # Technically a CSRF token is not needed as no state changes with this |
|
725 | 725 | # call. However, as this is a POST is better to have it, so automated |
|
726 | 726 | # tools don't flag it as potential CSRF. |
|
727 | 727 | # Post is required because the payload could be bigger than the maximum |
|
728 | 728 | # allowed by GET. |
|
729 | 729 | |
|
730 | 730 | text = self.request.POST.get('text') |
|
731 | 731 | renderer = self.request.POST.get('renderer') or 'rst' |
|
732 | 732 | if text: |
|
733 | 733 | return h.render(text, renderer=renderer, mentions=True) |
|
734 | 734 | return '' |
|
735 | 735 | |
|
736 | 736 | @LoginRequired() |
|
737 | 737 | @CSRFRequired() |
|
738 | 738 | @view_config( |
|
739 | 739 | route_name='store_user_session_value', request_method='POST', |
|
740 | 740 | renderer='string', xhr=True) |
|
741 | 741 | def store_user_session_attr(self): |
|
742 | 742 | key = self.request.POST.get('key') |
|
743 | 743 | val = self.request.POST.get('val') |
|
744 | 744 | |
|
745 | 745 | existing_value = self.request.session.get(key) |
|
746 | 746 | if existing_value != val: |
|
747 | 747 | self.request.session[key] = val |
|
748 | 748 | |
|
749 | 749 | return 'stored:{}:{}'.format(key, val) |
@@ -1,387 +1,387 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | import itertools |
|
24 | 24 | |
|
25 | 25 | from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed |
|
26 | 26 | |
|
27 | 27 | from pyramid.view import view_config |
|
28 | 28 | from pyramid.httpexceptions import HTTPBadRequest |
|
29 | 29 | from pyramid.response import Response |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | |
|
32 | 32 | from rhodecode.apps._base import BaseAppView |
|
33 | 33 | from rhodecode.model.db import ( |
|
34 | 34 | or_, joinedload, Repository, UserLog, UserFollowing, User, UserApiKeys) |
|
35 | 35 | from rhodecode.model.meta import Session |
|
36 | 36 | import rhodecode.lib.helpers as h |
|
37 | 37 | from rhodecode.lib.helpers import Page |
|
38 | 38 | from rhodecode.lib.user_log_filter import user_log_filter |
|
39 | 39 | from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired, HasRepoPermissionAny |
|
40 | 40 | from rhodecode.lib.utils2 import safe_int, AttributeDict, md5_safe |
|
41 | 41 | from rhodecode.model.scm import ScmModel |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class JournalView(BaseAppView): |
|
47 | 47 | |
|
48 | 48 | def load_default_context(self): |
|
49 | 49 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
50 | 50 | |
|
51 | 51 | self._load_defaults(c.rhodecode_name) |
|
52 | 52 | |
|
53 | 53 | # TODO(marcink): what is this, why we need a global register ? |
|
54 | 54 | c.search_term = self.request.GET.get('filter') or '' |
|
55 | 55 | return c |
|
56 | 56 | |
|
57 | 57 | def _get_config(self, rhodecode_name): |
|
58 | 58 | import rhodecode |
|
59 | 59 | config = rhodecode.CONFIG |
|
60 | 60 | |
|
61 | 61 | return { |
|
62 | 62 | 'language': 'en-us', |
|
63 | 63 | 'feed_ttl': '5', # TTL of feed, |
|
64 | 64 | 'feed_items_per_page': |
|
65 | 65 | safe_int(config.get('rss_items_per_page', 20)), |
|
66 | 66 | 'rhodecode_name': rhodecode_name |
|
67 | 67 | } |
|
68 | 68 | |
|
69 | 69 | def _load_defaults(self, rhodecode_name): |
|
70 | 70 | config = self._get_config(rhodecode_name) |
|
71 | 71 | # common values for feeds |
|
72 | 72 | self.language = config["language"] |
|
73 | 73 | self.ttl = config["feed_ttl"] |
|
74 | 74 | self.feed_items_per_page = config['feed_items_per_page'] |
|
75 | 75 | self.rhodecode_name = config['rhodecode_name'] |
|
76 | 76 | |
|
77 | 77 | def _get_daily_aggregate(self, journal): |
|
78 | 78 | groups = [] |
|
79 | 79 | for k, g in itertools.groupby(journal, lambda x: x.action_as_day): |
|
80 | 80 | user_group = [] |
|
81 | 81 | # groupby username if it's a present value, else |
|
82 | 82 | # fallback to journal username |
|
83 | 83 | for _, g2 in itertools.groupby( |
|
84 | 84 | list(g), lambda x: x.user.username if x.user else x.username): |
|
85 | 85 | l = list(g2) |
|
86 | 86 | user_group.append((l[0].user, l)) |
|
87 | 87 | |
|
88 | 88 | groups.append((k, user_group,)) |
|
89 | 89 | |
|
90 | 90 | return groups |
|
91 | 91 | |
|
92 | 92 | def _get_journal_data(self, following_repos, search_term): |
|
93 | 93 | repo_ids = [x.follows_repository.repo_id for x in following_repos |
|
94 | 94 | if x.follows_repository is not None] |
|
95 | 95 | user_ids = [x.follows_user.user_id for x in following_repos |
|
96 | 96 | if x.follows_user is not None] |
|
97 | 97 | |
|
98 | 98 | filtering_criterion = None |
|
99 | 99 | |
|
100 | 100 | if repo_ids and user_ids: |
|
101 | 101 | filtering_criterion = or_(UserLog.repository_id.in_(repo_ids), |
|
102 | 102 | UserLog.user_id.in_(user_ids)) |
|
103 | 103 | if repo_ids and not user_ids: |
|
104 | 104 | filtering_criterion = UserLog.repository_id.in_(repo_ids) |
|
105 | 105 | if not repo_ids and user_ids: |
|
106 | 106 | filtering_criterion = UserLog.user_id.in_(user_ids) |
|
107 | 107 | if filtering_criterion is not None: |
|
108 | 108 | journal = Session().query(UserLog)\ |
|
109 | 109 | .options(joinedload(UserLog.user))\ |
|
110 | 110 | .options(joinedload(UserLog.repository)) |
|
111 | 111 | # filter |
|
112 | 112 | try: |
|
113 | 113 | journal = user_log_filter(journal, search_term) |
|
114 | 114 | except Exception: |
|
115 | 115 | # we want this to crash for now |
|
116 | 116 | raise |
|
117 | 117 | journal = journal.filter(filtering_criterion)\ |
|
118 | 118 | .order_by(UserLog.action_date.desc()) |
|
119 | 119 | else: |
|
120 | 120 | journal = [] |
|
121 | 121 | |
|
122 | 122 | return journal |
|
123 | 123 | |
|
124 | 124 | def feed_uid(self, entry_id): |
|
125 | 125 | return '{}:{}'.format('journal', md5_safe(entry_id)) |
|
126 | 126 | |
|
127 | 127 | def _atom_feed(self, repos, search_term, public=True): |
|
128 | 128 | _ = self.request.translate |
|
129 | 129 | journal = self._get_journal_data(repos, search_term) |
|
130 | 130 | if public: |
|
131 | 131 | _link = h.route_url('journal_public_atom') |
|
132 | 132 | _desc = '%s %s %s' % (self.rhodecode_name, _('public journal'), |
|
133 | 133 | 'atom feed') |
|
134 | 134 | else: |
|
135 | 135 | _link = h.route_url('journal_atom') |
|
136 | 136 | _desc = '%s %s %s' % (self.rhodecode_name, _('journal'), 'atom feed') |
|
137 | 137 | |
|
138 | 138 | feed = Atom1Feed( |
|
139 | 139 | title=_desc, link=_link, description=_desc, |
|
140 | 140 | language=self.language, ttl=self.ttl) |
|
141 | 141 | |
|
142 | 142 | for entry in journal[:self.feed_items_per_page]: |
|
143 | 143 | user = entry.user |
|
144 | 144 | if user is None: |
|
145 | 145 | # fix deleted users |
|
146 | 146 | user = AttributeDict({'short_contact': entry.username, |
|
147 | 147 | 'email': '', |
|
148 | 148 | 'full_contact': ''}) |
|
149 | 149 | action, action_extra, ico = h.action_parser( |
|
150 | 150 | self.request, entry, feed=True) |
|
151 | 151 | title = "%s - %s %s" % (user.short_contact, action(), |
|
152 | 152 | entry.repository.repo_name) |
|
153 | 153 | desc = action_extra() |
|
154 | 154 | _url = h.route_url('home') |
|
155 | 155 | if entry.repository is not None: |
|
156 |
_url = h.route_url('repo_c |
|
|
156 | _url = h.route_url('repo_commits', | |
|
157 | 157 | repo_name=entry.repository.repo_name) |
|
158 | 158 | |
|
159 | 159 | feed.add_item( |
|
160 | 160 | unique_id=self.feed_uid(entry.user_log_id), |
|
161 | 161 | title=title, |
|
162 | 162 | pubdate=entry.action_date, |
|
163 | 163 | link=_url, |
|
164 | 164 | author_email=user.email, |
|
165 | 165 | author_name=user.full_contact, |
|
166 | 166 | description=desc) |
|
167 | 167 | |
|
168 | 168 | response = Response(feed.writeString('utf-8')) |
|
169 | 169 | response.content_type = feed.mime_type |
|
170 | 170 | return response |
|
171 | 171 | |
|
172 | 172 | def _rss_feed(self, repos, search_term, public=True): |
|
173 | 173 | _ = self.request.translate |
|
174 | 174 | journal = self._get_journal_data(repos, search_term) |
|
175 | 175 | if public: |
|
176 | 176 | _link = h.route_url('journal_public_atom') |
|
177 | 177 | _desc = '%s %s %s' % ( |
|
178 | 178 | self.rhodecode_name, _('public journal'), 'rss feed') |
|
179 | 179 | else: |
|
180 | 180 | _link = h.route_url('journal_atom') |
|
181 | 181 | _desc = '%s %s %s' % ( |
|
182 | 182 | self.rhodecode_name, _('journal'), 'rss feed') |
|
183 | 183 | |
|
184 | 184 | feed = Rss201rev2Feed( |
|
185 | 185 | title=_desc, link=_link, description=_desc, |
|
186 | 186 | language=self.language, ttl=self.ttl) |
|
187 | 187 | |
|
188 | 188 | for entry in journal[:self.feed_items_per_page]: |
|
189 | 189 | user = entry.user |
|
190 | 190 | if user is None: |
|
191 | 191 | # fix deleted users |
|
192 | 192 | user = AttributeDict({'short_contact': entry.username, |
|
193 | 193 | 'email': '', |
|
194 | 194 | 'full_contact': ''}) |
|
195 | 195 | action, action_extra, ico = h.action_parser( |
|
196 | 196 | self.request, entry, feed=True) |
|
197 | 197 | title = "%s - %s %s" % (user.short_contact, action(), |
|
198 | 198 | entry.repository.repo_name) |
|
199 | 199 | desc = action_extra() |
|
200 | 200 | _url = h.route_url('home') |
|
201 | 201 | if entry.repository is not None: |
|
202 |
_url = h.route_url('repo_c |
|
|
202 | _url = h.route_url('repo_commits', | |
|
203 | 203 | repo_name=entry.repository.repo_name) |
|
204 | 204 | |
|
205 | 205 | feed.add_item( |
|
206 | 206 | unique_id=self.feed_uid(entry.user_log_id), |
|
207 | 207 | title=title, |
|
208 | 208 | pubdate=entry.action_date, |
|
209 | 209 | link=_url, |
|
210 | 210 | author_email=user.email, |
|
211 | 211 | author_name=user.full_contact, |
|
212 | 212 | description=desc) |
|
213 | 213 | |
|
214 | 214 | response = Response(feed.writeString('utf-8')) |
|
215 | 215 | response.content_type = feed.mime_type |
|
216 | 216 | return response |
|
217 | 217 | |
|
218 | 218 | @LoginRequired() |
|
219 | 219 | @NotAnonymous() |
|
220 | 220 | @view_config( |
|
221 | 221 | route_name='journal', request_method='GET', |
|
222 | 222 | renderer=None) |
|
223 | 223 | def journal(self): |
|
224 | 224 | c = self.load_default_context() |
|
225 | 225 | |
|
226 | 226 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
227 | 227 | c.user = User.get(self._rhodecode_user.user_id) |
|
228 | 228 | following = Session().query(UserFollowing)\ |
|
229 | 229 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
230 | 230 | .options(joinedload(UserFollowing.follows_repository))\ |
|
231 | 231 | .all() |
|
232 | 232 | |
|
233 | 233 | journal = self._get_journal_data(following, c.search_term) |
|
234 | 234 | |
|
235 | 235 | def url_generator(**kw): |
|
236 | 236 | query_params = { |
|
237 | 237 | 'filter': c.search_term |
|
238 | 238 | } |
|
239 | 239 | query_params.update(kw) |
|
240 | 240 | return self.request.current_route_path(_query=query_params) |
|
241 | 241 | |
|
242 | 242 | c.journal_pager = Page( |
|
243 | 243 | journal, page=p, items_per_page=20, url=url_generator) |
|
244 | 244 | c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) |
|
245 | 245 | |
|
246 | 246 | c.journal_data = render( |
|
247 | 247 | 'rhodecode:templates/journal/journal_data.mako', |
|
248 | 248 | self._get_template_context(c), self.request) |
|
249 | 249 | |
|
250 | 250 | if self.request.is_xhr: |
|
251 | 251 | return Response(c.journal_data) |
|
252 | 252 | |
|
253 | 253 | html = render( |
|
254 | 254 | 'rhodecode:templates/journal/journal.mako', |
|
255 | 255 | self._get_template_context(c), self.request) |
|
256 | 256 | return Response(html) |
|
257 | 257 | |
|
258 | 258 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
259 | 259 | @NotAnonymous() |
|
260 | 260 | @view_config( |
|
261 | 261 | route_name='journal_atom', request_method='GET', |
|
262 | 262 | renderer=None) |
|
263 | 263 | def journal_atom(self): |
|
264 | 264 | """ |
|
265 | 265 | Produce an atom-1.0 feed via feedgenerator module |
|
266 | 266 | """ |
|
267 | 267 | c = self.load_default_context() |
|
268 | 268 | following_repos = Session().query(UserFollowing)\ |
|
269 | 269 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
270 | 270 | .options(joinedload(UserFollowing.follows_repository))\ |
|
271 | 271 | .all() |
|
272 | 272 | return self._atom_feed(following_repos, c.search_term, public=False) |
|
273 | 273 | |
|
274 | 274 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
275 | 275 | @NotAnonymous() |
|
276 | 276 | @view_config( |
|
277 | 277 | route_name='journal_rss', request_method='GET', |
|
278 | 278 | renderer=None) |
|
279 | 279 | def journal_rss(self): |
|
280 | 280 | """ |
|
281 | 281 | Produce an rss feed via feedgenerator module |
|
282 | 282 | """ |
|
283 | 283 | c = self.load_default_context() |
|
284 | 284 | following_repos = Session().query(UserFollowing)\ |
|
285 | 285 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
286 | 286 | .options(joinedload(UserFollowing.follows_repository))\ |
|
287 | 287 | .all() |
|
288 | 288 | return self._rss_feed(following_repos, c.search_term, public=False) |
|
289 | 289 | |
|
290 | 290 | @LoginRequired() |
|
291 | 291 | @NotAnonymous() |
|
292 | 292 | @CSRFRequired() |
|
293 | 293 | @view_config( |
|
294 | 294 | route_name='toggle_following', request_method='POST', |
|
295 | 295 | renderer='json_ext') |
|
296 | 296 | def toggle_following(self): |
|
297 | 297 | user_id = self.request.POST.get('follows_user_id') |
|
298 | 298 | if user_id: |
|
299 | 299 | try: |
|
300 | 300 | ScmModel().toggle_following_user(user_id, self._rhodecode_user.user_id) |
|
301 | 301 | Session().commit() |
|
302 | 302 | return 'ok' |
|
303 | 303 | except Exception: |
|
304 | 304 | raise HTTPBadRequest() |
|
305 | 305 | |
|
306 | 306 | repo_id = self.request.POST.get('follows_repo_id') |
|
307 | 307 | repo = Repository.get_or_404(repo_id) |
|
308 | 308 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
309 | 309 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'RepoWatch check') |
|
310 | 310 | if repo and has_perm: |
|
311 | 311 | try: |
|
312 | 312 | ScmModel().toggle_following_repo(repo_id, self._rhodecode_user.user_id) |
|
313 | 313 | Session().commit() |
|
314 | 314 | return 'ok' |
|
315 | 315 | except Exception: |
|
316 | 316 | raise HTTPBadRequest() |
|
317 | 317 | |
|
318 | 318 | raise HTTPBadRequest() |
|
319 | 319 | |
|
320 | 320 | @LoginRequired() |
|
321 | 321 | @view_config( |
|
322 | 322 | route_name='journal_public', request_method='GET', |
|
323 | 323 | renderer=None) |
|
324 | 324 | def journal_public(self): |
|
325 | 325 | c = self.load_default_context() |
|
326 | 326 | # Return a rendered template |
|
327 | 327 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
328 | 328 | |
|
329 | 329 | c.following = Session().query(UserFollowing)\ |
|
330 | 330 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
331 | 331 | .options(joinedload(UserFollowing.follows_repository))\ |
|
332 | 332 | .all() |
|
333 | 333 | |
|
334 | 334 | journal = self._get_journal_data(c.following, c.search_term) |
|
335 | 335 | |
|
336 | 336 | def url_generator(**kw): |
|
337 | 337 | query_params = {} |
|
338 | 338 | query_params.update(kw) |
|
339 | 339 | return self.request.current_route_path(_query=query_params) |
|
340 | 340 | |
|
341 | 341 | c.journal_pager = Page( |
|
342 | 342 | journal, page=p, items_per_page=20, url=url_generator) |
|
343 | 343 | c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) |
|
344 | 344 | |
|
345 | 345 | c.journal_data = render( |
|
346 | 346 | 'rhodecode:templates/journal/journal_data.mako', |
|
347 | 347 | self._get_template_context(c), self.request) |
|
348 | 348 | |
|
349 | 349 | if self.request.is_xhr: |
|
350 | 350 | return Response(c.journal_data) |
|
351 | 351 | |
|
352 | 352 | html = render( |
|
353 | 353 | 'rhodecode:templates/journal/public_journal.mako', |
|
354 | 354 | self._get_template_context(c), self.request) |
|
355 | 355 | return Response(html) |
|
356 | 356 | |
|
357 | 357 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
358 | 358 | @view_config( |
|
359 | 359 | route_name='journal_public_atom', request_method='GET', |
|
360 | 360 | renderer=None) |
|
361 | 361 | def journal_public_atom(self): |
|
362 | 362 | """ |
|
363 | 363 | Produce an atom-1.0 feed via feedgenerator module |
|
364 | 364 | """ |
|
365 | 365 | c = self.load_default_context() |
|
366 | 366 | following_repos = Session().query(UserFollowing)\ |
|
367 | 367 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
368 | 368 | .options(joinedload(UserFollowing.follows_repository))\ |
|
369 | 369 | .all() |
|
370 | 370 | |
|
371 | 371 | return self._atom_feed(following_repos, c.search_term) |
|
372 | 372 | |
|
373 | 373 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
374 | 374 | @view_config( |
|
375 | 375 | route_name='journal_public_rss', request_method='GET', |
|
376 | 376 | renderer=None) |
|
377 | 377 | def journal_public_rss(self): |
|
378 | 378 | """ |
|
379 | 379 | Produce an rss2 feed via feedgenerator module |
|
380 | 380 | """ |
|
381 | 381 | c = self.load_default_context() |
|
382 | 382 | following_repos = Session().query(UserFollowing)\ |
|
383 | 383 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
384 | 384 | .options(joinedload(UserFollowing.follows_repository))\ |
|
385 | 385 | .all() |
|
386 | 386 | |
|
387 | 387 | return self._rss_feed(following_repos, c.search_term) |
@@ -1,492 +1,500 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # repo creating checks, special cases that aren't repo routes |
|
26 | 26 | config.add_route( |
|
27 | 27 | name='repo_creating', |
|
28 | 28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
29 | 29 | |
|
30 | 30 | config.add_route( |
|
31 | 31 | name='repo_creating_check', |
|
32 | 32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
33 | 33 | |
|
34 | 34 | # Summary |
|
35 | 35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
36 | 36 | # all pattern |
|
37 | 37 | config.add_route( |
|
38 | 38 | name='repo_summary_explicit', |
|
39 | 39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
40 | 40 | config.add_route( |
|
41 | 41 | name='repo_summary_commits', |
|
42 | 42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
43 | 43 | |
|
44 | 44 | # Commits |
|
45 | 45 | config.add_route( |
|
46 | 46 | name='repo_commit', |
|
47 | 47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
48 | 48 | |
|
49 | 49 | config.add_route( |
|
50 | 50 | name='repo_commit_children', |
|
51 | 51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
52 | 52 | |
|
53 | 53 | config.add_route( |
|
54 | 54 | name='repo_commit_parents', |
|
55 | 55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
56 | 56 | |
|
57 | 57 | config.add_route( |
|
58 | 58 | name='repo_commit_raw', |
|
59 | 59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
60 | 60 | |
|
61 | 61 | config.add_route( |
|
62 | 62 | name='repo_commit_patch', |
|
63 | 63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
64 | 64 | |
|
65 | 65 | config.add_route( |
|
66 | 66 | name='repo_commit_download', |
|
67 | 67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
68 | 68 | |
|
69 | 69 | config.add_route( |
|
70 | 70 | name='repo_commit_data', |
|
71 | 71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
72 | 72 | |
|
73 | 73 | config.add_route( |
|
74 | 74 | name='repo_commit_comment_create', |
|
75 | 75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
76 | 76 | |
|
77 | 77 | config.add_route( |
|
78 | 78 | name='repo_commit_comment_preview', |
|
79 | 79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
80 | 80 | |
|
81 | 81 | config.add_route( |
|
82 | 82 | name='repo_commit_comment_delete', |
|
83 | 83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
84 | 84 | |
|
85 | 85 | # still working url for backward compat. |
|
86 | 86 | config.add_route( |
|
87 | 87 | name='repo_commit_raw_deprecated', |
|
88 | 88 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
89 | 89 | |
|
90 | 90 | # Files |
|
91 | 91 | config.add_route( |
|
92 | 92 | name='repo_archivefile', |
|
93 | 93 | pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True) |
|
94 | 94 | |
|
95 | 95 | config.add_route( |
|
96 | 96 | name='repo_files_diff', |
|
97 | 97 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
98 | 98 | config.add_route( # legacy route to make old links work |
|
99 | 99 | name='repo_files_diff_2way_redirect', |
|
100 | 100 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
101 | 101 | |
|
102 | 102 | config.add_route( |
|
103 | 103 | name='repo_files', |
|
104 | 104 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
105 | 105 | config.add_route( |
|
106 | 106 | name='repo_files:default_path', |
|
107 | 107 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
108 | 108 | config.add_route( |
|
109 | 109 | name='repo_files:default_commit', |
|
110 | 110 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
111 | 111 | |
|
112 | 112 | config.add_route( |
|
113 | 113 | name='repo_files:rendered', |
|
114 | 114 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
115 | 115 | |
|
116 | 116 | config.add_route( |
|
117 | 117 | name='repo_files:annotated', |
|
118 | 118 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
119 | 119 | config.add_route( |
|
120 | 120 | name='repo_files:annotated_previous', |
|
121 | 121 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
122 | 122 | |
|
123 | 123 | config.add_route( |
|
124 | 124 | name='repo_nodetree_full', |
|
125 | 125 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
126 | 126 | config.add_route( |
|
127 | 127 | name='repo_nodetree_full:default_path', |
|
128 | 128 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
129 | 129 | |
|
130 | 130 | config.add_route( |
|
131 | 131 | name='repo_files_nodelist', |
|
132 | 132 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
133 | 133 | |
|
134 | 134 | config.add_route( |
|
135 | 135 | name='repo_file_raw', |
|
136 | 136 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
137 | 137 | |
|
138 | 138 | config.add_route( |
|
139 | 139 | name='repo_file_download', |
|
140 | 140 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
141 | 141 | config.add_route( # backward compat to keep old links working |
|
142 | 142 | name='repo_file_download:legacy', |
|
143 | 143 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
144 | 144 | repo_route=True) |
|
145 | 145 | |
|
146 | 146 | config.add_route( |
|
147 | 147 | name='repo_file_history', |
|
148 | 148 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
149 | 149 | |
|
150 | 150 | config.add_route( |
|
151 | 151 | name='repo_file_authors', |
|
152 | 152 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
153 | 153 | |
|
154 | 154 | config.add_route( |
|
155 | 155 | name='repo_files_remove_file', |
|
156 | 156 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
157 | 157 | repo_route=True) |
|
158 | 158 | config.add_route( |
|
159 | 159 | name='repo_files_delete_file', |
|
160 | 160 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
161 | 161 | repo_route=True) |
|
162 | 162 | config.add_route( |
|
163 | 163 | name='repo_files_edit_file', |
|
164 | 164 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
165 | 165 | repo_route=True) |
|
166 | 166 | config.add_route( |
|
167 | 167 | name='repo_files_update_file', |
|
168 | 168 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
169 | 169 | repo_route=True) |
|
170 | 170 | config.add_route( |
|
171 | 171 | name='repo_files_add_file', |
|
172 | 172 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
173 | 173 | repo_route=True) |
|
174 | 174 | config.add_route( |
|
175 | 175 | name='repo_files_upload_file', |
|
176 | 176 | pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}', |
|
177 | 177 | repo_route=True) |
|
178 | 178 | config.add_route( |
|
179 | 179 | name='repo_files_create_file', |
|
180 | 180 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
181 | 181 | repo_route=True) |
|
182 | 182 | |
|
183 | 183 | # Refs data |
|
184 | 184 | config.add_route( |
|
185 | 185 | name='repo_refs_data', |
|
186 | 186 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
187 | 187 | |
|
188 | 188 | config.add_route( |
|
189 | 189 | name='repo_refs_changelog_data', |
|
190 | 190 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
191 | 191 | |
|
192 | 192 | config.add_route( |
|
193 | 193 | name='repo_stats', |
|
194 | 194 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
195 | 195 | |
|
196 | # Changelog | |
|
196 | # Commits | |
|
197 | config.add_route( | |
|
198 | name='repo_commits', | |
|
199 | pattern='/{repo_name:.*?[^/]}/commits', repo_route=True) | |
|
200 | config.add_route( | |
|
201 | name='repo_commits_file', | |
|
202 | pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True) | |
|
203 | config.add_route( | |
|
204 | name='repo_commits_elements', | |
|
205 | pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True) | |
|
206 | config.add_route( | |
|
207 | name='repo_commits_elements_file', | |
|
208 | pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True) | |
|
209 | ||
|
210 | # Changelog (old deprecated name for commits page) | |
|
197 | 211 | config.add_route( |
|
198 | 212 | name='repo_changelog', |
|
199 | 213 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
200 | 214 | config.add_route( |
|
201 | 215 | name='repo_changelog_file', |
|
202 | 216 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
203 | config.add_route( | |
|
204 | name='repo_changelog_elements', | |
|
205 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) | |
|
206 | config.add_route( | |
|
207 | name='repo_changelog_elements_file', | |
|
208 | pattern='/{repo_name:.*?[^/]}/changelog_elements/{commit_id}/{f_path:.*}', repo_route=True) | |
|
209 | 217 | |
|
210 | 218 | # Compare |
|
211 | 219 | config.add_route( |
|
212 | 220 | name='repo_compare_select', |
|
213 | 221 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
214 | 222 | |
|
215 | 223 | config.add_route( |
|
216 | 224 | name='repo_compare', |
|
217 | 225 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
218 | 226 | |
|
219 | 227 | # Tags |
|
220 | 228 | config.add_route( |
|
221 | 229 | name='tags_home', |
|
222 | 230 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
223 | 231 | |
|
224 | 232 | # Branches |
|
225 | 233 | config.add_route( |
|
226 | 234 | name='branches_home', |
|
227 | 235 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
228 | 236 | |
|
229 | 237 | # Bookmarks |
|
230 | 238 | config.add_route( |
|
231 | 239 | name='bookmarks_home', |
|
232 | 240 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
233 | 241 | |
|
234 | 242 | # Forks |
|
235 | 243 | config.add_route( |
|
236 | 244 | name='repo_fork_new', |
|
237 | 245 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
238 | 246 | repo_forbid_when_archived=True, |
|
239 | 247 | repo_accepted_types=['hg', 'git']) |
|
240 | 248 | |
|
241 | 249 | config.add_route( |
|
242 | 250 | name='repo_fork_create', |
|
243 | 251 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
244 | 252 | repo_forbid_when_archived=True, |
|
245 | 253 | repo_accepted_types=['hg', 'git']) |
|
246 | 254 | |
|
247 | 255 | config.add_route( |
|
248 | 256 | name='repo_forks_show_all', |
|
249 | 257 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
250 | 258 | repo_accepted_types=['hg', 'git']) |
|
251 | 259 | config.add_route( |
|
252 | 260 | name='repo_forks_data', |
|
253 | 261 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
254 | 262 | repo_accepted_types=['hg', 'git']) |
|
255 | 263 | |
|
256 | 264 | # Pull Requests |
|
257 | 265 | config.add_route( |
|
258 | 266 | name='pullrequest_show', |
|
259 | 267 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
260 | 268 | repo_route=True) |
|
261 | 269 | |
|
262 | 270 | config.add_route( |
|
263 | 271 | name='pullrequest_show_all', |
|
264 | 272 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
265 | 273 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
266 | 274 | |
|
267 | 275 | config.add_route( |
|
268 | 276 | name='pullrequest_show_all_data', |
|
269 | 277 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
270 | 278 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
271 | 279 | |
|
272 | 280 | config.add_route( |
|
273 | 281 | name='pullrequest_repo_refs', |
|
274 | 282 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
275 | 283 | repo_route=True) |
|
276 | 284 | |
|
277 | 285 | config.add_route( |
|
278 | 286 | name='pullrequest_repo_targets', |
|
279 | 287 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets', |
|
280 | 288 | repo_route=True) |
|
281 | 289 | |
|
282 | 290 | config.add_route( |
|
283 | 291 | name='pullrequest_new', |
|
284 | 292 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
285 | 293 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
286 | 294 | repo_forbid_when_archived=True) |
|
287 | 295 | |
|
288 | 296 | config.add_route( |
|
289 | 297 | name='pullrequest_create', |
|
290 | 298 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
291 | 299 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
292 | 300 | repo_forbid_when_archived=True) |
|
293 | 301 | |
|
294 | 302 | config.add_route( |
|
295 | 303 | name='pullrequest_update', |
|
296 | 304 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
297 | 305 | repo_route=True, repo_forbid_when_archived=True) |
|
298 | 306 | |
|
299 | 307 | config.add_route( |
|
300 | 308 | name='pullrequest_merge', |
|
301 | 309 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
302 | 310 | repo_route=True, repo_forbid_when_archived=True) |
|
303 | 311 | |
|
304 | 312 | config.add_route( |
|
305 | 313 | name='pullrequest_delete', |
|
306 | 314 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
307 | 315 | repo_route=True, repo_forbid_when_archived=True) |
|
308 | 316 | |
|
309 | 317 | config.add_route( |
|
310 | 318 | name='pullrequest_comment_create', |
|
311 | 319 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
312 | 320 | repo_route=True) |
|
313 | 321 | |
|
314 | 322 | config.add_route( |
|
315 | 323 | name='pullrequest_comment_delete', |
|
316 | 324 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
317 | 325 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
318 | 326 | |
|
319 | 327 | # Artifacts, (EE feature) |
|
320 | 328 | config.add_route( |
|
321 | 329 | name='repo_artifacts_list', |
|
322 | 330 | pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True) |
|
323 | 331 | |
|
324 | 332 | # Settings |
|
325 | 333 | config.add_route( |
|
326 | 334 | name='edit_repo', |
|
327 | 335 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
328 | 336 | # update is POST on edit_repo |
|
329 | 337 | |
|
330 | 338 | # Settings advanced |
|
331 | 339 | config.add_route( |
|
332 | 340 | name='edit_repo_advanced', |
|
333 | 341 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
334 | 342 | config.add_route( |
|
335 | 343 | name='edit_repo_advanced_archive', |
|
336 | 344 | pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True) |
|
337 | 345 | config.add_route( |
|
338 | 346 | name='edit_repo_advanced_delete', |
|
339 | 347 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
340 | 348 | config.add_route( |
|
341 | 349 | name='edit_repo_advanced_locking', |
|
342 | 350 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
343 | 351 | config.add_route( |
|
344 | 352 | name='edit_repo_advanced_journal', |
|
345 | 353 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
346 | 354 | config.add_route( |
|
347 | 355 | name='edit_repo_advanced_fork', |
|
348 | 356 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
349 | 357 | |
|
350 | 358 | config.add_route( |
|
351 | 359 | name='edit_repo_advanced_hooks', |
|
352 | 360 | pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True) |
|
353 | 361 | |
|
354 | 362 | # Caches |
|
355 | 363 | config.add_route( |
|
356 | 364 | name='edit_repo_caches', |
|
357 | 365 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
358 | 366 | |
|
359 | 367 | # Permissions |
|
360 | 368 | config.add_route( |
|
361 | 369 | name='edit_repo_perms', |
|
362 | 370 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
363 | 371 | |
|
364 | 372 | # Permissions Branch (EE feature) |
|
365 | 373 | config.add_route( |
|
366 | 374 | name='edit_repo_perms_branch', |
|
367 | 375 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True) |
|
368 | 376 | config.add_route( |
|
369 | 377 | name='edit_repo_perms_branch_delete', |
|
370 | 378 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete', |
|
371 | 379 | repo_route=True) |
|
372 | 380 | |
|
373 | 381 | # Maintenance |
|
374 | 382 | config.add_route( |
|
375 | 383 | name='edit_repo_maintenance', |
|
376 | 384 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
377 | 385 | |
|
378 | 386 | config.add_route( |
|
379 | 387 | name='edit_repo_maintenance_execute', |
|
380 | 388 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
381 | 389 | |
|
382 | 390 | # Fields |
|
383 | 391 | config.add_route( |
|
384 | 392 | name='edit_repo_fields', |
|
385 | 393 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) |
|
386 | 394 | config.add_route( |
|
387 | 395 | name='edit_repo_fields_create', |
|
388 | 396 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) |
|
389 | 397 | config.add_route( |
|
390 | 398 | name='edit_repo_fields_delete', |
|
391 | 399 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) |
|
392 | 400 | |
|
393 | 401 | # Locking |
|
394 | 402 | config.add_route( |
|
395 | 403 | name='repo_edit_toggle_locking', |
|
396 | 404 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) |
|
397 | 405 | |
|
398 | 406 | # Remote |
|
399 | 407 | config.add_route( |
|
400 | 408 | name='edit_repo_remote', |
|
401 | 409 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) |
|
402 | 410 | config.add_route( |
|
403 | 411 | name='edit_repo_remote_pull', |
|
404 | 412 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) |
|
405 | 413 | config.add_route( |
|
406 | 414 | name='edit_repo_remote_push', |
|
407 | 415 | pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True) |
|
408 | 416 | |
|
409 | 417 | # Statistics |
|
410 | 418 | config.add_route( |
|
411 | 419 | name='edit_repo_statistics', |
|
412 | 420 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) |
|
413 | 421 | config.add_route( |
|
414 | 422 | name='edit_repo_statistics_reset', |
|
415 | 423 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) |
|
416 | 424 | |
|
417 | 425 | # Issue trackers |
|
418 | 426 | config.add_route( |
|
419 | 427 | name='edit_repo_issuetracker', |
|
420 | 428 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) |
|
421 | 429 | config.add_route( |
|
422 | 430 | name='edit_repo_issuetracker_test', |
|
423 | 431 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) |
|
424 | 432 | config.add_route( |
|
425 | 433 | name='edit_repo_issuetracker_delete', |
|
426 | 434 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) |
|
427 | 435 | config.add_route( |
|
428 | 436 | name='edit_repo_issuetracker_update', |
|
429 | 437 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) |
|
430 | 438 | |
|
431 | 439 | # VCS Settings |
|
432 | 440 | config.add_route( |
|
433 | 441 | name='edit_repo_vcs', |
|
434 | 442 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) |
|
435 | 443 | config.add_route( |
|
436 | 444 | name='edit_repo_vcs_update', |
|
437 | 445 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) |
|
438 | 446 | |
|
439 | 447 | # svn pattern |
|
440 | 448 | config.add_route( |
|
441 | 449 | name='edit_repo_vcs_svn_pattern_delete', |
|
442 | 450 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) |
|
443 | 451 | |
|
444 | 452 | # Repo Review Rules (EE feature) |
|
445 | 453 | config.add_route( |
|
446 | 454 | name='repo_reviewers', |
|
447 | 455 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
448 | 456 | |
|
449 | 457 | config.add_route( |
|
450 | 458 | name='repo_default_reviewers_data', |
|
451 | 459 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
452 | 460 | |
|
453 | 461 | # Repo Automation (EE feature) |
|
454 | 462 | config.add_route( |
|
455 | 463 | name='repo_automation', |
|
456 | 464 | pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True) |
|
457 | 465 | |
|
458 | 466 | # Strip |
|
459 | 467 | config.add_route( |
|
460 | 468 | name='edit_repo_strip', |
|
461 | 469 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
462 | 470 | |
|
463 | 471 | config.add_route( |
|
464 | 472 | name='strip_check', |
|
465 | 473 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
466 | 474 | |
|
467 | 475 | config.add_route( |
|
468 | 476 | name='strip_execute', |
|
469 | 477 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
470 | 478 | |
|
471 | 479 | # Audit logs |
|
472 | 480 | config.add_route( |
|
473 | 481 | name='edit_repo_audit_logs', |
|
474 | 482 | pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True) |
|
475 | 483 | |
|
476 | 484 | # ATOM/RSS Feed |
|
477 | 485 | config.add_route( |
|
478 | 486 | name='rss_feed_home', |
|
479 | 487 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
480 | 488 | |
|
481 | 489 | config.add_route( |
|
482 | 490 | name='atom_feed_home', |
|
483 | 491 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
484 | 492 | |
|
485 | 493 | # NOTE(marcink): needs to be at the end for catch-all |
|
486 | 494 | add_route_with_slash( |
|
487 | 495 | config, |
|
488 | 496 | name='repo_summary', |
|
489 | 497 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
490 | 498 | |
|
491 | 499 | # Scan module for configuration decorators. |
|
492 | 500 | config.scan('.views', ignore='.tests') |
@@ -1,195 +1,213 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | |
|
23 | 23 | import pytest |
|
24 | 24 | |
|
25 | 25 | from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE |
|
26 | 26 | from rhodecode.tests import TestController |
|
27 | 27 | |
|
28 | 28 | MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>') |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def route_path(name, params=None, **kwargs): |
|
32 | 32 | import urllib |
|
33 | 33 | |
|
34 | 34 | base_url = { |
|
35 | 'repo_changelog':'/{repo_name}/changelog', | |
|
36 |
'repo_c |
|
|
37 | 'repo_changelog_elements':'/{repo_name}/changelog_elements', | |
|
35 | 'repo_changelog': '/{repo_name}/changelog', | |
|
36 | 'repo_commits': '/{repo_name}/commits', | |
|
37 | 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}', | |
|
38 | 'repo_commits_elements': '/{repo_name}/commits_elements', | |
|
38 | 39 | }[name].format(**kwargs) |
|
39 | 40 | |
|
40 | 41 | if params: |
|
41 | 42 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
42 | 43 | return base_url |
|
43 | 44 | |
|
44 | 45 | |
|
46 | def assert_commits_on_page(response, indexes): | |
|
47 | found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)] | |
|
48 | assert found_indexes == indexes | |
|
49 | ||
|
50 | ||
|
45 | 51 | class TestChangelogController(TestController): |
|
46 | 52 | |
|
53 | def test_commits_page(self, backend): | |
|
54 | self.log_user() | |
|
55 | response = self.app.get( | |
|
56 | route_path('repo_commits', repo_name=backend.repo_name)) | |
|
57 | ||
|
58 | first_idx = -1 | |
|
59 | last_idx = -DEFAULT_CHANGELOG_SIZE | |
|
60 | self.assert_commit_range_on_page( | |
|
61 | response, first_idx, last_idx, backend) | |
|
62 | ||
|
47 | 63 | def test_changelog(self, backend): |
|
48 | 64 | self.log_user() |
|
49 | 65 | response = self.app.get( |
|
50 | 66 | route_path('repo_changelog', repo_name=backend.repo_name)) |
|
51 | 67 | |
|
52 | 68 | first_idx = -1 |
|
53 | 69 | last_idx = -DEFAULT_CHANGELOG_SIZE |
|
54 | 70 | self.assert_commit_range_on_page( |
|
55 | 71 | response, first_idx, last_idx, backend) |
|
56 | 72 | |
|
57 | 73 | @pytest.mark.backends("hg", "git") |
|
58 | 74 | def test_changelog_filtered_by_branch(self, backend): |
|
59 | 75 | self.log_user() |
|
60 | 76 | self.app.get( |
|
61 | 77 | route_path('repo_changelog', repo_name=backend.repo_name, |
|
62 | 78 | params=dict(branch=backend.default_branch_name)), |
|
63 | 79 | status=200) |
|
64 | 80 | |
|
81 | @pytest.mark.backends("hg", "git") | |
|
82 | def test_commits_filtered_by_branch(self, backend): | |
|
83 | self.log_user() | |
|
84 | self.app.get( | |
|
85 | route_path('repo_commits', repo_name=backend.repo_name, | |
|
86 | params=dict(branch=backend.default_branch_name)), | |
|
87 | status=200) | |
|
88 | ||
|
65 | 89 | @pytest.mark.backends("svn") |
|
66 | 90 | def test_changelog_filtered_by_branch_svn(self, autologin_user, backend): |
|
67 | 91 | repo = backend['svn-simple-layout'] |
|
68 | 92 | response = self.app.get( |
|
69 | 93 | route_path('repo_changelog', repo_name=repo.repo_name, |
|
70 | 94 | params=dict(branch='trunk')), |
|
71 | 95 | status=200) |
|
72 | 96 | |
|
73 | self.assert_commits_on_page( | |
|
74 | response, indexes=[15, 12, 7, 3, 2, 1]) | |
|
97 | assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1]) | |
|
75 | 98 | |
|
76 |
def test_c |
|
|
99 | def test_commits_filtered_by_wrong_branch(self, backend): | |
|
77 | 100 | self.log_user() |
|
78 | 101 | branch = 'wrong-branch-name' |
|
79 | 102 | response = self.app.get( |
|
80 |
route_path('repo_c |
|
|
103 | route_path('repo_commits', repo_name=backend.repo_name, | |
|
81 | 104 | params=dict(branch=branch)), |
|
82 | 105 | status=302) |
|
83 |
expected_url = '/{repo}/c |
|
|
106 | expected_url = '/{repo}/commits/{branch}'.format( | |
|
84 | 107 | repo=backend.repo_name, branch=branch) |
|
85 | 108 | assert expected_url in response.location |
|
86 | 109 | response = response.follow() |
|
87 | 110 | expected_warning = 'Branch {} is not found.'.format(branch) |
|
88 | 111 | assert expected_warning in response.body |
|
89 | 112 | |
|
90 | def assert_commits_on_page(self, response, indexes): | |
|
91 | found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)] | |
|
92 | assert found_indexes == indexes | |
|
93 | ||
|
94 | 113 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
95 | 114 | def test_changelog_filtered_by_branch_with_merges( |
|
96 | 115 | self, autologin_user, backend): |
|
97 | 116 | |
|
98 | 117 | # Note: The changelog of branch "b" does not contain the commit "a1" |
|
99 | 118 | # although this is a parent of commit "b1". And branch "b" has commits |
|
100 | 119 | # which have a smaller index than commit "a1". |
|
101 | 120 | commits = [ |
|
102 | 121 | {'message': 'a'}, |
|
103 | 122 | {'message': 'b', 'branch': 'b'}, |
|
104 | 123 | {'message': 'a1', 'parents': ['a']}, |
|
105 | 124 | {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']}, |
|
106 | 125 | ] |
|
107 | 126 | backend.create_repo(commits) |
|
108 | 127 | |
|
109 | 128 | self.app.get( |
|
110 | 129 | route_path('repo_changelog', repo_name=backend.repo_name, |
|
111 | 130 | params=dict(branch='b')), |
|
112 | 131 | status=200) |
|
113 | 132 | |
|
114 | 133 | @pytest.mark.backends("hg") |
|
115 |
def test_c |
|
|
134 | def test_commits_closed_branches(self, autologin_user, backend): | |
|
116 | 135 | repo = backend['closed_branch'] |
|
117 | 136 | response = self.app.get( |
|
118 |
route_path('repo_c |
|
|
137 | route_path('repo_commits', repo_name=repo.repo_name, | |
|
119 | 138 | params=dict(branch='experimental')), |
|
120 | 139 | status=200) |
|
121 | 140 | |
|
122 |
|
|
|
123 | response, indexes=[3, 1]) | |
|
141 | assert_commits_on_page(response, indexes=[3, 1]) | |
|
124 | 142 | |
|
125 | 143 | def test_changelog_pagination(self, backend): |
|
126 | 144 | self.log_user() |
|
127 | 145 | # pagination, walk up to page 6 |
|
128 | 146 | changelog_url = route_path( |
|
129 |
'repo_c |
|
|
147 | 'repo_commits', repo_name=backend.repo_name) | |
|
130 | 148 | |
|
131 | 149 | for page in range(1, 7): |
|
132 | 150 | response = self.app.get(changelog_url, {'page': page}) |
|
133 | 151 | |
|
134 | 152 | first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1 |
|
135 | 153 | last_idx = -DEFAULT_CHANGELOG_SIZE * page |
|
136 | 154 | self.assert_commit_range_on_page(response, first_idx, last_idx, backend) |
|
137 | 155 | |
|
138 | 156 | def assert_commit_range_on_page( |
|
139 | 157 | self, response, first_idx, last_idx, backend): |
|
140 | 158 | input_template = ( |
|
141 | 159 | """<input class="commit-range" id="%(raw_id)s" """ |
|
142 | 160 | """name="%(raw_id)s" type="checkbox" value="1" />""" |
|
143 | 161 | ) |
|
144 | 162 | commit_span_template = """<span class="commit_hash">r%s:%s</span>""" |
|
145 | 163 | repo = backend.repo |
|
146 | 164 | |
|
147 | 165 | first_commit_on_page = repo.get_commit(commit_idx=first_idx) |
|
148 | 166 | response.mustcontain( |
|
149 | 167 | input_template % {'raw_id': first_commit_on_page.raw_id}) |
|
150 | 168 | response.mustcontain(commit_span_template % ( |
|
151 | 169 | first_commit_on_page.idx, first_commit_on_page.short_id) |
|
152 | 170 | ) |
|
153 | 171 | |
|
154 | 172 | last_commit_on_page = repo.get_commit(commit_idx=last_idx) |
|
155 | 173 | response.mustcontain( |
|
156 | 174 | input_template % {'raw_id': last_commit_on_page.raw_id}) |
|
157 | 175 | response.mustcontain(commit_span_template % ( |
|
158 | 176 | last_commit_on_page.idx, last_commit_on_page.short_id) |
|
159 | 177 | ) |
|
160 | 178 | |
|
161 | 179 | first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1) |
|
162 | 180 | first_span_of_next_page = commit_span_template % ( |
|
163 | 181 | first_commit_of_next_page.idx, first_commit_of_next_page.short_id) |
|
164 | 182 | assert first_span_of_next_page not in response |
|
165 | 183 | |
|
166 | 184 | @pytest.mark.parametrize('test_path', [ |
|
167 | 185 | 'vcs/exceptions.py', |
|
168 | 186 | '/vcs/exceptions.py', |
|
169 | 187 | '//vcs/exceptions.py' |
|
170 | 188 | ]) |
|
171 |
def test_c |
|
|
189 | def test_commits_with_filenode(self, backend, test_path): | |
|
172 | 190 | self.log_user() |
|
173 | 191 | response = self.app.get( |
|
174 |
route_path('repo_c |
|
|
192 | route_path('repo_commits_file', repo_name=backend.repo_name, | |
|
175 | 193 | commit_id='tip', f_path=test_path), |
|
176 | 194 | ) |
|
177 | 195 | |
|
178 | 196 | # history commits messages |
|
179 | 197 | response.mustcontain('Added exceptions module, this time for real') |
|
180 | 198 | response.mustcontain('Added not implemented hg backend test case') |
|
181 | 199 | response.mustcontain('Added BaseChangeset class') |
|
182 | 200 | |
|
183 |
def test_c |
|
|
201 | def test_commits_with_filenode_that_is_dirnode(self, backend): | |
|
184 | 202 | self.log_user() |
|
185 | 203 | self.app.get( |
|
186 |
route_path('repo_c |
|
|
204 | route_path('repo_commits_file', repo_name=backend.repo_name, | |
|
187 | 205 | commit_id='tip', f_path='/tests'), |
|
188 | 206 | status=302) |
|
189 | 207 | |
|
190 |
def test_c |
|
|
208 | def test_commits_with_filenode_not_existing(self, backend): | |
|
191 | 209 | self.log_user() |
|
192 | 210 | self.app.get( |
|
193 |
route_path('repo_c |
|
|
211 | route_path('repo_commits_file', repo_name=backend.repo_name, | |
|
194 | 212 | commit_id='tip', f_path='wrong_path'), |
|
195 | 213 | status=302) |
@@ -1,1216 +1,1218 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import mock |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | import rhodecode |
|
24 | 24 | from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason |
|
25 | 25 | from rhodecode.lib.vcs.nodes import FileNode |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | 27 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
28 | 28 | from rhodecode.model.db import ( |
|
29 | 29 | PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository) |
|
30 | 30 | from rhodecode.model.meta import Session |
|
31 | 31 | from rhodecode.model.pull_request import PullRequestModel |
|
32 | 32 | from rhodecode.model.user import UserModel |
|
33 | 33 | from rhodecode.tests import ( |
|
34 | 34 | assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def route_path(name, params=None, **kwargs): |
|
38 | 38 | import urllib |
|
39 | 39 | |
|
40 | 40 | base_url = { |
|
41 | 41 | 'repo_changelog': '/{repo_name}/changelog', |
|
42 | 42 | 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}', |
|
43 | 'repo_commits': '/{repo_name}/changelog', | |
|
44 | 'repo_commits_file': '/{repo_name}/changelog/{commit_id}/{f_path}', | |
|
43 | 45 | 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}', |
|
44 | 46 | 'pullrequest_show_all': '/{repo_name}/pull-request', |
|
45 | 47 | 'pullrequest_show_all_data': '/{repo_name}/pull-request-data', |
|
46 | 48 | 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
47 | 49 | 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations', |
|
48 | 50 | 'pullrequest_new': '/{repo_name}/pull-request/new', |
|
49 | 51 | 'pullrequest_create': '/{repo_name}/pull-request/create', |
|
50 | 52 | 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update', |
|
51 | 53 | 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge', |
|
52 | 54 | 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete', |
|
53 | 55 | 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', |
|
54 | 56 | 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', |
|
55 | 57 | }[name].format(**kwargs) |
|
56 | 58 | |
|
57 | 59 | if params: |
|
58 | 60 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
59 | 61 | return base_url |
|
60 | 62 | |
|
61 | 63 | |
|
62 | 64 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
63 | 65 | @pytest.mark.backends("git", "hg") |
|
64 | 66 | class TestPullrequestsView(object): |
|
65 | 67 | |
|
66 | 68 | def test_index(self, backend): |
|
67 | 69 | self.app.get(route_path( |
|
68 | 70 | 'pullrequest_new', |
|
69 | 71 | repo_name=backend.repo_name)) |
|
70 | 72 | |
|
71 | 73 | def test_option_menu_create_pull_request_exists(self, backend): |
|
72 | 74 | repo_name = backend.repo_name |
|
73 | 75 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) |
|
74 | 76 | |
|
75 | 77 | create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path( |
|
76 | 78 | 'pullrequest_new', repo_name=repo_name) |
|
77 | 79 | response.mustcontain(create_pr_link) |
|
78 | 80 | |
|
79 | 81 | def test_create_pr_form_with_raw_commit_id(self, backend): |
|
80 | 82 | repo = backend.repo |
|
81 | 83 | |
|
82 | 84 | self.app.get( |
|
83 | 85 | route_path('pullrequest_new', repo_name=repo.repo_name, |
|
84 | 86 | commit=repo.get_commit().raw_id), |
|
85 | 87 | status=200) |
|
86 | 88 | |
|
87 | 89 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) |
|
88 | 90 | @pytest.mark.parametrize('range_diff', ["0", "1"]) |
|
89 | 91 | def test_show(self, pr_util, pr_merge_enabled, range_diff): |
|
90 | 92 | pull_request = pr_util.create_pull_request( |
|
91 | 93 | mergeable=pr_merge_enabled, enable_notifications=False) |
|
92 | 94 | |
|
93 | 95 | response = self.app.get(route_path( |
|
94 | 96 | 'pullrequest_show', |
|
95 | 97 | repo_name=pull_request.target_repo.scm_instance().name, |
|
96 | 98 | pull_request_id=pull_request.pull_request_id, |
|
97 | 99 | params={'range-diff': range_diff})) |
|
98 | 100 | |
|
99 | 101 | for commit_id in pull_request.revisions: |
|
100 | 102 | response.mustcontain(commit_id) |
|
101 | 103 | |
|
102 | 104 | assert pull_request.target_ref_parts.type in response |
|
103 | 105 | assert pull_request.target_ref_parts.name in response |
|
104 | 106 | target_clone_url = pull_request.target_repo.clone_url() |
|
105 | 107 | assert target_clone_url in response |
|
106 | 108 | |
|
107 | 109 | assert 'class="pull-request-merge"' in response |
|
108 | 110 | if pr_merge_enabled: |
|
109 | 111 | response.mustcontain('Pull request reviewer approval is pending') |
|
110 | 112 | else: |
|
111 | 113 | response.mustcontain('Server-side pull request merging is disabled.') |
|
112 | 114 | |
|
113 | 115 | if range_diff == "1": |
|
114 | 116 | response.mustcontain('Turn off: Show the diff as commit range') |
|
115 | 117 | |
|
116 | 118 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): |
|
117 | 119 | # Logout |
|
118 | 120 | response = self.app.post( |
|
119 | 121 | h.route_path('logout'), |
|
120 | 122 | params={'csrf_token': csrf_token}) |
|
121 | 123 | # Login as regular user |
|
122 | 124 | response = self.app.post(h.route_path('login'), |
|
123 | 125 | {'username': TEST_USER_REGULAR_LOGIN, |
|
124 | 126 | 'password': 'test12'}) |
|
125 | 127 | |
|
126 | 128 | pull_request = pr_util.create_pull_request( |
|
127 | 129 | author=TEST_USER_REGULAR_LOGIN) |
|
128 | 130 | |
|
129 | 131 | response = self.app.get(route_path( |
|
130 | 132 | 'pullrequest_show', |
|
131 | 133 | repo_name=pull_request.target_repo.scm_instance().name, |
|
132 | 134 | pull_request_id=pull_request.pull_request_id)) |
|
133 | 135 | |
|
134 | 136 | response.mustcontain('Server-side pull request merging is disabled.') |
|
135 | 137 | |
|
136 | 138 | assert_response = response.assert_response() |
|
137 | 139 | # for regular user without a merge permissions, we don't see it |
|
138 | 140 | assert_response.no_element_exists('#close-pull-request-action') |
|
139 | 141 | |
|
140 | 142 | user_util.grant_user_permission_to_repo( |
|
141 | 143 | pull_request.target_repo, |
|
142 | 144 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
143 | 145 | 'repository.write') |
|
144 | 146 | response = self.app.get(route_path( |
|
145 | 147 | 'pullrequest_show', |
|
146 | 148 | repo_name=pull_request.target_repo.scm_instance().name, |
|
147 | 149 | pull_request_id=pull_request.pull_request_id)) |
|
148 | 150 | |
|
149 | 151 | response.mustcontain('Server-side pull request merging is disabled.') |
|
150 | 152 | |
|
151 | 153 | assert_response = response.assert_response() |
|
152 | 154 | # now regular user has a merge permissions, we have CLOSE button |
|
153 | 155 | assert_response.one_element_exists('#close-pull-request-action') |
|
154 | 156 | |
|
155 | 157 | def test_show_invalid_commit_id(self, pr_util): |
|
156 | 158 | # Simulating invalid revisions which will cause a lookup error |
|
157 | 159 | pull_request = pr_util.create_pull_request() |
|
158 | 160 | pull_request.revisions = ['invalid'] |
|
159 | 161 | Session().add(pull_request) |
|
160 | 162 | Session().commit() |
|
161 | 163 | |
|
162 | 164 | response = self.app.get(route_path( |
|
163 | 165 | 'pullrequest_show', |
|
164 | 166 | repo_name=pull_request.target_repo.scm_instance().name, |
|
165 | 167 | pull_request_id=pull_request.pull_request_id)) |
|
166 | 168 | |
|
167 | 169 | for commit_id in pull_request.revisions: |
|
168 | 170 | response.mustcontain(commit_id) |
|
169 | 171 | |
|
170 | 172 | def test_show_invalid_source_reference(self, pr_util): |
|
171 | 173 | pull_request = pr_util.create_pull_request() |
|
172 | 174 | pull_request.source_ref = 'branch:b:invalid' |
|
173 | 175 | Session().add(pull_request) |
|
174 | 176 | Session().commit() |
|
175 | 177 | |
|
176 | 178 | self.app.get(route_path( |
|
177 | 179 | 'pullrequest_show', |
|
178 | 180 | repo_name=pull_request.target_repo.scm_instance().name, |
|
179 | 181 | pull_request_id=pull_request.pull_request_id)) |
|
180 | 182 | |
|
181 | 183 | def test_edit_title_description(self, pr_util, csrf_token): |
|
182 | 184 | pull_request = pr_util.create_pull_request() |
|
183 | 185 | pull_request_id = pull_request.pull_request_id |
|
184 | 186 | |
|
185 | 187 | response = self.app.post( |
|
186 | 188 | route_path('pullrequest_update', |
|
187 | 189 | repo_name=pull_request.target_repo.repo_name, |
|
188 | 190 | pull_request_id=pull_request_id), |
|
189 | 191 | params={ |
|
190 | 192 | 'edit_pull_request': 'true', |
|
191 | 193 | 'title': 'New title', |
|
192 | 194 | 'description': 'New description', |
|
193 | 195 | 'csrf_token': csrf_token}) |
|
194 | 196 | |
|
195 | 197 | assert_session_flash( |
|
196 | 198 | response, u'Pull request title & description updated.', |
|
197 | 199 | category='success') |
|
198 | 200 | |
|
199 | 201 | pull_request = PullRequest.get(pull_request_id) |
|
200 | 202 | assert pull_request.title == 'New title' |
|
201 | 203 | assert pull_request.description == 'New description' |
|
202 | 204 | |
|
203 | 205 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
204 | 206 | pull_request = pr_util.create_pull_request() |
|
205 | 207 | pull_request_id = pull_request.pull_request_id |
|
206 | 208 | repo_name = pull_request.target_repo.repo_name |
|
207 | 209 | pr_util.close() |
|
208 | 210 | |
|
209 | 211 | response = self.app.post( |
|
210 | 212 | route_path('pullrequest_update', |
|
211 | 213 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
212 | 214 | params={ |
|
213 | 215 | 'edit_pull_request': 'true', |
|
214 | 216 | 'title': 'New title', |
|
215 | 217 | 'description': 'New description', |
|
216 | 218 | 'csrf_token': csrf_token}, status=200) |
|
217 | 219 | assert_session_flash( |
|
218 | 220 | response, u'Cannot update closed pull requests.', |
|
219 | 221 | category='error') |
|
220 | 222 | |
|
    def test_update_invalid_source_reference(self, pr_util, csrf_token):
        """Updating commits when the source ref points at a non-existent
        branch/commit must flash the MISSING_SOURCE_REF error message."""
        from rhodecode.lib.vcs.backends.base import UpdateFailureReason

        pull_request = pr_util.create_pull_request()
        # break the PR by pointing its source ref at a ref that does not exist
        pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        # the flash text comes straight from the model's status-message map
        expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
            UpdateFailureReason.MISSING_SOURCE_REF])
        assert_session_flash(response, expected_msg, category='error')
|
240 | 242 | |
|
241 | 243 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
242 | 244 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
243 | 245 | pull_request = pr_util.create_pull_request( |
|
244 | 246 | approved=True, mergeable=True) |
|
245 | 247 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' |
|
246 | 248 | pull_request.target_ref = unicode_reference |
|
247 | 249 | Session().add(pull_request) |
|
248 | 250 | Session().commit() |
|
249 | 251 | |
|
250 | 252 | pull_request_id = pull_request.pull_request_id |
|
251 | 253 | pull_request_url = route_path( |
|
252 | 254 | 'pullrequest_show', |
|
253 | 255 | repo_name=pull_request.target_repo.repo_name, |
|
254 | 256 | pull_request_id=pull_request_id) |
|
255 | 257 | |
|
256 | 258 | response = self.app.get(pull_request_url) |
|
257 | 259 | target_ref_id = 'invalid-branch' |
|
258 | 260 | merge_resp = MergeResponse( |
|
259 | 261 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, |
|
260 | 262 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) |
|
261 | 263 | response.assert_response().element_contains( |
|
262 | 264 | 'span[data-role="merge-message"]', merge_resp.merge_status_message) |
|
263 | 265 | |
|
    def test_comment_and_close_pull_request_custom_message_approved(
            self, pr_util, csrf_token, xhr_header):
        """Closing an approved PR with a custom comment: the PR closes, the
        journal records the close action, the approved status is preserved,
        and the closing comment text is stored."""

        pull_request = pr_util.create_pull_request(approved=True)
        pull_request_id = pull_request.pull_request_id
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        # close via the comment endpoint with a custom message (XHR request)
        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'text': 'Closing a PR',
                'csrf_token': csrf_token},
            extra_environ=xhr_header,)

        # the newest journal entry must be the close action
        journal = UserLog.query()\
            .filter(UserLog.user_id == author)\
            .filter(UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # closing must not clobber the pre-existing approved status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_APPROVED
        comments = ChangesetComment().query() \
            .filter(ChangesetComment.pull_request == pull_request) \
            .order_by(ChangesetComment.comment_id.asc())\
            .all()
        assert comments[-1].text == 'Closing a PR'
|
300 | 302 | |
|
    def test_comment_force_close_pull_request_rejected(
            self, pr_util, csrf_token, xhr_header):
        """Force-closing a PR with pending reviewers records the close action
        and leaves the PR in rejected status."""
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        # add two reviewers so the PR has pending (non-approved) reviews
        PullRequestModel().update_reviewers(
            pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
            pull_request.author)
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        # close without any comment text — a forced close
        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        pull_request = PullRequest.get(pull_request_id)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED
|
332 | 334 | |
|
    def test_comment_and_close_pull_request(
            self, pr_util, csrf_token, xhr_header):
        """Closing a PR via the comment endpoint returns a JSON payload,
        closes the PR, and leaves it with rejected status (no approval was
        ever recorded)."""
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={
                'close_pull_request': 'true',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        # XHR endpoint must answer with a JSON body
        assert response.json

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED
|
356 | 358 | |
|
    def test_create_pull_request(self, backend, csrf_token):
        """Creating a PR via the form endpoint redirects to the new PR and
        stores revisions, source ref and target ref as submitted.

        The form payload is a colander-style sequence of tuples; the
        ``__start__``/``__end__`` markers delimit nested mappings/sequences,
        so the ordering of the tuples is significant.
        """
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        # the redirect target ends with the new numeric PR id
        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we have now both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref
|
408 | 410 | |
|
    def test_reviewer_notifications(self, backend, csrf_token):
        """Creating a PR notifies its reviewer, and changing the reviewer set
        afterwards produces a second notification."""
        # We have to use the app.post for this test so it will create the
        # notifications properly with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # colander-style form payload; user_id '2' is the initial reviewer
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "wants you to review pull request #%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [(1, [], False, [])],
            pull_request.author)
        assert len(notifications.all()) == 2
|
476 | 478 | |
|
    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        """The created PR's target_ref must point at the common ancestor's
        commit id (not the submitted branch head) so the diff is correct."""
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # note: target_ref submitted as 'ancestor-child' while
        # common_ancestor is 'ancestor' — the view must resolve the ancestor
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        assert 'content_of_ancestor' not in response.body
        assert 'content_of_ancestor-child' not in response.body
        assert 'content_of_change' in response.body
|
540 | 542 | |
|
541 | 543 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
542 | 544 | # Clear any previous calls to rcextensions |
|
543 | 545 | rhodecode.EXTENSIONS.calls.clear() |
|
544 | 546 | |
|
545 | 547 | pull_request = pr_util.create_pull_request( |
|
546 | 548 | approved=True, mergeable=True) |
|
547 | 549 | pull_request_id = pull_request.pull_request_id |
|
548 | 550 | repo_name = pull_request.target_repo.scm_instance().name, |
|
549 | 551 | |
|
550 | 552 | response = self.app.post( |
|
551 | 553 | route_path('pullrequest_merge', |
|
552 | 554 | repo_name=str(repo_name[0]), |
|
553 | 555 | pull_request_id=pull_request_id), |
|
554 | 556 | params={'csrf_token': csrf_token}).follow() |
|
555 | 557 | |
|
556 | 558 | pull_request = PullRequest.get(pull_request_id) |
|
557 | 559 | |
|
558 | 560 | assert response.status_int == 200 |
|
559 | 561 | assert pull_request.is_closed() |
|
560 | 562 | assert_pull_request_status( |
|
561 | 563 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
562 | 564 | |
|
563 | 565 | # Check the relevant log entries were added |
|
564 | 566 | user_logs = UserLog.query().order_by('-user_log_id').limit(3) |
|
565 | 567 | actions = [log.action for log in user_logs] |
|
566 | 568 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
567 | 569 | expected_actions = [ |
|
568 | 570 | u'repo.pull_request.close', |
|
569 | 571 | u'repo.pull_request.merge', |
|
570 | 572 | u'repo.pull_request.comment.create' |
|
571 | 573 | ] |
|
572 | 574 | assert actions == expected_actions |
|
573 | 575 | |
|
574 | 576 | user_logs = UserLog.query().order_by('-user_log_id').limit(4) |
|
575 | 577 | actions = [log for log in user_logs] |
|
576 | 578 | assert actions[-1].action == 'user.push' |
|
577 | 579 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids |
|
578 | 580 | |
|
579 | 581 | # Check post_push rcextension was really executed |
|
580 | 582 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] |
|
581 | 583 | assert len(push_calls) == 1 |
|
582 | 584 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
583 | 585 | assert last_call_kwargs['action'] == 'push' |
|
584 | 586 | assert last_call_kwargs['commit_ids'] == pr_commit_ids |
|
585 | 587 | |
|
586 | 588 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
587 | 589 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
588 | 590 | pull_request_id = pull_request.pull_request_id |
|
589 | 591 | pull_request = PullRequest.get(pull_request_id) |
|
590 | 592 | |
|
591 | 593 | response = self.app.post( |
|
592 | 594 | route_path('pullrequest_merge', |
|
593 | 595 | repo_name=pull_request.target_repo.scm_instance().name, |
|
594 | 596 | pull_request_id=pull_request.pull_request_id), |
|
595 | 597 | params={'csrf_token': csrf_token}).follow() |
|
596 | 598 | |
|
597 | 599 | assert response.status_int == 200 |
|
598 | 600 | response.mustcontain( |
|
599 | 601 | 'Merge is not currently possible because of below failed checks.') |
|
600 | 602 | response.mustcontain('Server-side pull request merging is disabled.') |
|
601 | 603 | |
|
    @pytest.mark.skip_backends('svn')
    def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
        """Merging a mergeable but unapproved PR fails the approval check
        and the page says the reviewer approval is still pending."""
        pull_request = pr_util.create_pull_request(mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200

        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Pull request reviewer approval is pending.')
|
618 | 620 | |
|
    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        """When the merge itself fails (PUSH_FAILED from a mocked
        ``merge_repo``), the failure reason is flashed to the user."""
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        # merge_repo is mocked to report a failed push; merge_status is
        # mocked to pass the pre-merge check with a dummy message
        merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
                                   MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=merge_resp),
            merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

        # the flashed message must match a PUSH_FAILED MergeResponse
        merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        assert_session_flash(response, merge_resp.merge_status_message)
|
645 | 647 | |
|
    def test_update_source_revision(self, backend, csrf_token):
        """After new commits land on the source branch, updating the PR must
        pull in the new revision and report '1 added, 0 removed'."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 0 removed commits.' in response.body

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
|
697 | 699 | |
|
    def test_update_target_revision(self, backend, csrf_token):
        """After the target branch advances and the source is rebased,
        updating the PR must swap the old revision for the rebased one and
        move target_ref to the new target head."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED

        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token},
            status=200)

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])

        # the show page reports the 1-added / 1-removed update
        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 1 removed commits.' in response.body
|
756 | 758 | |
|
    def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
        """After a force-push drops commits from the target, updating the PR
        must succeed and report '0 added, 0 removed'. Git only."""
        backend = backend_git
        commits = [
            {'message': 'master-commit-1'},
            {'message': 'master-commit-2-change-1'},
            {'message': 'master-commit-3-change-2'},

            {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
            {'message': 'feat-commit-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['master-commit-3-change-2'])
        source = backend.create_repo(heads=['feat-commit-2'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source

        # NOTE(review): source_ref points at the master head and target_ref
        # at the feature head — this looks swapped relative to the
        # source/target repos above; confirm whether that is intentional.
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['master-commit-3-change-2'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])

        pull_request.revisions = [
            commit_ids['feat-commit-1'],
            commit_ids['feat-commit-2']
        ]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # PR is created, now we simulate a force-push into target,
        # that drops a 2 last commits
        vcsrepo = target.scm_instance()
        vcsrepo.config.clear_section('hooks')
        vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token},
            status=200)

        response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 0 added, 0 removed commits.')
|
813 | 815 | |
|
    def test_update_of_ancestor_reference(self, backend, csrf_token):
        """Updating a PR after the ancestor moved must re-point target_ref
        at the new ancestor commit."""
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source

        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token},
            status=200)

        # Expect the target reference to be updated correctly
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        expected_target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor-new'])
        assert pull_request.target_ref == expected_target_ref
|
863 | 865 | |
|
    def test_remove_pull_request_branch(self, backend_git, csrf_token):
        """If the source branch of a PR is deleted, the show page must render
        the 'Missing commits' warning instead of the diff. Git only."""
        branch_name = 'development'
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'branch': branch_name},
        ]
        repo = backend_git.create_repo(commits)
        commit_ids = backend_git.commit_ids

        pull_request = PullRequest()
        # source and target are the same repo; only the branches differ
        pull_request.source_repo = repo
        pull_request.target_repo = repo
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=branch_name, commit_id=commit_ids['new-feature'])
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
        pull_request.revisions = [commit_ids['new-feature']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()

        # delete the source branch ref out from under the PR
        vcs = repo.scm_instance()
        vcs.remove_ref('refs/heads/{}'.format(branch_name))

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=repo.repo_name,
            pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200

        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert',
            'This pull request cannot be displayed, because one or more'
            ' commits no longer exist in the source repository.')
|
906 | 908 | |
|
907 | 909 | def test_strip_commits_from_pull_request( |
|
908 | 910 | self, backend, pr_util, csrf_token): |
|
909 | 911 | commits = [ |
|
910 | 912 | {'message': 'initial-commit'}, |
|
911 | 913 | {'message': 'old-feature'}, |
|
912 | 914 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
913 | 915 | ] |
|
914 | 916 | pull_request = pr_util.create_pull_request( |
|
915 | 917 | commits, target_head='initial-commit', source_head='new-feature', |
|
916 | 918 | revisions=['new-feature']) |
|
917 | 919 | |
|
918 | 920 | vcs = pr_util.source_repository.scm_instance() |
|
919 | 921 | if backend.alias == 'git': |
|
920 | 922 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
921 | 923 | else: |
|
922 | 924 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
923 | 925 | |
|
924 | 926 | response = self.app.get(route_path( |
|
925 | 927 | 'pullrequest_show', |
|
926 | 928 | repo_name=pr_util.target_repository.repo_name, |
|
927 | 929 | pull_request_id=pull_request.pull_request_id)) |
|
928 | 930 | |
|
929 | 931 | assert response.status_int == 200 |
|
930 | 932 | |
|
931 | 933 | response.assert_response().element_contains( |
|
932 | 934 | '#changeset_compare_view_content .alert strong', |
|
933 | 935 | 'Missing commits') |
|
934 | 936 | response.assert_response().element_contains( |
|
935 | 937 | '#changeset_compare_view_content .alert', |
|
936 | 938 | 'This pull request cannot be displayed, because one or more' |
|
937 | 939 | ' commits no longer exist in the source repository.') |
|
938 | 940 | response.assert_response().element_contains( |
|
939 | 941 | '#update_commits', |
|
940 | 942 | 'Update commits') |
|
941 | 943 | |
|
942 | 944 | def test_strip_commits_and_update( |
|
943 | 945 | self, backend, pr_util, csrf_token): |
|
944 | 946 | commits = [ |
|
945 | 947 | {'message': 'initial-commit'}, |
|
946 | 948 | {'message': 'old-feature'}, |
|
947 | 949 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
948 | 950 | ] |
|
949 | 951 | pull_request = pr_util.create_pull_request( |
|
950 | 952 | commits, target_head='old-feature', source_head='new-feature', |
|
951 | 953 | revisions=['new-feature'], mergeable=True) |
|
952 | 954 | |
|
953 | 955 | vcs = pr_util.source_repository.scm_instance() |
|
954 | 956 | if backend.alias == 'git': |
|
955 | 957 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
956 | 958 | else: |
|
957 | 959 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
958 | 960 | |
|
959 | 961 | response = self.app.post( |
|
960 | 962 | route_path('pullrequest_update', |
|
961 | 963 | repo_name=pull_request.target_repo.repo_name, |
|
962 | 964 | pull_request_id=pull_request.pull_request_id), |
|
963 | 965 | params={'update_commits': 'true', |
|
964 | 966 | 'csrf_token': csrf_token}) |
|
965 | 967 | |
|
966 | 968 | assert response.status_int == 200 |
|
967 | 969 | assert response.body == 'true' |
|
968 | 970 | |
|
969 | 971 | # Make sure that after update, it won't raise 500 errors |
|
970 | 972 | response = self.app.get(route_path( |
|
971 | 973 | 'pullrequest_show', |
|
972 | 974 | repo_name=pr_util.target_repository.repo_name, |
|
973 | 975 | pull_request_id=pull_request.pull_request_id)) |
|
974 | 976 | |
|
975 | 977 | assert response.status_int == 200 |
|
976 | 978 | response.assert_response().element_contains( |
|
977 | 979 | '#changeset_compare_view_content .alert strong', |
|
978 | 980 | 'Missing commits') |
|
979 | 981 | |
|
980 | 982 | def test_branch_is_a_link(self, pr_util): |
|
981 | 983 | pull_request = pr_util.create_pull_request() |
|
982 | 984 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
983 | 985 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
984 | 986 | Session().add(pull_request) |
|
985 | 987 | Session().commit() |
|
986 | 988 | |
|
987 | 989 | response = self.app.get(route_path( |
|
988 | 990 | 'pullrequest_show', |
|
989 | 991 | repo_name=pull_request.target_repo.scm_instance().name, |
|
990 | 992 | pull_request_id=pull_request.pull_request_id)) |
|
991 | 993 | assert response.status_int == 200 |
|
992 | 994 | |
|
993 | 995 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
994 | 996 | origin_children = origin.getchildren() |
|
995 | 997 | assert len(origin_children) == 1 |
|
996 | 998 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
997 | 999 | target_children = target.getchildren() |
|
998 | 1000 | assert len(target_children) == 1 |
|
999 | 1001 | |
|
1000 | 1002 | expected_origin_link = route_path( |
|
1001 |
'repo_c |
|
|
1003 | 'repo_commits', | |
|
1002 | 1004 | repo_name=pull_request.source_repo.scm_instance().name, |
|
1003 | 1005 | params=dict(branch='origin')) |
|
1004 | 1006 | expected_target_link = route_path( |
|
1005 |
'repo_c |
|
|
1007 | 'repo_commits', | |
|
1006 | 1008 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1007 | 1009 | params=dict(branch='target')) |
|
1008 | 1010 | assert origin_children[0].attrib['href'] == expected_origin_link |
|
1009 | 1011 | assert origin_children[0].text == 'branch: origin' |
|
1010 | 1012 | assert target_children[0].attrib['href'] == expected_target_link |
|
1011 | 1013 | assert target_children[0].text == 'branch: target' |
|
1012 | 1014 | |
|
1013 | 1015 | def test_bookmark_is_not_a_link(self, pr_util): |
|
1014 | 1016 | pull_request = pr_util.create_pull_request() |
|
1015 | 1017 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
1016 | 1018 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
1017 | 1019 | Session().add(pull_request) |
|
1018 | 1020 | Session().commit() |
|
1019 | 1021 | |
|
1020 | 1022 | response = self.app.get(route_path( |
|
1021 | 1023 | 'pullrequest_show', |
|
1022 | 1024 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1023 | 1025 | pull_request_id=pull_request.pull_request_id)) |
|
1024 | 1026 | assert response.status_int == 200 |
|
1025 | 1027 | |
|
1026 | 1028 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
1027 | 1029 | assert origin.text.strip() == 'bookmark: origin' |
|
1028 | 1030 | assert origin.getchildren() == [] |
|
1029 | 1031 | |
|
1030 | 1032 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
1031 | 1033 | assert target.text.strip() == 'bookmark: target' |
|
1032 | 1034 | assert target.getchildren() == [] |
|
1033 | 1035 | |
|
1034 | 1036 | def test_tag_is_not_a_link(self, pr_util): |
|
1035 | 1037 | pull_request = pr_util.create_pull_request() |
|
1036 | 1038 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
1037 | 1039 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
1038 | 1040 | Session().add(pull_request) |
|
1039 | 1041 | Session().commit() |
|
1040 | 1042 | |
|
1041 | 1043 | response = self.app.get(route_path( |
|
1042 | 1044 | 'pullrequest_show', |
|
1043 | 1045 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1044 | 1046 | pull_request_id=pull_request.pull_request_id)) |
|
1045 | 1047 | assert response.status_int == 200 |
|
1046 | 1048 | |
|
1047 | 1049 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
1048 | 1050 | assert origin.text.strip() == 'tag: origin' |
|
1049 | 1051 | assert origin.getchildren() == [] |
|
1050 | 1052 | |
|
1051 | 1053 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
1052 | 1054 | assert target.text.strip() == 'tag: target' |
|
1053 | 1055 | assert target.getchildren() == [] |
|
1054 | 1056 | |
|
1055 | 1057 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
1056 | 1058 | def test_shadow_repository_link( |
|
1057 | 1059 | self, mergeable, pr_util, http_host_only_stub): |
|
1058 | 1060 | """ |
|
1059 | 1061 | Check that the pull request summary page displays a link to the shadow |
|
1060 | 1062 | repository if the pull request is mergeable. If it is not mergeable |
|
1061 | 1063 | the link should not be displayed. |
|
1062 | 1064 | """ |
|
1063 | 1065 | pull_request = pr_util.create_pull_request( |
|
1064 | 1066 | mergeable=mergeable, enable_notifications=False) |
|
1065 | 1067 | target_repo = pull_request.target_repo.scm_instance() |
|
1066 | 1068 | pr_id = pull_request.pull_request_id |
|
1067 | 1069 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
1068 | 1070 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) |
|
1069 | 1071 | |
|
1070 | 1072 | response = self.app.get(route_path( |
|
1071 | 1073 | 'pullrequest_show', |
|
1072 | 1074 | repo_name=target_repo.name, |
|
1073 | 1075 | pull_request_id=pr_id)) |
|
1074 | 1076 | |
|
1075 | 1077 | if mergeable: |
|
1076 | 1078 | response.assert_response().element_value_contains( |
|
1077 | 1079 | 'input.pr-mergeinfo', shadow_url) |
|
1078 | 1080 | response.assert_response().element_value_contains( |
|
1079 | 1081 | 'input.pr-mergeinfo ', 'pr-merge') |
|
1080 | 1082 | else: |
|
1081 | 1083 | response.assert_response().no_element_exists('.pr-mergeinfo') |
|
1082 | 1084 | |
|
1083 | 1085 | |
|
1084 | 1086 | @pytest.mark.usefixtures('app') |
|
1085 | 1087 | @pytest.mark.backends("git", "hg") |
|
1086 | 1088 | class TestPullrequestsControllerDelete(object): |
|
1087 | 1089 | def test_pull_request_delete_button_permissions_admin( |
|
1088 | 1090 | self, autologin_user, user_admin, pr_util): |
|
1089 | 1091 | pull_request = pr_util.create_pull_request( |
|
1090 | 1092 | author=user_admin.username, enable_notifications=False) |
|
1091 | 1093 | |
|
1092 | 1094 | response = self.app.get(route_path( |
|
1093 | 1095 | 'pullrequest_show', |
|
1094 | 1096 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1095 | 1097 | pull_request_id=pull_request.pull_request_id)) |
|
1096 | 1098 | |
|
1097 | 1099 | response.mustcontain('id="delete_pullrequest"') |
|
1098 | 1100 | response.mustcontain('Confirm to delete this pull request') |
|
1099 | 1101 | |
|
1100 | 1102 | def test_pull_request_delete_button_permissions_owner( |
|
1101 | 1103 | self, autologin_regular_user, user_regular, pr_util): |
|
1102 | 1104 | pull_request = pr_util.create_pull_request( |
|
1103 | 1105 | author=user_regular.username, enable_notifications=False) |
|
1104 | 1106 | |
|
1105 | 1107 | response = self.app.get(route_path( |
|
1106 | 1108 | 'pullrequest_show', |
|
1107 | 1109 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1108 | 1110 | pull_request_id=pull_request.pull_request_id)) |
|
1109 | 1111 | |
|
1110 | 1112 | response.mustcontain('id="delete_pullrequest"') |
|
1111 | 1113 | response.mustcontain('Confirm to delete this pull request') |
|
1112 | 1114 | |
|
1113 | 1115 | def test_pull_request_delete_button_permissions_forbidden( |
|
1114 | 1116 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1115 | 1117 | pull_request = pr_util.create_pull_request( |
|
1116 | 1118 | author=user_admin.username, enable_notifications=False) |
|
1117 | 1119 | |
|
1118 | 1120 | response = self.app.get(route_path( |
|
1119 | 1121 | 'pullrequest_show', |
|
1120 | 1122 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1121 | 1123 | pull_request_id=pull_request.pull_request_id)) |
|
1122 | 1124 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1123 | 1125 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1124 | 1126 | |
|
1125 | 1127 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1126 | 1128 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1127 | 1129 | user_util): |
|
1128 | 1130 | |
|
1129 | 1131 | pull_request = pr_util.create_pull_request( |
|
1130 | 1132 | author=user_admin.username, enable_notifications=False) |
|
1131 | 1133 | |
|
1132 | 1134 | user_util.grant_user_permission_to_repo( |
|
1133 | 1135 | pull_request.target_repo, user_regular, |
|
1134 | 1136 | 'repository.write') |
|
1135 | 1137 | |
|
1136 | 1138 | response = self.app.get(route_path( |
|
1137 | 1139 | 'pullrequest_show', |
|
1138 | 1140 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1139 | 1141 | pull_request_id=pull_request.pull_request_id)) |
|
1140 | 1142 | |
|
1141 | 1143 | response.mustcontain('id="open_edit_pullrequest"') |
|
1142 | 1144 | response.mustcontain('id="delete_pullrequest"') |
|
1143 | 1145 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1144 | 1146 | |
|
1145 | 1147 | def test_delete_comment_returns_404_if_comment_does_not_exist( |
|
1146 | 1148 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1147 | 1149 | |
|
1148 | 1150 | pull_request = pr_util.create_pull_request( |
|
1149 | 1151 | author=user_admin.username, enable_notifications=False) |
|
1150 | 1152 | |
|
1151 | 1153 | self.app.post( |
|
1152 | 1154 | route_path( |
|
1153 | 1155 | 'pullrequest_comment_delete', |
|
1154 | 1156 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1155 | 1157 | pull_request_id=pull_request.pull_request_id, |
|
1156 | 1158 | comment_id=1024404), |
|
1157 | 1159 | extra_environ=xhr_header, |
|
1158 | 1160 | params={'csrf_token': csrf_token}, |
|
1159 | 1161 | status=404 |
|
1160 | 1162 | ) |
|
1161 | 1163 | |
|
1162 | 1164 | def test_delete_comment( |
|
1163 | 1165 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1164 | 1166 | |
|
1165 | 1167 | pull_request = pr_util.create_pull_request( |
|
1166 | 1168 | author=user_admin.username, enable_notifications=False) |
|
1167 | 1169 | comment = pr_util.create_comment() |
|
1168 | 1170 | comment_id = comment.comment_id |
|
1169 | 1171 | |
|
1170 | 1172 | response = self.app.post( |
|
1171 | 1173 | route_path( |
|
1172 | 1174 | 'pullrequest_comment_delete', |
|
1173 | 1175 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1174 | 1176 | pull_request_id=pull_request.pull_request_id, |
|
1175 | 1177 | comment_id=comment_id), |
|
1176 | 1178 | extra_environ=xhr_header, |
|
1177 | 1179 | params={'csrf_token': csrf_token}, |
|
1178 | 1180 | status=200 |
|
1179 | 1181 | ) |
|
1180 | 1182 | assert response.body == 'true' |
|
1181 | 1183 | |
|
1182 | 1184 | @pytest.mark.parametrize('url_type', [ |
|
1183 | 1185 | 'pullrequest_new', |
|
1184 | 1186 | 'pullrequest_create', |
|
1185 | 1187 | 'pullrequest_update', |
|
1186 | 1188 | 'pullrequest_merge', |
|
1187 | 1189 | ]) |
|
1188 | 1190 | def test_pull_request_is_forbidden_on_archived_repo( |
|
1189 | 1191 | self, autologin_user, backend, xhr_header, user_util, url_type): |
|
1190 | 1192 | |
|
1191 | 1193 | # create a temporary repo |
|
1192 | 1194 | source = user_util.create_repo(repo_type=backend.alias) |
|
1193 | 1195 | repo_name = source.repo_name |
|
1194 | 1196 | repo = Repository.get_by_repo_name(repo_name) |
|
1195 | 1197 | repo.archived = True |
|
1196 | 1198 | Session().commit() |
|
1197 | 1199 | |
|
1198 | 1200 | response = self.app.get( |
|
1199 | 1201 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) |
|
1200 | 1202 | |
|
1201 | 1203 | msg = 'Action not supported for archived repository.' |
|
1202 | 1204 | assert_session_flash(response, msg) |
|
1203 | 1205 | |
|
1204 | 1206 | |
|
1205 | 1207 | def assert_pull_request_status(pull_request, expected_status): |
|
1206 | 1208 | status = ChangesetStatusModel().calculated_review_status( |
|
1207 | 1209 | pull_request=pull_request) |
|
1208 | 1210 | assert status == expected_status |
|
1209 | 1211 | |
|
1210 | 1212 | |
|
1211 | 1213 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) |
|
1212 | 1214 | @pytest.mark.usefixtures("autologin_user") |
|
1213 | 1215 | def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): |
|
1214 | 1216 | response = app.get( |
|
1215 | 1217 | route_path(route, repo_name=backend_svn.repo_name), status=404) |
|
1216 | 1218 |
@@ -1,358 +1,365 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from pyramid.httpexceptions import HTTPNotFound, HTTPFound |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | from pyramid.renderers import render |
|
27 | 27 | from pyramid.response import Response |
|
28 | 28 | |
|
29 | 29 | from rhodecode.apps._base import RepoAppView |
|
30 | 30 | import rhodecode.lib.helpers as h |
|
31 | 31 | from rhodecode.lib.auth import ( |
|
32 | 32 | LoginRequired, HasRepoPermissionAnyDecorator) |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib.ext_json import json |
|
35 | 35 | from rhodecode.lib.graphmod import _colored, _dagwalker |
|
36 | 36 | from rhodecode.lib.helpers import RepoPage |
|
37 | 37 | from rhodecode.lib.utils2 import safe_int, safe_str, str2bool |
|
38 | 38 | from rhodecode.lib.vcs.exceptions import ( |
|
39 | 39 | RepositoryError, CommitDoesNotExistError, |
|
40 | 40 | CommitError, NodeDoesNotExistError, EmptyRepositoryError) |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | DEFAULT_CHANGELOG_SIZE = 20 |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | class RepoChangelogView(RepoAppView): |
|
48 | 48 | |
|
49 | 49 | def _get_commit_or_redirect(self, commit_id, redirect_after=True): |
|
50 | 50 | """ |
|
51 | 51 | This is a safe way to get commit. If an error occurs it redirects to |
|
52 | 52 | tip with proper message |
|
53 | 53 | |
|
54 | 54 | :param commit_id: id of commit to fetch |
|
55 | 55 | :param redirect_after: toggle redirection |
|
56 | 56 | """ |
|
57 | 57 | _ = self.request.translate |
|
58 | 58 | |
|
59 | 59 | try: |
|
60 | 60 | return self.rhodecode_vcs_repo.get_commit(commit_id) |
|
61 | 61 | except EmptyRepositoryError: |
|
62 | 62 | if not redirect_after: |
|
63 | 63 | return None |
|
64 | 64 | |
|
65 | 65 | h.flash(h.literal( |
|
66 | 66 | _('There are no commits yet')), category='warning') |
|
67 | 67 | raise HTTPFound( |
|
68 | 68 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
69 | 69 | |
|
70 | 70 | except (CommitDoesNotExistError, LookupError): |
|
71 | 71 | msg = _('No such commit exists for this repository') |
|
72 | 72 | h.flash(msg, category='error') |
|
73 | 73 | raise HTTPNotFound() |
|
74 | 74 | except RepositoryError as e: |
|
75 | 75 | h.flash(safe_str(h.escape(e)), category='error') |
|
76 | 76 | raise HTTPNotFound() |
|
77 | 77 | |
|
78 | 78 | def _graph(self, repo, commits, prev_data=None, next_data=None): |
|
79 | 79 | """ |
|
80 | 80 | Generates a DAG graph for repo |
|
81 | 81 | |
|
82 | 82 | :param repo: repo instance |
|
83 | 83 | :param commits: list of commits |
|
84 | 84 | """ |
|
85 | 85 | if not commits: |
|
86 | 86 | return json.dumps([]), json.dumps([]) |
|
87 | 87 | |
|
88 | 88 | def serialize(commit, parents=True): |
|
89 | 89 | data = dict( |
|
90 | 90 | raw_id=commit.raw_id, |
|
91 | 91 | idx=commit.idx, |
|
92 | 92 | branch=h.escape(commit.branch), |
|
93 | 93 | ) |
|
94 | 94 | if parents: |
|
95 | 95 | data['parents'] = [ |
|
96 | 96 | serialize(x, parents=False) for x in commit.parents] |
|
97 | 97 | return data |
|
98 | 98 | |
|
99 | 99 | prev_data = prev_data or [] |
|
100 | 100 | next_data = next_data or [] |
|
101 | 101 | |
|
102 | 102 | current = [serialize(x) for x in commits] |
|
103 | 103 | commits = prev_data + current + next_data |
|
104 | 104 | |
|
105 | 105 | dag = _dagwalker(repo, commits) |
|
106 | 106 | |
|
107 | 107 | data = [[commit_id, vtx, edges, branch] |
|
108 | 108 | for commit_id, vtx, edges, branch in _colored(dag)] |
|
109 | 109 | return json.dumps(data), json.dumps(current) |
|
110 | 110 | |
|
111 | 111 | def _check_if_valid_branch(self, branch_name, repo_name, f_path): |
|
112 | 112 | if branch_name not in self.rhodecode_vcs_repo.branches_all: |
|
113 | 113 | h.flash('Branch {} is not found.'.format(h.escape(branch_name)), |
|
114 | 114 | category='warning') |
|
115 | 115 | redirect_url = h.route_path( |
|
116 |
'repo_c |
|
|
116 | 'repo_commits_file', repo_name=repo_name, | |
|
117 | 117 | commit_id=branch_name, f_path=f_path or '') |
|
118 | 118 | raise HTTPFound(redirect_url) |
|
119 | 119 | |
|
120 | 120 | def _load_changelog_data( |
|
121 | 121 | self, c, collection, page, chunk_size, branch_name=None, |
|
122 | 122 | dynamic=False, f_path=None, commit_id=None): |
|
123 | 123 | |
|
124 | 124 | def url_generator(**kw): |
|
125 | 125 | query_params = {} |
|
126 | 126 | query_params.update(kw) |
|
127 | 127 | if f_path: |
|
128 | 128 | # changelog for file |
|
129 | 129 | return h.route_path( |
|
130 |
'repo_c |
|
|
130 | 'repo_commits_file', | |
|
131 | 131 | repo_name=c.rhodecode_db_repo.repo_name, |
|
132 | 132 | commit_id=commit_id, f_path=f_path, |
|
133 | 133 | _query=query_params) |
|
134 | 134 | else: |
|
135 | 135 | return h.route_path( |
|
136 |
'repo_c |
|
|
136 | 'repo_commits', | |
|
137 | 137 | repo_name=c.rhodecode_db_repo.repo_name, _query=query_params) |
|
138 | 138 | |
|
139 | 139 | c.total_cs = len(collection) |
|
140 | 140 | c.showing_commits = min(chunk_size, c.total_cs) |
|
141 | 141 | c.pagination = RepoPage(collection, page=page, item_count=c.total_cs, |
|
142 | 142 | items_per_page=chunk_size, branch=branch_name, |
|
143 | 143 | url=url_generator) |
|
144 | 144 | |
|
145 | 145 | c.next_page = c.pagination.next_page |
|
146 | 146 | c.prev_page = c.pagination.previous_page |
|
147 | 147 | |
|
148 | 148 | if dynamic: |
|
149 | 149 | if self.request.GET.get('chunk') != 'next': |
|
150 | 150 | c.next_page = None |
|
151 | 151 | if self.request.GET.get('chunk') != 'prev': |
|
152 | 152 | c.prev_page = None |
|
153 | 153 | |
|
154 | 154 | page_commit_ids = [x.raw_id for x in c.pagination] |
|
155 | 155 | c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids) |
|
156 | 156 | c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids) |
|
157 | 157 | |
|
158 | 158 | def load_default_context(self): |
|
159 | 159 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
160 | 160 | |
|
161 | 161 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
162 | 162 | |
|
163 | 163 | return c |
|
164 | 164 | |
|
165 | 165 | def _get_preload_attrs(self): |
|
166 | 166 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
167 | 167 | 'obsolete', 'phase', 'hidden'] |
|
168 | 168 | return pre_load |
|
169 | 169 | |
|
170 | 170 | @LoginRequired() |
|
171 | 171 | @HasRepoPermissionAnyDecorator( |
|
172 | 172 | 'repository.read', 'repository.write', 'repository.admin') |
|
173 | 173 | @view_config( |
|
174 | route_name='repo_commits', request_method='GET', | |
|
175 | renderer='rhodecode:templates/commits/changelog.mako') | |
|
176 | @view_config( | |
|
177 | route_name='repo_commits_file', request_method='GET', | |
|
178 | renderer='rhodecode:templates/commits/changelog.mako') | |
|
179 | # old routes for backward compat | |
|
180 | @view_config( | |
|
174 | 181 | route_name='repo_changelog', request_method='GET', |
|
175 |
renderer='rhodecode:templates/c |
|
|
182 | renderer='rhodecode:templates/commits/changelog.mako') | |
|
176 | 183 | @view_config( |
|
177 | 184 | route_name='repo_changelog_file', request_method='GET', |
|
178 |
renderer='rhodecode:templates/c |
|
|
185 | renderer='rhodecode:templates/commits/changelog.mako') | |
|
179 | 186 | def repo_changelog(self): |
|
180 | 187 | c = self.load_default_context() |
|
181 | 188 | |
|
182 | 189 | commit_id = self.request.matchdict.get('commit_id') |
|
183 | 190 | f_path = self._get_f_path(self.request.matchdict) |
|
184 | 191 | show_hidden = str2bool(self.request.GET.get('evolve')) |
|
185 | 192 | |
|
186 | 193 | chunk_size = 20 |
|
187 | 194 | |
|
188 | 195 | c.branch_name = branch_name = self.request.GET.get('branch') or '' |
|
189 | 196 | c.book_name = book_name = self.request.GET.get('bookmark') or '' |
|
190 | 197 | c.f_path = f_path |
|
191 | 198 | c.commit_id = commit_id |
|
192 | 199 | c.show_hidden = show_hidden |
|
193 | 200 | |
|
194 | 201 | hist_limit = safe_int(self.request.GET.get('limit')) or None |
|
195 | 202 | |
|
196 | 203 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
197 | 204 | |
|
198 | 205 | c.selected_name = branch_name or book_name |
|
199 | 206 | if not commit_id and branch_name: |
|
200 | 207 | self._check_if_valid_branch(branch_name, self.db_repo_name, f_path) |
|
201 | 208 | |
|
202 | 209 | c.changelog_for_path = f_path |
|
203 | 210 | pre_load = self._get_preload_attrs() |
|
204 | 211 | |
|
205 | 212 | partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR') |
|
206 | 213 | try: |
|
207 | 214 | if f_path: |
|
208 | 215 | log.debug('generating changelog for path %s', f_path) |
|
209 | 216 | # get the history for the file ! |
|
210 | 217 | base_commit = self.rhodecode_vcs_repo.get_commit(commit_id) |
|
211 | 218 | |
|
212 | 219 | try: |
|
213 | 220 | collection = base_commit.get_path_history( |
|
214 | 221 | f_path, limit=hist_limit, pre_load=pre_load) |
|
215 | 222 | if collection and partial_xhr: |
|
216 | 223 | # for ajax call we remove first one since we're looking |
|
217 | 224 | # at it right now in the context of a file commit |
|
218 | 225 | collection.pop(0) |
|
219 | 226 | except (NodeDoesNotExistError, CommitError): |
|
220 | 227 | # this node is not present at tip! |
|
221 | 228 | try: |
|
222 | 229 | commit = self._get_commit_or_redirect(commit_id) |
|
223 | 230 | collection = commit.get_path_history(f_path) |
|
224 | 231 | except RepositoryError as e: |
|
225 | 232 | h.flash(safe_str(e), category='warning') |
|
226 | 233 | redirect_url = h.route_path( |
|
227 |
'repo_c |
|
|
234 | 'repo_commits', repo_name=self.db_repo_name) | |
|
228 | 235 | raise HTTPFound(redirect_url) |
|
229 | 236 | collection = list(reversed(collection)) |
|
230 | 237 | else: |
|
231 | 238 | collection = self.rhodecode_vcs_repo.get_commits( |
|
232 | 239 | branch_name=branch_name, show_hidden=show_hidden, |
|
233 | 240 | pre_load=pre_load, translate_tags=False) |
|
234 | 241 | |
|
235 | 242 | self._load_changelog_data( |
|
236 | 243 | c, collection, p, chunk_size, c.branch_name, |
|
237 | 244 | f_path=f_path, commit_id=commit_id) |
|
238 | 245 | |
|
239 | 246 | except EmptyRepositoryError as e: |
|
240 | 247 | h.flash(safe_str(h.escape(e)), category='warning') |
|
241 | 248 | raise HTTPFound( |
|
242 | 249 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
243 | 250 | except HTTPFound: |
|
244 | 251 | raise |
|
245 | 252 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
246 | 253 | log.exception(safe_str(e)) |
|
247 | 254 | h.flash(safe_str(h.escape(e)), category='error') |
|
248 | 255 | raise HTTPFound( |
|
249 |
h.route_path('repo_c |
|
|
256 | h.route_path('repo_commits', repo_name=self.db_repo_name)) | |
|
250 | 257 | |
|
251 | 258 | if partial_xhr or self.request.environ.get('HTTP_X_PJAX'): |
|
252 | 259 | # case when loading dynamic file history in file view |
|
253 | 260 | # loading from ajax, we don't want the first result, it's popped |
|
254 | 261 | # in the code above |
|
255 | 262 | html = render( |
|
256 |
'rhodecode:templates/c |
|
|
263 | 'rhodecode:templates/commits/changelog_file_history.mako', | |
|
257 | 264 | self._get_template_context(c), self.request) |
|
258 | 265 | return Response(html) |
|
259 | 266 | |
|
260 | 267 | commit_ids = [] |
|
261 | 268 | if not f_path: |
|
262 | 269 | # only load graph data when not in file history mode |
|
263 | 270 | commit_ids = c.pagination |
|
264 | 271 | |
|
265 | 272 | c.graph_data, c.graph_commits = self._graph( |
|
266 | 273 | self.rhodecode_vcs_repo, commit_ids) |
|
267 | 274 | |
|
268 | 275 | return self._get_template_context(c) |
|
269 | 276 | |
|
270 | 277 | @LoginRequired() |
|
271 | 278 | @HasRepoPermissionAnyDecorator( |
|
272 | 279 | 'repository.read', 'repository.write', 'repository.admin') |
|
273 | 280 | @view_config( |
|
274 |
route_name='repo_c |
|
|
275 |
renderer='rhodecode:templates/c |
|
|
281 | route_name='repo_commits_elements', request_method=('GET', 'POST'), | |
|
282 | renderer='rhodecode:templates/commits/changelog_elements.mako', | |
|
276 | 283 | xhr=True) |
|
277 | 284 | @view_config( |
|
278 |
route_name='repo_c |
|
|
279 |
renderer='rhodecode:templates/c |
|
|
285 | route_name='repo_commits_elements_file', request_method=('GET', 'POST'), | |
|
286 | renderer='rhodecode:templates/commits/changelog_elements.mako', | |
|
280 | 287 | xhr=True) |
|
281 |
def repo_c |
|
|
288 | def repo_commits_elements(self): | |
|
282 | 289 | c = self.load_default_context() |
|
283 | 290 | commit_id = self.request.matchdict.get('commit_id') |
|
284 | 291 | f_path = self._get_f_path(self.request.matchdict) |
|
285 | 292 | show_hidden = str2bool(self.request.GET.get('evolve')) |
|
286 | 293 | |
|
287 | 294 | chunk_size = 20 |
|
288 | 295 | hist_limit = safe_int(self.request.GET.get('limit')) or None |
|
289 | 296 | |
|
290 | 297 | def wrap_for_error(err): |
|
291 | 298 | html = '<tr>' \ |
|
292 | 299 | '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \ |
|
293 | 300 | '</tr>'.format(err) |
|
294 | 301 | return Response(html) |
|
295 | 302 | |
|
296 | 303 | c.branch_name = branch_name = self.request.GET.get('branch') or '' |
|
297 | 304 | c.book_name = book_name = self.request.GET.get('bookmark') or '' |
|
298 | 305 | c.f_path = f_path |
|
299 | 306 | c.commit_id = commit_id |
|
300 | 307 | c.show_hidden = show_hidden |
|
301 | 308 | |
|
302 | 309 | c.selected_name = branch_name or book_name |
|
303 | 310 | if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all: |
|
304 | 311 | return wrap_for_error( |
|
305 | 312 | safe_str('Branch: {} is not valid'.format(branch_name))) |
|
306 | 313 | |
|
307 | 314 | pre_load = self._get_preload_attrs() |
|
308 | 315 | |
|
309 | 316 | if f_path: |
|
310 | 317 | try: |
|
311 | 318 | base_commit = self.rhodecode_vcs_repo.get_commit(commit_id) |
|
312 | 319 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
313 | 320 | log.exception(safe_str(e)) |
|
314 | 321 | raise HTTPFound( |
|
315 |
h.route_path('repo_c |
|
|
322 | h.route_path('repo_commits', repo_name=self.db_repo_name)) | |
|
316 | 323 | |
|
317 | 324 | collection = base_commit.get_path_history( |
|
318 | 325 | f_path, limit=hist_limit, pre_load=pre_load) |
|
319 | 326 | collection = list(reversed(collection)) |
|
320 | 327 | else: |
|
321 | 328 | collection = self.rhodecode_vcs_repo.get_commits( |
|
322 | 329 | branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load, |
|
323 | 330 | translate_tags=False) |
|
324 | 331 | |
|
325 | 332 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
326 | 333 | try: |
|
327 | 334 | self._load_changelog_data( |
|
328 | 335 | c, collection, p, chunk_size, dynamic=True, |
|
329 | 336 | f_path=f_path, commit_id=commit_id) |
|
330 | 337 | except EmptyRepositoryError as e: |
|
331 | 338 | return wrap_for_error(safe_str(e)) |
|
332 | 339 | except (RepositoryError, CommitDoesNotExistError, Exception) as e: |
|
333 | 340 | log.exception('Failed to fetch commits') |
|
334 | 341 | return wrap_for_error(safe_str(e)) |
|
335 | 342 | |
|
336 | 343 | prev_data = None |
|
337 | 344 | next_data = None |
|
338 | 345 | |
|
339 | 346 | try: |
|
340 | 347 | prev_graph = json.loads(self.request.POST.get('graph') or '{}') |
|
341 | 348 | except json.JSONDecodeError: |
|
342 | 349 | prev_graph = {} |
|
343 | 350 | |
|
344 | 351 | if self.request.GET.get('chunk') == 'prev': |
|
345 | 352 | next_data = prev_graph |
|
346 | 353 | elif self.request.GET.get('chunk') == 'next': |
|
347 | 354 | prev_data = prev_graph |
|
348 | 355 | |
|
349 | 356 | commit_ids = [] |
|
350 | 357 | if not f_path: |
|
351 | 358 | # only load graph data when not in file history mode |
|
352 | 359 | commit_ids = c.pagination |
|
353 | 360 | |
|
354 | 361 | c.graph_data, c.graph_commits = self._graph( |
|
355 | 362 | self.rhodecode_vcs_repo, commit_ids, |
|
356 | 363 | prev_data=prev_data, next_data=next_data) |
|
357 | 364 | |
|
358 | 365 | return self._get_template_context(c) |
@@ -1,2070 +1,2071 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helper functions |
|
23 | 23 | |
|
24 | 24 | Consists of functions to typically be used within templates, but also |
|
25 | 25 | available to Controllers. This module is available to both as 'h'. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import random |
|
30 | 30 | import hashlib |
|
31 | 31 | import StringIO |
|
32 | 32 | import textwrap |
|
33 | 33 | import urllib |
|
34 | 34 | import math |
|
35 | 35 | import logging |
|
36 | 36 | import re |
|
37 | 37 | import time |
|
38 | 38 | import string |
|
39 | 39 | import hashlib |
|
40 | 40 | from collections import OrderedDict |
|
41 | 41 | |
|
42 | 42 | import pygments |
|
43 | 43 | import itertools |
|
44 | 44 | import fnmatch |
|
45 | 45 | import bleach |
|
46 | 46 | |
|
47 | 47 | from pyramid import compat |
|
48 | 48 | from datetime import datetime |
|
49 | 49 | from functools import partial |
|
50 | 50 | from pygments.formatters.html import HtmlFormatter |
|
51 | 51 | from pygments.lexers import ( |
|
52 | 52 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
53 | 53 | |
|
54 | 54 | from pyramid.threadlocal import get_current_request |
|
55 | 55 | |
|
56 | 56 | from webhelpers.html import literal, HTML, escape |
|
57 | 57 | from webhelpers.html.tools import * |
|
58 | 58 | from webhelpers.html.builder import make_tag |
|
59 | 59 | from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ |
|
60 | 60 | end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \ |
|
61 | 61 | link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ |
|
62 | 62 | submit, text, password, textarea, title, ul, xml_declaration, radio |
|
63 | 63 | from webhelpers.html.tools import auto_link, button_to, highlight, \ |
|
64 | 64 | js_obfuscate, mail_to, strip_links, strip_tags, tag_re |
|
65 | 65 | from webhelpers.text import chop_at, collapse, convert_accented_entities, \ |
|
66 | 66 | convert_misc_entities, lchop, plural, rchop, remove_formatting, \ |
|
67 | 67 | replace_whitespace, urlify, truncate, wrap_paragraphs |
|
68 | 68 | from webhelpers.date import time_ago_in_words |
|
69 | 69 | from webhelpers.paginate import Page as _Page |
|
70 | 70 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ |
|
71 | 71 | convert_boolean_attrs, NotGiven, _make_safe_id_component |
|
72 | 72 | from webhelpers2.number import format_byte_size |
|
73 | 73 | |
|
74 | 74 | from rhodecode.lib.action_parser import action_parser |
|
75 | 75 | from rhodecode.lib.ext_json import json |
|
76 | 76 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
77 | 77 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ |
|
78 | 78 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ |
|
79 | 79 | AttributeDict, safe_int, md5, md5_safe |
|
80 | 80 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
81 | 81 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
82 | 82 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
83 | 83 | from rhodecode.lib.index.search_utils import get_matching_line_offsets |
|
84 | 84 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
85 | 85 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
86 | 86 | from rhodecode.model.db import Permission, User, Repository |
|
87 | 87 | from rhodecode.model.repo_group import RepoGroupModel |
|
88 | 88 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | log = logging.getLogger(__name__) |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | DEFAULT_USER = User.DEFAULT_USER |
|
95 | 95 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | def asset(path, ver=None, **kwargs): |
|
99 | 99 | """ |
|
100 | 100 | Helper to generate a static asset file path for rhodecode assets |
|
101 | 101 | |
|
102 | 102 | eg. h.asset('images/image.png', ver='3923') |
|
103 | 103 | |
|
104 | 104 | :param path: path of asset |
|
105 | 105 | :param ver: optional version query param to append as ?ver= |
|
106 | 106 | """ |
|
107 | 107 | request = get_current_request() |
|
108 | 108 | query = {} |
|
109 | 109 | query.update(kwargs) |
|
110 | 110 | if ver: |
|
111 | 111 | query = {'ver': ver} |
|
112 | 112 | return request.static_path( |
|
113 | 113 | 'rhodecode:public/{}'.format(path), _query=query) |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | default_html_escape_table = { |
|
117 | 117 | ord('&'): u'&', |
|
118 | 118 | ord('<'): u'<', |
|
119 | 119 | ord('>'): u'>', |
|
120 | 120 | ord('"'): u'"', |
|
121 | 121 | ord("'"): u''', |
|
122 | 122 | } |
|
123 | 123 | |
|
124 | 124 | |
|
125 | 125 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
126 | 126 | """Produce entities within text.""" |
|
127 | 127 | return text.translate(html_escape_table) |
|
128 | 128 | |
|
129 | 129 | |
|
130 | 130 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
131 | 131 | """ |
|
132 | 132 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
133 | 133 | |
|
134 | 134 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
135 | 135 | """ |
|
136 | 136 | suffix_if_chopped = suffix_if_chopped or '' |
|
137 | 137 | pos = s.find(sub) |
|
138 | 138 | if pos == -1: |
|
139 | 139 | return s |
|
140 | 140 | |
|
141 | 141 | if inclusive: |
|
142 | 142 | pos += len(sub) |
|
143 | 143 | |
|
144 | 144 | chopped = s[:pos] |
|
145 | 145 | left = s[pos:].strip() |
|
146 | 146 | |
|
147 | 147 | if left and suffix_if_chopped: |
|
148 | 148 | chopped += suffix_if_chopped |
|
149 | 149 | |
|
150 | 150 | return chopped |
|
151 | 151 | |
|
152 | 152 | |
|
153 | 153 | def shorter(text, size=20): |
|
154 | 154 | postfix = '...' |
|
155 | 155 | if len(text) > size: |
|
156 | 156 | return text[:size - len(postfix)] + postfix |
|
157 | 157 | return text |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
161 | 161 | """ |
|
162 | 162 | Reset button |
|
163 | 163 | """ |
|
164 | 164 | _set_input_attrs(attrs, type, name, value) |
|
165 | 165 | _set_id_attr(attrs, id, name) |
|
166 | 166 | convert_boolean_attrs(attrs, ["disabled"]) |
|
167 | 167 | return HTML.input(**attrs) |
|
168 | 168 | |
|
169 | 169 | reset = _reset |
|
170 | 170 | safeid = _make_safe_id_component |
|
171 | 171 | |
|
172 | 172 | |
|
173 | 173 | def branding(name, length=40): |
|
174 | 174 | return truncate(name, length, indicator="") |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | def FID(raw_id, path): |
|
178 | 178 | """ |
|
179 | 179 | Creates a unique ID for filenode based on it's hash of path and commit |
|
180 | 180 | it's safe to use in urls |
|
181 | 181 | |
|
182 | 182 | :param raw_id: |
|
183 | 183 | :param path: |
|
184 | 184 | """ |
|
185 | 185 | |
|
186 | 186 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | class _GetError(object): |
|
190 | 190 | """Get error from form_errors, and represent it as span wrapped error |
|
191 | 191 | message |
|
192 | 192 | |
|
193 | 193 | :param field_name: field to fetch errors for |
|
194 | 194 | :param form_errors: form errors dict |
|
195 | 195 | """ |
|
196 | 196 | |
|
197 | 197 | def __call__(self, field_name, form_errors): |
|
198 | 198 | tmpl = """<span class="error_msg">%s</span>""" |
|
199 | 199 | if form_errors and field_name in form_errors: |
|
200 | 200 | return literal(tmpl % form_errors.get(field_name)) |
|
201 | 201 | |
|
202 | 202 | |
|
203 | 203 | get_error = _GetError() |
|
204 | 204 | |
|
205 | 205 | |
|
206 | 206 | class _ToolTip(object): |
|
207 | 207 | |
|
208 | 208 | def __call__(self, tooltip_title, trim_at=50): |
|
209 | 209 | """ |
|
210 | 210 | Special function just to wrap our text into nice formatted |
|
211 | 211 | autowrapped text |
|
212 | 212 | |
|
213 | 213 | :param tooltip_title: |
|
214 | 214 | """ |
|
215 | 215 | tooltip_title = escape(tooltip_title) |
|
216 | 216 | tooltip_title = tooltip_title.replace('<', '<').replace('>', '>') |
|
217 | 217 | return tooltip_title |
|
218 | 218 | |
|
219 | 219 | |
|
220 | 220 | tooltip = _ToolTip() |
|
221 | 221 | |
|
222 | 222 | files_icon = icon = '<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy the full path"></i>' |
|
223 | 223 | |
|
224 | 224 | def files_breadcrumbs(repo_name, commit_id, file_path, at_ref=None, limit_items=False): |
|
225 | 225 | if isinstance(file_path, str): |
|
226 | 226 | file_path = safe_unicode(file_path) |
|
227 | 227 | |
|
228 | 228 | route_qry = {'at': at_ref} if at_ref else None |
|
229 | 229 | |
|
230 | 230 | # first segment is a `..` link to repo files |
|
231 | 231 | root_name = literal(u'<i class="icon-home"></i>') |
|
232 | 232 | url_segments = [ |
|
233 | 233 | link_to( |
|
234 | 234 | root_name, |
|
235 | 235 | route_path( |
|
236 | 236 | 'repo_files', |
|
237 | 237 | repo_name=repo_name, |
|
238 | 238 | commit_id=commit_id, |
|
239 | 239 | f_path='', |
|
240 | 240 | _query=route_qry), |
|
241 | 241 | )] |
|
242 | 242 | |
|
243 | 243 | path_segments = file_path.split('/') |
|
244 | 244 | last_cnt = len(path_segments) - 1 |
|
245 | 245 | for cnt, segment in enumerate(path_segments): |
|
246 | 246 | if not segment: |
|
247 | 247 | continue |
|
248 | 248 | segment_html = escape(segment) |
|
249 | 249 | |
|
250 | 250 | if cnt != last_cnt: |
|
251 | 251 | url_segments.append( |
|
252 | 252 | link_to( |
|
253 | 253 | segment_html, |
|
254 | 254 | route_path( |
|
255 | 255 | 'repo_files', |
|
256 | 256 | repo_name=repo_name, |
|
257 | 257 | commit_id=commit_id, |
|
258 | 258 | f_path='/'.join(path_segments[:cnt + 1]), |
|
259 | 259 | _query=route_qry), |
|
260 | 260 | )) |
|
261 | 261 | else: |
|
262 | 262 | url_segments.append(segment_html) |
|
263 | 263 | |
|
264 | 264 | limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:] |
|
265 | 265 | if limit_items and len(limited_url_segments) < len(url_segments): |
|
266 | 266 | url_segments = limited_url_segments |
|
267 | 267 | |
|
268 | 268 | full_path = file_path |
|
269 | 269 | icon = files_icon.format(escape(full_path)) |
|
270 | 270 | if file_path == '': |
|
271 | 271 | return root_name |
|
272 | 272 | else: |
|
273 | 273 | return literal(' / '.join(url_segments) + icon) |
|
274 | 274 | |
|
275 | 275 | |
|
276 | 276 | def files_url_data(request): |
|
277 | 277 | matchdict = request.matchdict |
|
278 | 278 | |
|
279 | 279 | if 'f_path' not in matchdict: |
|
280 | 280 | matchdict['f_path'] = '' |
|
281 | 281 | |
|
282 | 282 | if 'commit_id' not in matchdict: |
|
283 | 283 | matchdict['commit_id'] = 'tip' |
|
284 | 284 | |
|
285 | 285 | return json.dumps(matchdict) |
|
286 | 286 | |
|
287 | 287 | |
|
288 | 288 | def code_highlight(code, lexer, formatter, use_hl_filter=False): |
|
289 | 289 | """ |
|
290 | 290 | Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``. |
|
291 | 291 | |
|
292 | 292 | If ``outfile`` is given and a valid file object (an object |
|
293 | 293 | with a ``write`` method), the result will be written to it, otherwise |
|
294 | 294 | it is returned as a string. |
|
295 | 295 | """ |
|
296 | 296 | if use_hl_filter: |
|
297 | 297 | # add HL filter |
|
298 | 298 | from rhodecode.lib.index import search_utils |
|
299 | 299 | lexer.add_filter(search_utils.ElasticSearchHLFilter()) |
|
300 | 300 | return pygments.format(pygments.lex(code, lexer), formatter) |
|
301 | 301 | |
|
302 | 302 | |
|
303 | 303 | class CodeHtmlFormatter(HtmlFormatter): |
|
304 | 304 | """ |
|
305 | 305 | My code Html Formatter for source codes |
|
306 | 306 | """ |
|
307 | 307 | |
|
308 | 308 | def wrap(self, source, outfile): |
|
309 | 309 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
310 | 310 | |
|
311 | 311 | def _wrap_code(self, source): |
|
312 | 312 | for cnt, it in enumerate(source): |
|
313 | 313 | i, t = it |
|
314 | 314 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) |
|
315 | 315 | yield i, t |
|
316 | 316 | |
|
317 | 317 | def _wrap_tablelinenos(self, inner): |
|
318 | 318 | dummyoutfile = StringIO.StringIO() |
|
319 | 319 | lncount = 0 |
|
320 | 320 | for t, line in inner: |
|
321 | 321 | if t: |
|
322 | 322 | lncount += 1 |
|
323 | 323 | dummyoutfile.write(line) |
|
324 | 324 | |
|
325 | 325 | fl = self.linenostart |
|
326 | 326 | mw = len(str(lncount + fl - 1)) |
|
327 | 327 | sp = self.linenospecial |
|
328 | 328 | st = self.linenostep |
|
329 | 329 | la = self.lineanchors |
|
330 | 330 | aln = self.anchorlinenos |
|
331 | 331 | nocls = self.noclasses |
|
332 | 332 | if sp: |
|
333 | 333 | lines = [] |
|
334 | 334 | |
|
335 | 335 | for i in range(fl, fl + lncount): |
|
336 | 336 | if i % st == 0: |
|
337 | 337 | if i % sp == 0: |
|
338 | 338 | if aln: |
|
339 | 339 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
340 | 340 | (la, i, mw, i)) |
|
341 | 341 | else: |
|
342 | 342 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
343 | 343 | else: |
|
344 | 344 | if aln: |
|
345 | 345 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
346 | 346 | else: |
|
347 | 347 | lines.append('%*d' % (mw, i)) |
|
348 | 348 | else: |
|
349 | 349 | lines.append('') |
|
350 | 350 | ls = '\n'.join(lines) |
|
351 | 351 | else: |
|
352 | 352 | lines = [] |
|
353 | 353 | for i in range(fl, fl + lncount): |
|
354 | 354 | if i % st == 0: |
|
355 | 355 | if aln: |
|
356 | 356 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
357 | 357 | else: |
|
358 | 358 | lines.append('%*d' % (mw, i)) |
|
359 | 359 | else: |
|
360 | 360 | lines.append('') |
|
361 | 361 | ls = '\n'.join(lines) |
|
362 | 362 | |
|
363 | 363 | # in case you wonder about the seemingly redundant <div> here: since the |
|
364 | 364 | # content in the other cell also is wrapped in a div, some browsers in |
|
365 | 365 | # some configurations seem to mess up the formatting... |
|
366 | 366 | if nocls: |
|
367 | 367 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
368 | 368 | '<tr><td><div class="linenodiv" ' |
|
369 | 369 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
370 | 370 | '<pre style="line-height: 125%">' + |
|
371 | 371 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
372 | 372 | else: |
|
373 | 373 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
374 | 374 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
375 | 375 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
376 | 376 | yield 0, dummyoutfile.getvalue() |
|
377 | 377 | yield 0, '</td></tr></table>' |
|
378 | 378 | |
|
379 | 379 | |
|
380 | 380 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
381 | 381 | def __init__(self, **kw): |
|
382 | 382 | # only show these line numbers if set |
|
383 | 383 | self.only_lines = kw.pop('only_line_numbers', []) |
|
384 | 384 | self.query_terms = kw.pop('query_terms', []) |
|
385 | 385 | self.max_lines = kw.pop('max_lines', 5) |
|
386 | 386 | self.line_context = kw.pop('line_context', 3) |
|
387 | 387 | self.url = kw.pop('url', None) |
|
388 | 388 | |
|
389 | 389 | super(CodeHtmlFormatter, self).__init__(**kw) |
|
390 | 390 | |
|
391 | 391 | def _wrap_code(self, source): |
|
392 | 392 | for cnt, it in enumerate(source): |
|
393 | 393 | i, t = it |
|
394 | 394 | t = '<pre>%s</pre>' % t |
|
395 | 395 | yield i, t |
|
396 | 396 | |
|
397 | 397 | def _wrap_tablelinenos(self, inner): |
|
398 | 398 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
399 | 399 | |
|
400 | 400 | last_shown_line_number = 0 |
|
401 | 401 | current_line_number = 1 |
|
402 | 402 | |
|
403 | 403 | for t, line in inner: |
|
404 | 404 | if not t: |
|
405 | 405 | yield t, line |
|
406 | 406 | continue |
|
407 | 407 | |
|
408 | 408 | if current_line_number in self.only_lines: |
|
409 | 409 | if last_shown_line_number + 1 != current_line_number: |
|
410 | 410 | yield 0, '<tr>' |
|
411 | 411 | yield 0, '<td class="line">...</td>' |
|
412 | 412 | yield 0, '<td id="hlcode" class="code"></td>' |
|
413 | 413 | yield 0, '</tr>' |
|
414 | 414 | |
|
415 | 415 | yield 0, '<tr>' |
|
416 | 416 | if self.url: |
|
417 | 417 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
418 | 418 | self.url, current_line_number, current_line_number) |
|
419 | 419 | else: |
|
420 | 420 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
421 | 421 | current_line_number) |
|
422 | 422 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
423 | 423 | yield 0, '</tr>' |
|
424 | 424 | |
|
425 | 425 | last_shown_line_number = current_line_number |
|
426 | 426 | |
|
427 | 427 | current_line_number += 1 |
|
428 | 428 | |
|
429 | 429 | yield 0, '</table>' |
|
430 | 430 | |
|
431 | 431 | |
|
432 | 432 | def hsv_to_rgb(h, s, v): |
|
433 | 433 | """ Convert hsv color values to rgb """ |
|
434 | 434 | |
|
435 | 435 | if s == 0.0: |
|
436 | 436 | return v, v, v |
|
437 | 437 | i = int(h * 6.0) # XXX assume int() truncates! |
|
438 | 438 | f = (h * 6.0) - i |
|
439 | 439 | p = v * (1.0 - s) |
|
440 | 440 | q = v * (1.0 - s * f) |
|
441 | 441 | t = v * (1.0 - s * (1.0 - f)) |
|
442 | 442 | i = i % 6 |
|
443 | 443 | if i == 0: |
|
444 | 444 | return v, t, p |
|
445 | 445 | if i == 1: |
|
446 | 446 | return q, v, p |
|
447 | 447 | if i == 2: |
|
448 | 448 | return p, v, t |
|
449 | 449 | if i == 3: |
|
450 | 450 | return p, q, v |
|
451 | 451 | if i == 4: |
|
452 | 452 | return t, p, v |
|
453 | 453 | if i == 5: |
|
454 | 454 | return v, p, q |
|
455 | 455 | |
|
456 | 456 | |
|
457 | 457 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
458 | 458 | """ |
|
459 | 459 | Generator for getting n of evenly distributed colors using |
|
460 | 460 | hsv color and golden ratio. It always return same order of colors |
|
461 | 461 | |
|
462 | 462 | :param n: number of colors to generate |
|
463 | 463 | :param saturation: saturation of returned colors |
|
464 | 464 | :param lightness: lightness of returned colors |
|
465 | 465 | :returns: RGB tuple |
|
466 | 466 | """ |
|
467 | 467 | |
|
468 | 468 | golden_ratio = 0.618033988749895 |
|
469 | 469 | h = 0.22717784590367374 |
|
470 | 470 | |
|
471 | 471 | for _ in xrange(n): |
|
472 | 472 | h += golden_ratio |
|
473 | 473 | h %= 1 |
|
474 | 474 | HSV_tuple = [h, saturation, lightness] |
|
475 | 475 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
476 | 476 | yield map(lambda x: str(int(x * 256)), RGB_tuple) |
|
477 | 477 | |
|
478 | 478 | |
|
479 | 479 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
480 | 480 | """ |
|
481 | 481 | Returns a function which when called with an argument returns a unique |
|
482 | 482 | color for that argument, eg. |
|
483 | 483 | |
|
484 | 484 | :param n: number of colors to generate |
|
485 | 485 | :param saturation: saturation of returned colors |
|
486 | 486 | :param lightness: lightness of returned colors |
|
487 | 487 | :returns: css RGB string |
|
488 | 488 | |
|
489 | 489 | >>> color_hash = color_hasher() |
|
490 | 490 | >>> color_hash('hello') |
|
491 | 491 | 'rgb(34, 12, 59)' |
|
492 | 492 | >>> color_hash('hello') |
|
493 | 493 | 'rgb(34, 12, 59)' |
|
494 | 494 | >>> color_hash('other') |
|
495 | 495 | 'rgb(90, 224, 159)' |
|
496 | 496 | """ |
|
497 | 497 | |
|
498 | 498 | color_dict = {} |
|
499 | 499 | cgenerator = unique_color_generator( |
|
500 | 500 | saturation=saturation, lightness=lightness) |
|
501 | 501 | |
|
502 | 502 | def get_color_string(thing): |
|
503 | 503 | if thing in color_dict: |
|
504 | 504 | col = color_dict[thing] |
|
505 | 505 | else: |
|
506 | 506 | col = color_dict[thing] = cgenerator.next() |
|
507 | 507 | return "rgb(%s)" % (', '.join(col)) |
|
508 | 508 | |
|
509 | 509 | return get_color_string |
|
510 | 510 | |
|
511 | 511 | |
|
512 | 512 | def get_lexer_safe(mimetype=None, filepath=None): |
|
513 | 513 | """ |
|
514 | 514 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
515 | 515 | defaulting to plain text if none could be found |
|
516 | 516 | """ |
|
517 | 517 | lexer = None |
|
518 | 518 | try: |
|
519 | 519 | if mimetype: |
|
520 | 520 | lexer = get_lexer_for_mimetype(mimetype) |
|
521 | 521 | if not lexer: |
|
522 | 522 | lexer = get_lexer_for_filename(filepath) |
|
523 | 523 | except pygments.util.ClassNotFound: |
|
524 | 524 | pass |
|
525 | 525 | |
|
526 | 526 | if not lexer: |
|
527 | 527 | lexer = get_lexer_by_name('text') |
|
528 | 528 | |
|
529 | 529 | return lexer |
|
530 | 530 | |
|
531 | 531 | |
|
532 | 532 | def get_lexer_for_filenode(filenode): |
|
533 | 533 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
534 | 534 | return lexer |
|
535 | 535 | |
|
536 | 536 | |
|
537 | 537 | def pygmentize(filenode, **kwargs): |
|
538 | 538 | """ |
|
539 | 539 | pygmentize function using pygments |
|
540 | 540 | |
|
541 | 541 | :param filenode: |
|
542 | 542 | """ |
|
543 | 543 | lexer = get_lexer_for_filenode(filenode) |
|
544 | 544 | return literal(code_highlight(filenode.content, lexer, |
|
545 | 545 | CodeHtmlFormatter(**kwargs))) |
|
546 | 546 | |
|
547 | 547 | |
|
548 | 548 | def is_following_repo(repo_name, user_id): |
|
549 | 549 | from rhodecode.model.scm import ScmModel |
|
550 | 550 | return ScmModel().is_following_repo(repo_name, user_id) |
|
551 | 551 | |
|
552 | 552 | |
|
553 | 553 | class _Message(object): |
|
554 | 554 | """A message returned by ``Flash.pop_messages()``. |
|
555 | 555 | |
|
556 | 556 | Converting the message to a string returns the message text. Instances |
|
557 | 557 | also have the following attributes: |
|
558 | 558 | |
|
559 | 559 | * ``message``: the message text. |
|
560 | 560 | * ``category``: the category specified when the message was created. |
|
561 | 561 | """ |
|
562 | 562 | |
|
563 | 563 | def __init__(self, category, message): |
|
564 | 564 | self.category = category |
|
565 | 565 | self.message = message |
|
566 | 566 | |
|
567 | 567 | def __str__(self): |
|
568 | 568 | return self.message |
|
569 | 569 | |
|
570 | 570 | __unicode__ = __str__ |
|
571 | 571 | |
|
572 | 572 | def __html__(self): |
|
573 | 573 | return escape(safe_unicode(self.message)) |
|
574 | 574 | |
|
575 | 575 | |
|
576 | 576 | class Flash(object): |
|
577 | 577 | # List of allowed categories. If None, allow any category. |
|
578 | 578 | categories = ["warning", "notice", "error", "success"] |
|
579 | 579 | |
|
580 | 580 | # Default category if none is specified. |
|
581 | 581 | default_category = "notice" |
|
582 | 582 | |
|
583 | 583 | def __init__(self, session_key="flash", categories=None, |
|
584 | 584 | default_category=None): |
|
585 | 585 | """ |
|
586 | 586 | Instantiate a ``Flash`` object. |
|
587 | 587 | |
|
588 | 588 | ``session_key`` is the key to save the messages under in the user's |
|
589 | 589 | session. |
|
590 | 590 | |
|
591 | 591 | ``categories`` is an optional list which overrides the default list |
|
592 | 592 | of categories. |
|
593 | 593 | |
|
594 | 594 | ``default_category`` overrides the default category used for messages |
|
595 | 595 | when none is specified. |
|
596 | 596 | """ |
|
597 | 597 | self.session_key = session_key |
|
598 | 598 | if categories is not None: |
|
599 | 599 | self.categories = categories |
|
600 | 600 | if default_category is not None: |
|
601 | 601 | self.default_category = default_category |
|
602 | 602 | if self.categories and self.default_category not in self.categories: |
|
603 | 603 | raise ValueError( |
|
604 | 604 | "unrecognized default category %r" % (self.default_category,)) |
|
605 | 605 | |
|
606 | 606 | def pop_messages(self, session=None, request=None): |
|
607 | 607 | """ |
|
608 | 608 | Return all accumulated messages and delete them from the session. |
|
609 | 609 | |
|
610 | 610 | The return value is a list of ``Message`` objects. |
|
611 | 611 | """ |
|
612 | 612 | messages = [] |
|
613 | 613 | |
|
614 | 614 | if not session: |
|
615 | 615 | if not request: |
|
616 | 616 | request = get_current_request() |
|
617 | 617 | session = request.session |
|
618 | 618 | |
|
619 | 619 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
620 | 620 | # (category, message) |
|
621 | 621 | for cat, msg in session.pop(self.session_key, []): |
|
622 | 622 | messages.append(_Message(cat, msg)) |
|
623 | 623 | |
|
624 | 624 | # Pop the 'new' pyramid flash messages for each category as list |
|
625 | 625 | # of strings. |
|
626 | 626 | for cat in self.categories: |
|
627 | 627 | for msg in session.pop_flash(queue=cat): |
|
628 | 628 | messages.append(_Message(cat, msg)) |
|
629 | 629 | # Map messages from the default queue to the 'notice' category. |
|
630 | 630 | for msg in session.pop_flash(): |
|
631 | 631 | messages.append(_Message('notice', msg)) |
|
632 | 632 | |
|
633 | 633 | session.save() |
|
634 | 634 | return messages |
|
635 | 635 | |
|
636 | 636 | def json_alerts(self, session=None, request=None): |
|
637 | 637 | payloads = [] |
|
638 | 638 | messages = flash.pop_messages(session=session, request=request) |
|
639 | 639 | if messages: |
|
640 | 640 | for message in messages: |
|
641 | 641 | subdata = {} |
|
642 | 642 | if hasattr(message.message, 'rsplit'): |
|
643 | 643 | flash_data = message.message.rsplit('|DELIM|', 1) |
|
644 | 644 | org_message = flash_data[0] |
|
645 | 645 | if len(flash_data) > 1: |
|
646 | 646 | subdata = json.loads(flash_data[1]) |
|
647 | 647 | else: |
|
648 | 648 | org_message = message.message |
|
649 | 649 | payloads.append({ |
|
650 | 650 | 'message': { |
|
651 | 651 | 'message': u'{}'.format(org_message), |
|
652 | 652 | 'level': message.category, |
|
653 | 653 | 'force': True, |
|
654 | 654 | 'subdata': subdata |
|
655 | 655 | } |
|
656 | 656 | }) |
|
657 | 657 | return json.dumps(payloads) |
|
658 | 658 | |
|
659 | 659 | def __call__(self, message, category=None, ignore_duplicate=False, |
|
660 | 660 | session=None, request=None): |
|
661 | 661 | |
|
662 | 662 | if not session: |
|
663 | 663 | if not request: |
|
664 | 664 | request = get_current_request() |
|
665 | 665 | session = request.session |
|
666 | 666 | |
|
667 | 667 | session.flash( |
|
668 | 668 | message, queue=category, allow_duplicate=not ignore_duplicate) |
|
669 | 669 | |
|
670 | 670 | |
|
671 | 671 | flash = Flash() |
|
672 | 672 | |
|
673 | 673 | #============================================================================== |
|
674 | 674 | # SCM FILTERS available via h. |
|
675 | 675 | #============================================================================== |
|
676 | 676 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
677 | 677 | from rhodecode.lib.utils2 import credentials_filter, age, age_from_seconds |
|
678 | 678 | from rhodecode.model.db import User, ChangesetStatus |
|
679 | 679 | |
|
680 | 680 | capitalize = lambda x: x.capitalize() |
|
681 | 681 | email = author_email |
|
682 | 682 | short_id = lambda x: x[:12] |
|
683 | 683 | hide_credentials = lambda x: ''.join(credentials_filter(x)) |
|
684 | 684 | |
|
685 | 685 | |
|
import pytz
import tzlocal
# timezone of the server process, resolved once at import time
local_timezone = tzlocal.get_localzone()
|
689 | 689 | |
|
690 | 690 | |
|
def age_component(datetime_iso, value=None, time_is_local=False):
    """
    Render a ``<time>`` element for the client-side "timeago" widget.

    :param datetime_iso: datetime used for the ``datetime`` attribute
    :param value: optional pre-formatted title, defaults to format_date()
    :param time_is_local: when True and the datetime is naive, a UTC offset
        is computed (``RC_TIMEZONE`` env var wins over the server-local tz)
    """
    display_title = value or format_date(datetime_iso)

    is_naive_local = (
        time_is_local
        and isinstance(datetime_iso, datetime)
        and not datetime_iso.tzinfo)

    if is_naive_local:
        # datetime carries no tz info - compute the offset ourselves
        env_tz_name = os.environ.get('RC_TIMEZONE', '')
        env_tz = pytz.timezone(env_tz_name) if env_tz_name else ''
        active_tz = env_tz or local_timezone
        utc_offset = active_tz.localize(datetime_iso).strftime('%z')
        tzinfo = '{}:{}'.format(utc_offset[:-2], utc_offset[-2:])
    else:
        tzinfo = '+00:00'

    return literal(
        '<time class="timeago tooltip" '
        'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, display_title, tzinfo))
|
708 | 708 | |
|
709 | 709 | |
|
710 | 710 | def _shorten_commit_id(commit_id, commit_len=None): |
|
711 | 711 | if commit_len is None: |
|
712 | 712 | request = get_current_request() |
|
713 | 713 | commit_len = request.call_context.visual.show_sha_length |
|
714 | 714 | return commit_id[:commit_len] |
|
715 | 715 | |
|
716 | 716 | |
|
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    :param show_idx: prefix with ``r<idx>:``; defaults to the
        ``show_revision_number`` visual setting of the current request
    :param commit_len: override for the shortened sha length
    """
    if show_idx is None:
        current_request = get_current_request()
        show_idx = current_request.call_context.visual.show_revision_number

    short_sha = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if not show_idx:
        return '%s' % (short_sha, )
    return 'r%s:%s' % (commit.idx, short_sha)
|
733 | 733 | |
|
734 | 734 | |
|
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object (may be None/empty)
    :return: formatted date as unicode, empty string for falsy input
    """
    if not date:
        return u""

    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
|
748 | 748 | |
|
749 | 749 | |
|
750 | 750 | class _RepoChecker(object): |
|
751 | 751 | |
|
752 | 752 | def __init__(self, backend_alias): |
|
753 | 753 | self._backend_alias = backend_alias |
|
754 | 754 | |
|
755 | 755 | def __call__(self, repository): |
|
756 | 756 | if hasattr(repository, 'alias'): |
|
757 | 757 | _type = repository.alias |
|
758 | 758 | elif hasattr(repository, 'repo_type'): |
|
759 | 759 | _type = repository.repo_type |
|
760 | 760 | else: |
|
761 | 761 | _type = repository |
|
762 | 762 | return _type == self._backend_alias |
|
763 | 763 | |
|
764 | 764 | |
|
# module-level backend predicates, e.g. ``is_git(repo)`` -> bool
is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
|
768 | 768 | |
|
769 | 769 | |
|
def get_repo_type_by_name(repo_name):
    """
    Return the backend type of the named repository, or None when no such
    repository exists.
    """
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type if repo else None
|
774 | 774 | |
|
775 | 775 | |
|
def is_svn_without_proxy(repository):
    """
    True when *repository* is a subversion repo and the svn http proxy is
    disabled in the vcs settings; False otherwise.
    """
    if not is_svn(repository):
        return False

    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    proxy_enabled = str2bool(
        conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return not proxy_enabled
|
782 | 782 | |
|
783 | 783 | |
|
def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. Author
    string is typically `FirstName LastName <email@address.com>`.
    Returns None when no matching user is found.
    """
    # already a User instance - nothing to discover
    if isinstance(author, User):
        return author

    # valid email in the attribute passed, see if they're in the system
    extracted_email = author_email(author)
    if extracted_email != '':
        user = User.get_by_email(
            extracted_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # maybe it's a username - try to extract and fetch by username
    extracted_name = author_name(author)
    return User.get_by_username(
        extracted_name, case_insensitive=True, cache=True)
|
808 | 808 | |
|
809 | 809 | |
|
def email_or_none(author):
    """
    Return an email for the author string: the embedded email if present,
    otherwise the email of the matching RhodeCode user, else None.
    """
    embedded_email = author_email(author)
    if embedded_email != '':
        return embedded_email

    # no embedded email - see if the name part maps to a known user
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # no valid email, not a valid user in the system, none!
    return None
|
827 | 827 | |
|
828 | 828 | |
|
def link_to_user(author, length=0, **kwargs):
    """
    Render a (possibly shortened) display name for *author*, linked to the
    user profile when the author maps to a known RhodeCode user.
    """
    user = discover_user(author)
    # when a user was found, hand the instance down to person() so it can
    # skip one intensive re-discovery query
    display_person = person(user or author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if not user:
        return escape(display_person)

    return link_to(
        escape(display_person),
        route_path('user_profile', username=user.username),
        **kwargs)
|
847 | 847 | |
|
848 | 848 | |
|
def link_to_group(users_group_name, **kwargs):
    """Render a link to the given user group's profile page."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
|
854 | 854 | |
|
855 | 855 | |
|
def person(author, show_attr="username_and_name"):
    """
    Return *show_attr* of the user discovered from the author string;
    falls back to the raw author name (or email) when no user matches.
    """
    user = discover_user(author)
    if user is not None:
        return getattr(user, show_attr)
    return author_name(author) or email(author)
|
864 | 864 | |
|
865 | 865 | |
|
def author_string(email):
    """
    Expand a bare email into ``First Last <email>`` when it belongs to a
    known user with a name set; otherwise return the email unchanged
    (None for falsy input).
    """
    if not email:
        return None

    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s <%s>' % (user.first_name, user.last_name, email)
    return email
|
879 | 879 | |
|
880 | 880 | |
|
def person_by_id(id_, show_attr="username_and_name"):
    """
    Like person(), but keyed by user id. Returns the id itself (coerced
    to int when it looks numeric) if no matching user exists.
    """
    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            # attr to return from the fetched user
            return getattr(user, show_attr)
    return id_
|
892 | 892 | |
|
893 | 893 | |
|
def gravatar_with_user(request, author, show_disabled=False):
    """Render the ``gravatar_with_user`` def from base.mako for *author*."""
    base_renderer = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return base_renderer(
        'gravatar_with_user', author, show_disabled=show_disabled)
|
898 | 898 | |
|
899 | 899 | |
|
# Supported repository meta-tags: ordered mapping of tag name to a tuple of
# (compiled extraction regex, html replacement template). The order is
# significant: specific patterns must be tried before the 'label' and
# 'generic' catch-alls below.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\>\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
|
927 | 927 | |
|
928 | 928 | |
|
def extract_metatags(value):
    """
    Extract supported meta-tags from given text value

    :return: tuple of (list of (tag_name, matched_text), value with all
        recognized tags stripped out)
    """
    if not value:
        return [], ''

    found_tags = []
    for tag_name, (pattern, _replace_html) in tags_paterns.items():
        found_tags.extend(
            (tag_name, match.group()) for match in pattern.finditer(value))
        value = pattern.sub('', value)

    return found_tags, value
|
943 | 943 | |
|
944 | 944 | |
|
def style_metatag(tag_type, value):
    """
    converts tags from value into html equivalent
    """
    if not value:
        return ''

    tag_data = tags_paterns.get(tag_type)
    if not tag_data:
        # unknown tag type - return the raw value unchanged
        return value

    pattern, replace_html = tag_data
    # convert to plain `unicode` instead of a markup tag to be used in
    # regex expressions. safe_unicode doesn't work here
    return pattern.sub(replace_html, unicode(value))
|
961 | 961 | |
|
962 | 962 | |
|
def bool2icon(value, show_at_false=True):
    """
    Render the truthiness of *value* as an ``<i>`` icon element.

    :param value: given value to convert to html node
    :param show_at_false: when False, falsy values render a bare icon tag
    """
    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true")
    if show_at_false:
        return HTML.tag('i', class_="icon-false")
    return HTML.tag('i')
|
977 | 977 | |
|
978 | 978 | #============================================================================== |
|
979 | 979 | # PERMS |
|
980 | 980 | #============================================================================== |
|
981 | 981 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ |
|
982 | 982 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \ |
|
983 | 983 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \ |
|
984 | 984 | csrf_token_key |
|
985 | 985 | |
|
986 | 986 | |
|
987 | 987 | #============================================================================== |
|
988 | 988 | # GRAVATAR URL |
|
989 | 989 | #============================================================================== |
|
class InitialsGravatar(object):
    """
    Generator of inline-SVG "initials" avatars: two initials derived from
    the name/email, drawn on a background color deterministically picked
    from the email address.
    """

    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        # size: pixel size of the (square) generated svg
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # background defaults to a color derived from the email address
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # NOTE(review): str.encode('hex') is Python2-only
        return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        # first digest byte modulo bank size -> stable index into the bank
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str: email address to derive a color from
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        # Normalize an email to ascii with a guaranteed '@host' part,
        # filling in defaults for empty/partial addresses.
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        # NFKD-fold accented characters, then drop anything non-ascii
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to their ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        # Pre-baked svg for special avatar types; currently only the
        # generic "default_user" silhouette.
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        # Return the avatar as a base64 data-uri suitable for an <img src>.
        # NOTE(review): str.encode('base64') is Python2-only
        img_data = self.get_img_data(svg_type)
        return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
|
1234 | 1234 | |
|
1235 | 1235 | |
|
def initials_gravatar(email_address, first_name, last_name, size=30):
    """
    Generate an inline-svg initials avatar data-uri for the identity; the
    default user email gets the generic 'default_user' image instead.
    """
    is_default = email_address == User.DEFAULT_USER_EMAIL
    svg_type = 'default_user' if is_default else None
    gravatar = InitialsGravatar(email_address, first_name, last_name, size)
    return gravatar.generate_svg(svg_type=svg_type)
|
1242 | 1242 | |
|
1243 | 1243 | |
|
def gravatar_url(email_address, size=30, request=None):
    """
    Return an avatar url for *email_address*: the configured gravatar url
    template with its placeholders filled in, or an inline svg-initials
    data-uri when gravatars are disabled or the email is empty/default.

    :param email_address: email to resolve an avatar for
    :param size: pixel size of the avatar
    :param request: optional pyramid request; falls back to the
        thread-local current request when not given
    """
    # bug fix: honor an explicitly passed request - previously the
    # parameter was ignored and always clobbered by get_current_request()
    request = request or get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar
    _gravatar_url = request.call_context.visual.gravatar_url

    _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user gets the generic initials avatar
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        # TODO: Disuse pyramid thread locals. Think about another solution to
        # get the host and schema here.
        tmpl = safe_str(_gravatar_url)
        tmpl = tmpl.replace('{email}', email_address)\
            .replace('{md5email}', md5_safe(email_address.lower())) \
            .replace('{netloc}', request.host)\
            .replace('{scheme}', request.scheme)\
            .replace('{size}', safe_str(size))
        return tmpl
    else:
        return initials_gravatar(email_address, '', '', size=size)
|
1273 | 1273 | |
|
1274 | 1274 | |
|
1275 | 1275 | class Page(_Page): |
|
1276 | 1276 | """ |
|
1277 | 1277 | Custom pager to match rendering style with paginator |
|
1278 | 1278 | """ |
|
1279 | 1279 | |
|
    def _get_pos(self, cur_page, max_page, items):
        """
        Compute the (leftmost, current, rightmost) page numbers of a pager
        window of *items* entries around *cur_page*, clamped to
        [1, max_page]. Near either edge the radius is extended towards the
        opposite side so the window keeps its width where possible.

        NOTE(review): `items / 2` relies on Python 2 integer division;
        under Python 3 these become floats.
        """
        edge = (items / 2) + 1
        if (cur_page <= edge):
            # near the left edge - extend the window to the right
            radius = max(items / 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            # near the right edge - extend the window to the left
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items / 2

        left = max(1, (cur_page - (radius)))
        right = min(max_page, cur_page + (radius))
        return left, cur_page, right
|
1292 | 1292 | |
|
    def _range(self, regexp_match):
        """
        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').

        Arguments:

        regexp_match
            A "re" (regular expressions) match object containing the
            radius of linked pages around the current page in
            regexp_match.group(1) as a string

        This function is supposed to be called as a callable in
        re.sub.

        """
        radius = int(regexp_match.group(1))

        # Compute the first and last page number within the radius
        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
        #  -> leftmost_page  = 5
        #  -> rightmost_page = 9
        leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
                                                            self.last_page,
                                                            (radius * 2) + 1)
        nav_items = []

        # Create a link to the first page (unless we are on the first page
        # or there would be no need to insert '..' spacers)
        if self.page != self.first_page and self.first_page < leftmost_page:
            nav_items.append(self._pagerlink(self.first_page, self.first_page))

        # Insert dots if there are pages between the first page
        # and the currently displayed page range
        if leftmost_page - self.first_page > 1:
            # Wrap in a SPAN tag if nolink_attr is set
            text = '..'
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        # NOTE(review): xrange is Python2-only
        for thispage in xrange(leftmost_page, rightmost_page + 1):
            # Hilight the current page number and do not use a link
            if thispage == self.page:
                text = '%s' % (thispage,)
                # Wrap in a SPAN tag if nolink_attr is set
                if self.curpage_attr:
                    text = HTML.span(c=text, **self.curpage_attr)
                nav_items.append(text)
            # Otherwise create just a link to that page
            else:
                text = '%s' % (thispage,)
                nav_items.append(self._pagerlink(thispage, text))

        # Insert dots if there are pages between the displayed
        # page numbers and the end of the page range
        if self.last_page - rightmost_page > 1:
            text = '..'
            # Wrap in a SPAN tag if nolink_attr is set
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        # Create a link to the very last page (unless we are on the last
        # page or there would be no need to insert '..' spacers)
        if self.page != self.last_page and rightmost_page < self.last_page:
            nav_items.append(self._pagerlink(self.last_page, self.last_page))

        ## prerender links
        #_page_link = url.current()
        #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        return self.separator.join(nav_items)
|
1365 | 1365 | |
|
1366 | 1366 | def pager(self, format='~2~', page_param='page', partial_param='partial', |
|
1367 | 1367 | show_if_single_page=False, separator=' ', onclick=None, |
|
1368 | 1368 | symbol_first='<<', symbol_last='>>', |
|
1369 | 1369 | symbol_previous='<', symbol_next='>', |
|
1370 | 1370 | link_attr={'class': 'pager_link', 'rel': 'prerender'}, |
|
1371 | 1371 | curpage_attr={'class': 'pager_curpage'}, |
|
1372 | 1372 | dotdot_attr={'class': 'pager_dotdot'}, **kwargs): |
|
1373 | 1373 | |
|
1374 | 1374 | self.curpage_attr = curpage_attr |
|
1375 | 1375 | self.separator = separator |
|
1376 | 1376 | self.pager_kwargs = kwargs |
|
1377 | 1377 | self.page_param = page_param |
|
1378 | 1378 | self.partial_param = partial_param |
|
1379 | 1379 | self.onclick = onclick |
|
1380 | 1380 | self.link_attr = link_attr |
|
1381 | 1381 | self.dotdot_attr = dotdot_attr |
|
1382 | 1382 | |
|
1383 | 1383 | # Don't show navigator if there is no more than one page |
|
1384 | 1384 | if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): |
|
1385 | 1385 | return '' |
|
1386 | 1386 | |
|
1387 | 1387 | from string import Template |
|
1388 | 1388 | # Replace ~...~ in token format by range of pages |
|
1389 | 1389 | result = re.sub(r'~(\d+)~', self._range, format) |
|
1390 | 1390 | |
|
1391 | 1391 | # Interpolate '%' variables |
|
1392 | 1392 | result = Template(result).safe_substitute({ |
|
1393 | 1393 | 'first_page': self.first_page, |
|
1394 | 1394 | 'last_page': self.last_page, |
|
1395 | 1395 | 'page': self.page, |
|
1396 | 1396 | 'page_count': self.page_count, |
|
1397 | 1397 | 'items_per_page': self.items_per_page, |
|
1398 | 1398 | 'first_item': self.first_item, |
|
1399 | 1399 | 'last_item': self.last_item, |
|
1400 | 1400 | 'item_count': self.item_count, |
|
1401 | 1401 | 'link_first': self.page > self.first_page and \ |
|
1402 | 1402 | self._pagerlink(self.first_page, symbol_first) or '', |
|
1403 | 1403 | 'link_last': self.page < self.last_page and \ |
|
1404 | 1404 | self._pagerlink(self.last_page, symbol_last) or '', |
|
1405 | 1405 | 'link_previous': self.previous_page and \ |
|
1406 | 1406 | self._pagerlink(self.previous_page, symbol_previous) \ |
|
1407 | 1407 | or HTML.span(symbol_previous, class_="pg-previous disabled"), |
|
1408 | 1408 | 'link_next': self.next_page and \ |
|
1409 | 1409 | self._pagerlink(self.next_page, symbol_next) \ |
|
1410 | 1410 | or HTML.span(symbol_next, class_="pg-next disabled") |
|
1411 | 1411 | }) |
|
1412 | 1412 | |
|
1413 | 1413 | return literal(result) |
|
1414 | 1414 | |
|
1415 | 1415 | |
|
1416 | 1416 | #============================================================================== |
|
1417 | 1417 | # REPO PAGER, PAGER FOR REPOSITORY |
|
1418 | 1418 | #============================================================================== |
|
1419 | 1419 | class RepoPage(Page): |
|
1420 | 1420 | |
|
1421 | 1421 | def __init__(self, collection, page=1, items_per_page=20, |
|
1422 | 1422 | item_count=None, url=None, **kwargs): |
|
1423 | 1423 | |
|
1424 | 1424 | """Create a "RepoPage" instance. special pager for paging |
|
1425 | 1425 | repository |
|
1426 | 1426 | """ |
|
1427 | 1427 | self._url_generator = url |
|
1428 | 1428 | |
|
1429 | 1429 | # Safe the kwargs class-wide so they can be used in the pager() method |
|
1430 | 1430 | self.kwargs = kwargs |
|
1431 | 1431 | |
|
1432 | 1432 | # Save a reference to the collection |
|
1433 | 1433 | self.original_collection = collection |
|
1434 | 1434 | |
|
1435 | 1435 | self.collection = collection |
|
1436 | 1436 | |
|
1437 | 1437 | # The self.page is the number of the current page. |
|
1438 | 1438 | # The first page has the number 1! |
|
1439 | 1439 | try: |
|
1440 | 1440 | self.page = int(page) # make it int() if we get it as a string |
|
1441 | 1441 | except (ValueError, TypeError): |
|
1442 | 1442 | self.page = 1 |
|
1443 | 1443 | |
|
1444 | 1444 | self.items_per_page = items_per_page |
|
1445 | 1445 | |
|
1446 | 1446 | # Unless the user tells us how many items the collections has |
|
1447 | 1447 | # we calculate that ourselves. |
|
1448 | 1448 | if item_count is not None: |
|
1449 | 1449 | self.item_count = item_count |
|
1450 | 1450 | else: |
|
1451 | 1451 | self.item_count = len(self.collection) |
|
1452 | 1452 | |
|
1453 | 1453 | # Compute the number of the first and last available page |
|
1454 | 1454 | if self.item_count > 0: |
|
1455 | 1455 | self.first_page = 1 |
|
1456 | 1456 | self.page_count = int(math.ceil(float(self.item_count) / |
|
1457 | 1457 | self.items_per_page)) |
|
1458 | 1458 | self.last_page = self.first_page + self.page_count - 1 |
|
1459 | 1459 | |
|
1460 | 1460 | # Make sure that the requested page number is the range of |
|
1461 | 1461 | # valid pages |
|
1462 | 1462 | if self.page > self.last_page: |
|
1463 | 1463 | self.page = self.last_page |
|
1464 | 1464 | elif self.page < self.first_page: |
|
1465 | 1465 | self.page = self.first_page |
|
1466 | 1466 | |
|
1467 | 1467 | # Note: the number of items on this page can be less than |
|
1468 | 1468 | # items_per_page if the last page is not full |
|
1469 | 1469 | self.first_item = max(0, (self.item_count) - (self.page * |
|
1470 | 1470 | items_per_page)) |
|
1471 | 1471 | self.last_item = ((self.item_count - 1) - items_per_page * |
|
1472 | 1472 | (self.page - 1)) |
|
1473 | 1473 | |
|
1474 | 1474 | self.items = list(self.collection[self.first_item:self.last_item + 1]) |
|
1475 | 1475 | |
|
1476 | 1476 | # Links to previous and next page |
|
1477 | 1477 | if self.page > self.first_page: |
|
1478 | 1478 | self.previous_page = self.page - 1 |
|
1479 | 1479 | else: |
|
1480 | 1480 | self.previous_page = None |
|
1481 | 1481 | |
|
1482 | 1482 | if self.page < self.last_page: |
|
1483 | 1483 | self.next_page = self.page + 1 |
|
1484 | 1484 | else: |
|
1485 | 1485 | self.next_page = None |
|
1486 | 1486 | |
|
1487 | 1487 | # No items available |
|
1488 | 1488 | else: |
|
1489 | 1489 | self.first_page = None |
|
1490 | 1490 | self.page_count = 0 |
|
1491 | 1491 | self.last_page = None |
|
1492 | 1492 | self.first_item = None |
|
1493 | 1493 | self.last_item = None |
|
1494 | 1494 | self.previous_page = None |
|
1495 | 1495 | self.next_page = None |
|
1496 | 1496 | self.items = [] |
|
1497 | 1497 | |
|
1498 | 1498 | # This is a subclass of the 'list' type. Initialise the list now. |
|
1499 | 1499 | list.__init__(self, reversed(self.items)) |
|
1500 | 1500 | |
|
1501 | 1501 | |
|
1502 | 1502 | def breadcrumb_repo_link(repo): |
|
1503 | 1503 | """ |
|
1504 | 1504 | Makes a breadcrumbs path link to repo |
|
1505 | 1505 | |
|
1506 | 1506 | ex:: |
|
1507 | 1507 | group >> subgroup >> repo |
|
1508 | 1508 | |
|
1509 | 1509 | :param repo: a Repository instance |
|
1510 | 1510 | """ |
|
1511 | 1511 | |
|
1512 | 1512 | path = [ |
|
1513 | 1513 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name), |
|
1514 | 1514 | title='last change:{}'.format(format_date(group.last_commit_change))) |
|
1515 | 1515 | for group in repo.groups_with_parents |
|
1516 | 1516 | ] + [ |
|
1517 | 1517 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name), |
|
1518 | 1518 | title='last change:{}'.format(format_date(repo.last_commit_change))) |
|
1519 | 1519 | ] |
|
1520 | 1520 | |
|
1521 | 1521 | return literal(' » '.join(path)) |
|
1522 | 1522 | |
|
1523 | 1523 | |
|
1524 | 1524 | def breadcrumb_repo_group_link(repo_group): |
|
1525 | 1525 | """ |
|
1526 | 1526 | Makes a breadcrumbs path link to repo |
|
1527 | 1527 | |
|
1528 | 1528 | ex:: |
|
1529 | 1529 | group >> subgroup |
|
1530 | 1530 | |
|
1531 | 1531 | :param repo_group: a Repository Group instance |
|
1532 | 1532 | """ |
|
1533 | 1533 | |
|
1534 | 1534 | path = [ |
|
1535 | 1535 | link_to(group.name, |
|
1536 | 1536 | route_path('repo_group_home', repo_group_name=group.group_name), |
|
1537 | 1537 | title='last change:{}'.format(format_date(group.last_commit_change))) |
|
1538 | 1538 | for group in repo_group.parents |
|
1539 | 1539 | ] + [ |
|
1540 | 1540 | link_to(repo_group.name, |
|
1541 | 1541 | route_path('repo_group_home', repo_group_name=repo_group.group_name), |
|
1542 | 1542 | title='last change:{}'.format(format_date(repo_group.last_commit_change))) |
|
1543 | 1543 | ] |
|
1544 | 1544 | |
|
1545 | 1545 | return literal(' » '.join(path)) |
|
1546 | 1546 | |
|
1547 | 1547 | |
|
1548 | 1548 | def format_byte_size_binary(file_size): |
|
1549 | 1549 | """ |
|
1550 | 1550 | Formats file/folder sizes to standard. |
|
1551 | 1551 | """ |
|
1552 | 1552 | if file_size is None: |
|
1553 | 1553 | file_size = 0 |
|
1554 | 1554 | |
|
1555 | 1555 | formatted_size = format_byte_size(file_size, binary=True) |
|
1556 | 1556 | return formatted_size |
|
1557 | 1557 | |
|
1558 | 1558 | |
|
1559 | 1559 | def urlify_text(text_, safe=True): |
|
1560 | 1560 | """ |
|
1561 | 1561 | Extrac urls from text and make html links out of them |
|
1562 | 1562 | |
|
1563 | 1563 | :param text_: |
|
1564 | 1564 | """ |
|
1565 | 1565 | |
|
1566 | 1566 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1567 | 1567 | '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1568 | 1568 | |
|
1569 | 1569 | def url_func(match_obj): |
|
1570 | 1570 | url_full = match_obj.groups()[0] |
|
1571 | 1571 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
1572 | 1572 | _newtext = url_pat.sub(url_func, text_) |
|
1573 | 1573 | if safe: |
|
1574 | 1574 | return literal(_newtext) |
|
1575 | 1575 | return _newtext |
|
1576 | 1576 | |
|
1577 | 1577 | |
|
1578 | 1578 | def urlify_commits(text_, repository): |
|
1579 | 1579 | """ |
|
1580 | 1580 | Extract commit ids from text and make link from them |
|
1581 | 1581 | |
|
1582 | 1582 | :param text_: |
|
1583 | 1583 | :param repository: repo name to build the URL with |
|
1584 | 1584 | """ |
|
1585 | 1585 | |
|
1586 | 1586 | URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1587 | 1587 | |
|
1588 | 1588 | def url_func(match_obj): |
|
1589 | 1589 | commit_id = match_obj.groups()[1] |
|
1590 | 1590 | pref = match_obj.groups()[0] |
|
1591 | 1591 | suf = match_obj.groups()[2] |
|
1592 | 1592 | |
|
1593 | 1593 | tmpl = ( |
|
1594 | 1594 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1595 | 1595 | '%(commit_id)s</a>%(suf)s' |
|
1596 | 1596 | ) |
|
1597 | 1597 | return tmpl % { |
|
1598 | 1598 | 'pref': pref, |
|
1599 | 1599 | 'cls': 'revision-link', |
|
1600 | 1600 | 'url': route_url('repo_commit', repo_name=repository, commit_id=commit_id), |
|
1601 | 1601 | 'commit_id': commit_id, |
|
1602 | 1602 | 'suf': suf |
|
1603 | 1603 | } |
|
1604 | 1604 | |
|
1605 | 1605 | newtext = URL_PAT.sub(url_func, text_) |
|
1606 | 1606 | |
|
1607 | 1607 | return newtext |
|
1608 | 1608 | |
|
1609 | 1609 | |
|
1610 | 1610 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1611 | 1611 | return_raw_data=False, link_format='html'): |
|
1612 | 1612 | pref = '' |
|
1613 | 1613 | if match_obj.group().startswith(' '): |
|
1614 | 1614 | pref = ' ' |
|
1615 | 1615 | |
|
1616 | 1616 | issue_id = ''.join(match_obj.groups()) |
|
1617 | 1617 | |
|
1618 | 1618 | if link_format == 'html': |
|
1619 | 1619 | tmpl = ( |
|
1620 | 1620 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1621 | 1621 | '%(issue-prefix)s%(id-repr)s' |
|
1622 | 1622 | '</a>') |
|
1623 | 1623 | elif link_format == 'rst': |
|
1624 | 1624 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1625 | 1625 | elif link_format == 'markdown': |
|
1626 | 1626 | tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1627 | 1627 | else: |
|
1628 | 1628 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1629 | 1629 | |
|
1630 | 1630 | (repo_name_cleaned, |
|
1631 | 1631 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name) |
|
1632 | 1632 | |
|
1633 | 1633 | # variables replacement |
|
1634 | 1634 | named_vars = { |
|
1635 | 1635 | 'id': issue_id, |
|
1636 | 1636 | 'repo': repo_name, |
|
1637 | 1637 | 'repo_name': repo_name_cleaned, |
|
1638 | 1638 | 'group_name': parent_group_name |
|
1639 | 1639 | } |
|
1640 | 1640 | # named regex variables |
|
1641 | 1641 | named_vars.update(match_obj.groupdict()) |
|
1642 | 1642 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1643 | 1643 | |
|
1644 | 1644 | def quote_cleaner(input_str): |
|
1645 | 1645 | """Remove quotes as it's HTML""" |
|
1646 | 1646 | return input_str.replace('"', '') |
|
1647 | 1647 | |
|
1648 | 1648 | data = { |
|
1649 | 1649 | 'pref': pref, |
|
1650 | 1650 | 'cls': quote_cleaner('issue-tracker-link'), |
|
1651 | 1651 | 'url': quote_cleaner(_url), |
|
1652 | 1652 | 'id-repr': issue_id, |
|
1653 | 1653 | 'issue-prefix': entry['pref'], |
|
1654 | 1654 | 'serv': entry['url'], |
|
1655 | 1655 | } |
|
1656 | 1656 | if return_raw_data: |
|
1657 | 1657 | return { |
|
1658 | 1658 | 'id': issue_id, |
|
1659 | 1659 | 'url': _url |
|
1660 | 1660 | } |
|
1661 | 1661 | return tmpl % data |
|
1662 | 1662 | |
|
1663 | 1663 | |
|
1664 | 1664 | def get_active_pattern_entries(repo_name): |
|
1665 | 1665 | repo = None |
|
1666 | 1666 | if repo_name: |
|
1667 | 1667 | # Retrieving repo_name to avoid invalid repo_name to explode on |
|
1668 | 1668 | # IssueTrackerSettingsModel but still passing invalid name further down |
|
1669 | 1669 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1670 | 1670 | |
|
1671 | 1671 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1672 | 1672 | active_entries = settings_model.get_settings(cache=True) |
|
1673 | 1673 | return active_entries |
|
1674 | 1674 | |
|
1675 | 1675 | |
|
1676 | 1676 | def process_patterns(text_string, repo_name, link_format='html', active_entries=None): |
|
1677 | 1677 | |
|
1678 | 1678 | allowed_formats = ['html', 'rst', 'markdown'] |
|
1679 | 1679 | if link_format not in allowed_formats: |
|
1680 | 1680 | raise ValueError('Link format can be only one of:{} got {}'.format( |
|
1681 | 1681 | allowed_formats, link_format)) |
|
1682 | 1682 | |
|
1683 | 1683 | active_entries = active_entries or get_active_pattern_entries(repo_name) |
|
1684 | 1684 | issues_data = [] |
|
1685 | 1685 | newtext = text_string |
|
1686 | 1686 | |
|
1687 | 1687 | for uid, entry in active_entries.items(): |
|
1688 | 1688 | log.debug('found issue tracker entry with uid %s', uid) |
|
1689 | 1689 | |
|
1690 | 1690 | if not (entry['pat'] and entry['url']): |
|
1691 | 1691 | log.debug('skipping due to missing data') |
|
1692 | 1692 | continue |
|
1693 | 1693 | |
|
1694 | 1694 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s', |
|
1695 | 1695 | uid, entry['pat'], entry['url'], entry['pref']) |
|
1696 | 1696 | |
|
1697 | 1697 | try: |
|
1698 | 1698 | pattern = re.compile(r'%s' % entry['pat']) |
|
1699 | 1699 | except re.error: |
|
1700 | 1700 | log.exception( |
|
1701 | 1701 | 'issue tracker pattern: `%s` failed to compile', |
|
1702 | 1702 | entry['pat']) |
|
1703 | 1703 | continue |
|
1704 | 1704 | |
|
1705 | 1705 | data_func = partial( |
|
1706 | 1706 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1707 | 1707 | return_raw_data=True) |
|
1708 | 1708 | |
|
1709 | 1709 | for match_obj in pattern.finditer(text_string): |
|
1710 | 1710 | issues_data.append(data_func(match_obj)) |
|
1711 | 1711 | |
|
1712 | 1712 | url_func = partial( |
|
1713 | 1713 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1714 | 1714 | link_format=link_format) |
|
1715 | 1715 | |
|
1716 | 1716 | newtext = pattern.sub(url_func, newtext) |
|
1717 | 1717 | log.debug('processed prefix:uid `%s`', uid) |
|
1718 | 1718 | |
|
1719 | 1719 | return newtext, issues_data |
|
1720 | 1720 | |
|
1721 | 1721 | |
|
1722 | 1722 | def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None): |
|
1723 | 1723 | """ |
|
1724 | 1724 | Parses given text message and makes proper links. |
|
1725 | 1725 | issues are linked to given issue-server, and rest is a commit link |
|
1726 | 1726 | |
|
1727 | 1727 | :param commit_text: |
|
1728 | 1728 | :param repository: |
|
1729 | 1729 | """ |
|
1730 | 1730 | def escaper(string): |
|
1731 | 1731 | return string.replace('<', '<').replace('>', '>') |
|
1732 | 1732 | |
|
1733 | 1733 | newtext = escaper(commit_text) |
|
1734 | 1734 | |
|
1735 | 1735 | # extract http/https links and make them real urls |
|
1736 | 1736 | newtext = urlify_text(newtext, safe=False) |
|
1737 | 1737 | |
|
1738 | 1738 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1739 | 1739 | # the scope of repository present. |
|
1740 | 1740 | if repository: |
|
1741 | 1741 | newtext = urlify_commits(newtext, repository) |
|
1742 | 1742 | |
|
1743 | 1743 | # process issue tracker patterns |
|
1744 | 1744 | newtext, issues = process_patterns(newtext, repository or '', |
|
1745 | 1745 | active_entries=active_pattern_entries) |
|
1746 | 1746 | |
|
1747 | 1747 | return literal(newtext) |
|
1748 | 1748 | |
|
1749 | 1749 | |
|
1750 | 1750 | def render_binary(repo_name, file_obj): |
|
1751 | 1751 | """ |
|
1752 | 1752 | Choose how to render a binary file |
|
1753 | 1753 | """ |
|
1754 | 1754 | |
|
1755 | 1755 | filename = file_obj.name |
|
1756 | 1756 | |
|
1757 | 1757 | # images |
|
1758 | 1758 | for ext in ['*.png', '*.jpg', '*.ico', '*.gif']: |
|
1759 | 1759 | if fnmatch.fnmatch(filename, pat=ext): |
|
1760 | 1760 | alt = escape(filename) |
|
1761 | 1761 | src = route_path( |
|
1762 | 1762 | 'repo_file_raw', repo_name=repo_name, |
|
1763 | 1763 | commit_id=file_obj.commit.raw_id, |
|
1764 | 1764 | f_path=file_obj.path) |
|
1765 | 1765 | return literal( |
|
1766 | 1766 | '<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src)) |
|
1767 | 1767 | |
|
1768 | 1768 | |
|
1769 | 1769 | def renderer_from_filename(filename, exclude=None): |
|
1770 | 1770 | """ |
|
1771 | 1771 | choose a renderer based on filename, this works only for text based files |
|
1772 | 1772 | """ |
|
1773 | 1773 | |
|
1774 | 1774 | # ipython |
|
1775 | 1775 | for ext in ['*.ipynb']: |
|
1776 | 1776 | if fnmatch.fnmatch(filename, pat=ext): |
|
1777 | 1777 | return 'jupyter' |
|
1778 | 1778 | |
|
1779 | 1779 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1780 | 1780 | if is_markup: |
|
1781 | 1781 | return is_markup |
|
1782 | 1782 | return None |
|
1783 | 1783 | |
|
1784 | 1784 | |
|
1785 | 1785 | def render(source, renderer='rst', mentions=False, relative_urls=None, |
|
1786 | 1786 | repo_name=None): |
|
1787 | 1787 | |
|
1788 | 1788 | def maybe_convert_relative_links(html_source): |
|
1789 | 1789 | if relative_urls: |
|
1790 | 1790 | return relative_links(html_source, relative_urls) |
|
1791 | 1791 | return html_source |
|
1792 | 1792 | |
|
1793 | 1793 | if renderer == 'plain': |
|
1794 | 1794 | return literal( |
|
1795 | 1795 | MarkupRenderer.plain(source, leading_newline=False)) |
|
1796 | 1796 | |
|
1797 | 1797 | elif renderer == 'rst': |
|
1798 | 1798 | if repo_name: |
|
1799 | 1799 | # process patterns on comments if we pass in repo name |
|
1800 | 1800 | source, issues = process_patterns( |
|
1801 | 1801 | source, repo_name, link_format='rst') |
|
1802 | 1802 | |
|
1803 | 1803 | return literal( |
|
1804 | 1804 | '<div class="rst-block">%s</div>' % |
|
1805 | 1805 | maybe_convert_relative_links( |
|
1806 | 1806 | MarkupRenderer.rst(source, mentions=mentions))) |
|
1807 | 1807 | |
|
1808 | 1808 | elif renderer == 'markdown': |
|
1809 | 1809 | if repo_name: |
|
1810 | 1810 | # process patterns on comments if we pass in repo name |
|
1811 | 1811 | source, issues = process_patterns( |
|
1812 | 1812 | source, repo_name, link_format='markdown') |
|
1813 | 1813 | |
|
1814 | 1814 | return literal( |
|
1815 | 1815 | '<div class="markdown-block">%s</div>' % |
|
1816 | 1816 | maybe_convert_relative_links( |
|
1817 | 1817 | MarkupRenderer.markdown(source, flavored=True, |
|
1818 | 1818 | mentions=mentions))) |
|
1819 | 1819 | |
|
1820 | 1820 | elif renderer == 'jupyter': |
|
1821 | 1821 | return literal( |
|
1822 | 1822 | '<div class="ipynb">%s</div>' % |
|
1823 | 1823 | maybe_convert_relative_links( |
|
1824 | 1824 | MarkupRenderer.jupyter(source))) |
|
1825 | 1825 | |
|
1826 | 1826 | # None means just show the file-source |
|
1827 | 1827 | return None |
|
1828 | 1828 | |
|
1829 | 1829 | |
|
1830 | 1830 | def commit_status(repo, commit_id): |
|
1831 | 1831 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1832 | 1832 | |
|
1833 | 1833 | |
|
1834 | 1834 | def commit_status_lbl(commit_status): |
|
1835 | 1835 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1836 | 1836 | |
|
1837 | 1837 | |
|
1838 | 1838 | def commit_time(repo_name, commit_id): |
|
1839 | 1839 | repo = Repository.get_by_repo_name(repo_name) |
|
1840 | 1840 | commit = repo.get_commit(commit_id=commit_id) |
|
1841 | 1841 | return commit.date |
|
1842 | 1842 | |
|
1843 | 1843 | |
|
1844 | 1844 | def get_permission_name(key): |
|
1845 | 1845 | return dict(Permission.PERMS).get(key) |
|
1846 | 1846 | |
|
1847 | 1847 | |
|
1848 | 1848 | def journal_filter_help(request): |
|
1849 | 1849 | _ = request.translate |
|
1850 | 1850 | from rhodecode.lib.audit_logger import ACTIONS |
|
1851 | 1851 | actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80)) |
|
1852 | 1852 | |
|
1853 | 1853 | return _( |
|
1854 | 1854 | 'Example filter terms:\n' + |
|
1855 | 1855 | ' repository:vcs\n' + |
|
1856 | 1856 | ' username:marcin\n' + |
|
1857 | 1857 | ' username:(NOT marcin)\n' + |
|
1858 | 1858 | ' action:*push*\n' + |
|
1859 | 1859 | ' ip:127.0.0.1\n' + |
|
1860 | 1860 | ' date:20120101\n' + |
|
1861 | 1861 | ' date:[20120101100000 TO 20120102]\n' + |
|
1862 | 1862 | '\n' + |
|
1863 | 1863 | 'Actions: {actions}\n' + |
|
1864 | 1864 | '\n' + |
|
1865 | 1865 | 'Generate wildcards using \'*\' character:\n' + |
|
1866 | 1866 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1867 | 1867 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1868 | 1868 | '\n' + |
|
1869 | 1869 | 'Optional AND / OR operators in queries\n' + |
|
1870 | 1870 | ' "repository:vcs OR repository:test"\n' + |
|
1871 | 1871 | ' "username:test AND repository:test*"\n' |
|
1872 | 1872 | ).format(actions=actions) |
|
1873 | 1873 | |
|
1874 | 1874 | |
|
1875 | 1875 | def not_mapped_error(repo_name): |
|
1876 | 1876 | from rhodecode.translation import _ |
|
1877 | 1877 | flash(_('%s repository is not mapped to db perhaps' |
|
1878 | 1878 | ' it was created or renamed from the filesystem' |
|
1879 | 1879 | ' please run the application again' |
|
1880 | 1880 | ' in order to rescan repositories') % repo_name, category='error') |
|
1881 | 1881 | |
|
1882 | 1882 | |
|
1883 | 1883 | def ip_range(ip_addr): |
|
1884 | 1884 | from rhodecode.model.db import UserIpMap |
|
1885 | 1885 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1886 | 1886 | return '%s - %s' % (s, e) |
|
1887 | 1887 | |
|
1888 | 1888 | |
|
1889 | 1889 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1890 | 1890 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1891 | 1891 | if method.lower() != 'get' and needs_csrf_token: |
|
1892 | 1892 | raise Exception( |
|
1893 | 1893 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1894 | 1894 | 'CSRF token. If the endpoint does not require such token you can ' + |
|
1895 | 1895 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1896 | 1896 | |
|
1897 | 1897 | return wh_form(url, method=method, **attrs) |
|
1898 | 1898 | |
|
1899 | 1899 | |
|
1900 | 1900 | def secure_form(form_url, method="POST", multipart=False, **attrs): |
|
1901 | 1901 | """Start a form tag that points the action to an url. This |
|
1902 | 1902 | form tag will also include the hidden field containing |
|
1903 | 1903 | the auth token. |
|
1904 | 1904 | |
|
1905 | 1905 | The url options should be given either as a string, or as a |
|
1906 | 1906 | ``url()`` function. The method for the form defaults to POST. |
|
1907 | 1907 | |
|
1908 | 1908 | Options: |
|
1909 | 1909 | |
|
1910 | 1910 | ``multipart`` |
|
1911 | 1911 | If set to True, the enctype is set to "multipart/form-data". |
|
1912 | 1912 | ``method`` |
|
1913 | 1913 | The method to use when submitting the form, usually either |
|
1914 | 1914 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1915 | 1915 | hidden input with name _method is added to simulate the verb |
|
1916 | 1916 | over POST. |
|
1917 | 1917 | |
|
1918 | 1918 | """ |
|
1919 | 1919 | from webhelpers.pylonslib.secure_form import insecure_form |
|
1920 | 1920 | |
|
1921 | 1921 | if 'request' in attrs: |
|
1922 | 1922 | session = attrs['request'].session |
|
1923 | 1923 | del attrs['request'] |
|
1924 | 1924 | else: |
|
1925 | 1925 | raise ValueError( |
|
1926 | 1926 | 'Calling this form requires request= to be passed as argument') |
|
1927 | 1927 | |
|
1928 | 1928 | form = insecure_form(form_url, method, multipart, **attrs) |
|
1929 | 1929 | token = literal( |
|
1930 | 1930 | '<input type="hidden" id="{}" name="{}" value="{}">'.format( |
|
1931 | 1931 | csrf_token_key, csrf_token_key, get_csrf_token(session))) |
|
1932 | 1932 | |
|
1933 | 1933 | return literal("%s\n%s" % (form, token)) |
|
1934 | 1934 | |
|
1935 | 1935 | |
|
1936 | 1936 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
1937 | 1937 | select_html = select(name, selected, options, **attrs) |
|
1938 | 1938 | select2 = """ |
|
1939 | 1939 | <script> |
|
1940 | 1940 | $(document).ready(function() { |
|
1941 | 1941 | $('#%s').select2({ |
|
1942 | 1942 | containerCssClass: 'drop-menu', |
|
1943 | 1943 | dropdownCssClass: 'drop-menu-dropdown', |
|
1944 | 1944 | dropdownAutoWidth: true%s |
|
1945 | 1945 | }); |
|
1946 | 1946 | }); |
|
1947 | 1947 | </script> |
|
1948 | 1948 | """ |
|
1949 | 1949 | filter_option = """, |
|
1950 | 1950 | minimumResultsForSearch: -1 |
|
1951 | 1951 | """ |
|
1952 | 1952 | input_id = attrs.get('id') or name |
|
1953 | 1953 | filter_enabled = "" if enable_filter else filter_option |
|
1954 | 1954 | select_script = literal(select2 % (input_id, filter_enabled)) |
|
1955 | 1955 | |
|
1956 | 1956 | return literal(select_html+select_script) |
|
1957 | 1957 | |
|
1958 | 1958 | |
|
1959 | 1959 | def get_visual_attr(tmpl_context_var, attr_name): |
|
1960 | 1960 | """ |
|
1961 | 1961 | A safe way to get a variable from visual variable of template context |
|
1962 | 1962 | |
|
1963 | 1963 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
1964 | 1964 | :param attr_name: name of the attribute we fetch from the c.visual |
|
1965 | 1965 | """ |
|
1966 | 1966 | visual = getattr(tmpl_context_var, 'visual', None) |
|
1967 | 1967 | if not visual: |
|
1968 | 1968 | return |
|
1969 | 1969 | else: |
|
1970 | 1970 | return getattr(visual, attr_name, None) |
|
1971 | 1971 | |
|
1972 | 1972 | |
|
1973 | 1973 | def get_last_path_part(file_node): |
|
1974 | 1974 | if not file_node.path: |
|
1975 | 1975 | return u'/' |
|
1976 | 1976 | |
|
1977 | 1977 | path = safe_unicode(file_node.path.split('/')[-1]) |
|
1978 | 1978 | return u'../' + path |
|
1979 | 1979 | |
|
1980 | 1980 | |
|
1981 | 1981 | def route_url(*args, **kwargs): |
|
1982 | 1982 | """ |
|
1983 | 1983 | Wrapper around pyramids `route_url` (fully qualified url) function. |
|
1984 | 1984 | """ |
|
1985 | 1985 | req = get_current_request() |
|
1986 | 1986 | return req.route_url(*args, **kwargs) |
|
1987 | 1987 | |
|
1988 | 1988 | |
|
1989 | 1989 | def route_path(*args, **kwargs): |
|
1990 | 1990 | """ |
|
1991 | 1991 | Wrapper around pyramids `route_path` function. |
|
1992 | 1992 | """ |
|
1993 | 1993 | req = get_current_request() |
|
1994 | 1994 | return req.route_path(*args, **kwargs) |
|
1995 | 1995 | |
|
1996 | 1996 | |
|
1997 | 1997 | def route_path_or_none(*args, **kwargs): |
|
1998 | 1998 | try: |
|
1999 | 1999 | return route_path(*args, **kwargs) |
|
2000 | 2000 | except KeyError: |
|
2001 | 2001 | return None |
|
2002 | 2002 | |
|
2003 | 2003 | |
|
2004 | 2004 | def current_route_path(request, **kw): |
|
2005 | 2005 | new_args = request.GET.mixed() |
|
2006 | 2006 | new_args.update(kw) |
|
2007 | 2007 | return request.current_route_path(_query=new_args) |
|
2008 | 2008 | |
|
2009 | 2009 | |
|
2010 | 2010 | def api_call_example(method, args): |
|
2011 | 2011 | """ |
|
2012 | 2012 | Generates an API call example via CURL |
|
2013 | 2013 | """ |
|
2014 | 2014 | args_json = json.dumps(OrderedDict([ |
|
2015 | 2015 | ('id', 1), |
|
2016 | 2016 | ('auth_token', 'SECRET'), |
|
2017 | 2017 | ('method', method), |
|
2018 | 2018 | ('args', args) |
|
2019 | 2019 | ])) |
|
2020 | 2020 | return literal( |
|
2021 | 2021 | "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'" |
|
2022 | 2022 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2023 | 2023 | "and needs to be of `api calls` role." |
|
2024 | 2024 | .format( |
|
2025 | 2025 | api_url=route_url('apiv2'), |
|
2026 | 2026 | token_url=route_url('my_account_auth_tokens'), |
|
2027 | 2027 | data=args_json)) |
|
2028 | 2028 | |
|
2029 | 2029 | |
|
2030 | 2030 | def notification_description(notification, request): |
|
2031 | 2031 | """ |
|
2032 | 2032 | Generate notification human readable description based on notification type |
|
2033 | 2033 | """ |
|
2034 | 2034 | from rhodecode.model.notification import NotificationModel |
|
2035 | 2035 | return NotificationModel().make_description( |
|
2036 | 2036 | notification, translate=request.translate) |
|
2037 | 2037 | |
|
2038 | 2038 | |
|
2039 | 2039 | def go_import_header(request, db_repo=None): |
|
2040 | 2040 | """ |
|
2041 | 2041 | Creates a header for go-import functionality in Go Lang |
|
2042 | 2042 | """ |
|
2043 | 2043 | |
|
2044 | 2044 | if not db_repo: |
|
2045 | 2045 | return |
|
2046 | 2046 | if 'go-get' not in request.GET: |
|
2047 | 2047 | return |
|
2048 | 2048 | |
|
2049 | 2049 | clone_url = db_repo.clone_url() |
|
2050 | 2050 | prefix = re.split(r'^https?:\/\/', clone_url)[-1] |
|
2051 | 2051 | # we have a repo and go-get flag, |
|
2052 | 2052 | return literal('<meta name="go-import" content="{} {} {}">'.format( |
|
2053 | 2053 | prefix, db_repo.repo_type, clone_url)) |
|
2054 | 2054 | |
|
2055 | 2055 | |
|
2056 | 2056 | def reviewer_as_json(*args, **kwargs): |
|
2057 | 2057 | from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json |
|
2058 | 2058 | return _reviewer_as_json(*args, **kwargs) |
|
2059 | 2059 | |
|
2060 | 2060 | |
|
2061 | 2061 | def get_repo_view_type(request): |
|
2062 | 2062 | route_name = request.matched_route.name |
|
2063 | 2063 | route_to_view_type = { |
|
2064 |
'repo_changelog': 'c |
|
|
2064 | 'repo_changelog': 'commits', | |
|
2065 | 'repo_commits': 'commits', | |
|
2065 | 2066 | 'repo_files': 'files', |
|
2066 | 2067 | 'repo_summary': 'summary', |
|
2067 | 2068 | 'repo_commit': 'commit' |
|
2068 | 2069 | } |
|
2069 | 2070 | |
|
2070 | 2071 | return route_to_view_type.get(route_name) |
@@ -1,942 +1,942 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | HG repository module |
|
23 | 23 | """ |
|
24 | 24 | import os |
|
25 | 25 | import logging |
|
26 | 26 | import binascii |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import ( |
|
33 | 33 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) |
|
34 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 35 | from rhodecode.lib.vcs import connection, exceptions |
|
36 | 36 | from rhodecode.lib.vcs.backends.base import ( |
|
37 | 37 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
38 | 38 | MergeFailureReason, Reference, BasePathPermissionChecker) |
|
39 | 39 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit |
|
40 | 40 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
41 | 41 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
44 | 44 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError) |
|
45 | 45 | from rhodecode.lib.vcs.compat import configparser |
|
46 | 46 | |
|
47 | 47 | hexlify = binascii.hexlify |
|
48 | 48 | nullid = "\0" * 20 |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class MercurialRepository(BaseRepository): |
|
54 | 54 | """ |
|
55 | 55 | Mercurial repository backend |
|
56 | 56 | """ |
|
57 | 57 | DEFAULT_BRANCH_NAME = 'default' |
|
58 | 58 | |
|
59 | 59 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
60 | 60 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
61 | 61 | """ |
|
62 | 62 | Raises RepositoryError if repository could not be find at the given |
|
63 | 63 | ``repo_path``. |
|
64 | 64 | |
|
65 | 65 | :param repo_path: local path of the repository |
|
66 | 66 | :param config: config object containing the repo configuration |
|
67 | 67 | :param create=False: if set to True, would try to create repository if |
|
68 | 68 | it does not exist rather than raising exception |
|
69 | 69 | :param src_url=None: would try to clone repository from given location |
|
70 | 70 | :param do_workspace_checkout=False: sets update of working copy after |
|
71 | 71 | making a clone |
|
72 | 72 | :param bare: not used, compatible with other VCS |
|
73 | 73 | """ |
|
74 | 74 | |
|
75 | 75 | self.path = safe_str(os.path.abspath(repo_path)) |
|
76 | 76 | # mercurial since 4.4.X requires certain configuration to be present |
|
77 | 77 | # because sometimes we init the repos with config we need to meet |
|
78 | 78 | # special requirements |
|
79 | 79 | self.config = config if config else self.get_default_config( |
|
80 | 80 | default=[('extensions', 'largefiles', '1')]) |
|
81 | 81 | self.with_wire = with_wire |
|
82 | 82 | |
|
83 | 83 | self._init_repo(create, src_url, do_workspace_checkout) |
|
84 | 84 | |
|
85 | 85 | # caches |
|
86 | 86 | self._commit_ids = {} |
|
87 | 87 | |
|
88 | 88 | @LazyProperty |
|
89 | 89 | def _remote(self): |
|
90 | 90 | return connection.Hg(self.path, self.config, with_wire=self.with_wire) |
|
91 | 91 | |
|
92 | 92 | @LazyProperty |
|
93 | 93 | def commit_ids(self): |
|
94 | 94 | """ |
|
95 | 95 | Returns list of commit ids, in ascending order. Being lazy |
|
96 | 96 | attribute allows external tools to inject shas from cache. |
|
97 | 97 | """ |
|
98 | 98 | commit_ids = self._get_all_commit_ids() |
|
99 | 99 | self._rebuild_cache(commit_ids) |
|
100 | 100 | return commit_ids |
|
101 | 101 | |
|
102 | 102 | def _rebuild_cache(self, commit_ids): |
|
103 | 103 | self._commit_ids = dict((commit_id, index) |
|
104 | 104 | for index, commit_id in enumerate(commit_ids)) |
|
105 | 105 | |
|
106 | 106 | @LazyProperty |
|
107 | 107 | def branches(self): |
|
108 | 108 | return self._get_branches() |
|
109 | 109 | |
|
110 | 110 | @LazyProperty |
|
111 | 111 | def branches_closed(self): |
|
112 | 112 | return self._get_branches(active=False, closed=True) |
|
113 | 113 | |
|
114 | 114 | @LazyProperty |
|
115 | 115 | def branches_all(self): |
|
116 | 116 | all_branches = {} |
|
117 | 117 | all_branches.update(self.branches) |
|
118 | 118 | all_branches.update(self.branches_closed) |
|
119 | 119 | return all_branches |
|
120 | 120 | |
|
121 | 121 | def _get_branches(self, active=True, closed=False): |
|
122 | 122 | """ |
|
123 | 123 | Gets branches for this repository |
|
124 | 124 | Returns only not closed active branches by default |
|
125 | 125 | |
|
126 | 126 | :param active: return also active branches |
|
127 | 127 | :param closed: return also closed branches |
|
128 | 128 | |
|
129 | 129 | """ |
|
130 | 130 | if self.is_empty(): |
|
131 | 131 | return {} |
|
132 | 132 | |
|
133 | 133 | def get_name(ctx): |
|
134 | 134 | return ctx[0] |
|
135 | 135 | |
|
136 | 136 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in |
|
137 | 137 | self._remote.branches(active, closed).items()] |
|
138 | 138 | |
|
139 | 139 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) |
|
140 | 140 | |
|
141 | 141 | @LazyProperty |
|
142 | 142 | def tags(self): |
|
143 | 143 | """ |
|
144 | 144 | Gets tags for this repository |
|
145 | 145 | """ |
|
146 | 146 | return self._get_tags() |
|
147 | 147 | |
|
148 | 148 | def _get_tags(self): |
|
149 | 149 | if self.is_empty(): |
|
150 | 150 | return {} |
|
151 | 151 | |
|
152 | 152 | def get_name(ctx): |
|
153 | 153 | return ctx[0] |
|
154 | 154 | |
|
155 | 155 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in |
|
156 | 156 | self._remote.tags().items()] |
|
157 | 157 | |
|
158 | 158 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) |
|
159 | 159 | |
|
160 | 160 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
161 | 161 | **kwargs): |
|
162 | 162 | """ |
|
163 | 163 | Creates and returns a tag for the given ``commit_id``. |
|
164 | 164 | |
|
165 | 165 | :param name: name for new tag |
|
166 | 166 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
167 | 167 | :param commit_id: commit id for which new tag would be created |
|
168 | 168 | :param message: message of the tag's commit |
|
169 | 169 | :param date: date of tag's commit |
|
170 | 170 | |
|
171 | 171 | :raises TagAlreadyExistError: if tag with same name already exists |
|
172 | 172 | """ |
|
173 | 173 | if name in self.tags: |
|
174 | 174 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
175 | 175 | commit = self.get_commit(commit_id=commit_id) |
|
176 | 176 | local = kwargs.setdefault('local', False) |
|
177 | 177 | |
|
178 | 178 | if message is None: |
|
179 | 179 | message = "Added tag %s for commit %s" % (name, commit.short_id) |
|
180 | 180 | |
|
181 | 181 | date, tz = date_to_timestamp_plus_offset(date) |
|
182 | 182 | |
|
183 | 183 | self._remote.tag( |
|
184 | 184 | name, commit.raw_id, message, local, user, date, tz) |
|
185 | 185 | self._remote.invalidate_vcs_cache() |
|
186 | 186 | |
|
187 | 187 | # Reinitialize tags |
|
188 | 188 | self.tags = self._get_tags() |
|
189 | 189 | tag_id = self.tags[name] |
|
190 | 190 | |
|
191 | 191 | return self.get_commit(commit_id=tag_id) |
|
192 | 192 | |
|
193 | 193 | def remove_tag(self, name, user, message=None, date=None): |
|
194 | 194 | """ |
|
195 | 195 | Removes tag with the given `name`. |
|
196 | 196 | |
|
197 | 197 | :param name: name of the tag to be removed |
|
198 | 198 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
199 | 199 | :param message: message of the tag's removal commit |
|
200 | 200 | :param date: date of tag's removal commit |
|
201 | 201 | |
|
202 | 202 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
203 | 203 | """ |
|
204 | 204 | if name not in self.tags: |
|
205 | 205 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
206 | 206 | if message is None: |
|
207 | 207 | message = "Removed tag %s" % name |
|
208 | 208 | local = False |
|
209 | 209 | |
|
210 | 210 | date, tz = date_to_timestamp_plus_offset(date) |
|
211 | 211 | |
|
212 | 212 | self._remote.tag(name, nullid, message, local, user, date, tz) |
|
213 | 213 | self._remote.invalidate_vcs_cache() |
|
214 | 214 | self.tags = self._get_tags() |
|
215 | 215 | |
|
216 | 216 | @LazyProperty |
|
217 | 217 | def bookmarks(self): |
|
218 | 218 | """ |
|
219 | 219 | Gets bookmarks for this repository |
|
220 | 220 | """ |
|
221 | 221 | return self._get_bookmarks() |
|
222 | 222 | |
|
223 | 223 | def _get_bookmarks(self): |
|
224 | 224 | if self.is_empty(): |
|
225 | 225 | return {} |
|
226 | 226 | |
|
227 | 227 | def get_name(ctx): |
|
228 | 228 | return ctx[0] |
|
229 | 229 | |
|
230 | 230 | _bookmarks = [ |
|
231 | 231 | (safe_unicode(n), hexlify(h)) for n, h in |
|
232 | 232 | self._remote.bookmarks().items()] |
|
233 | 233 | |
|
234 | 234 | return OrderedDict(sorted(_bookmarks, key=get_name)) |
|
235 | 235 | |
|
236 | 236 | def _get_all_commit_ids(self): |
|
237 | 237 | return self._remote.get_all_commit_ids('visible') |
|
238 | 238 | |
|
239 | 239 | def get_diff( |
|
240 | 240 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
241 | 241 | context=3, path1=None): |
|
242 | 242 | """ |
|
243 | 243 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
244 | 244 | `commit2` since `commit1`. |
|
245 | 245 | |
|
246 | 246 | :param commit1: Entry point from which diff is shown. Can be |
|
247 | 247 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
248 | 248 | the changes since empty state of the repository until `commit2` |
|
249 | 249 | :param commit2: Until which commit changes should be shown. |
|
250 | 250 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
251 | 251 | changes. Defaults to ``False``. |
|
252 | 252 | :param context: How many lines before/after changed lines should be |
|
253 | 253 | shown. Defaults to ``3``. |
|
254 | 254 | """ |
|
255 | 255 | self._validate_diff_commits(commit1, commit2) |
|
256 | 256 | if path1 is not None and path1 != path: |
|
257 | 257 | raise ValueError("Diff of two different paths not supported.") |
|
258 | 258 | |
|
259 | 259 | if path: |
|
260 | 260 | file_filter = [self.path, path] |
|
261 | 261 | else: |
|
262 | 262 | file_filter = None |
|
263 | 263 | |
|
264 | 264 | diff = self._remote.diff( |
|
265 | 265 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
266 | 266 | opt_git=True, opt_ignorews=ignore_whitespace, |
|
267 | 267 | context=context) |
|
268 | 268 | return MercurialDiff(diff) |
|
269 | 269 | |
|
270 | 270 | def strip(self, commit_id, branch=None): |
|
271 | 271 | self._remote.strip(commit_id, update=False, backup="none") |
|
272 | 272 | |
|
273 | 273 | self._remote.invalidate_vcs_cache() |
|
274 | 274 | self.commit_ids = self._get_all_commit_ids() |
|
275 | 275 | self._rebuild_cache(self.commit_ids) |
|
276 | 276 | |
|
277 | 277 | def verify(self): |
|
278 | 278 | verify = self._remote.verify() |
|
279 | 279 | |
|
280 | 280 | self._remote.invalidate_vcs_cache() |
|
281 | 281 | return verify |
|
282 | 282 | |
|
283 | 283 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
284 | 284 | if commit_id1 == commit_id2: |
|
285 | 285 | return commit_id1 |
|
286 | 286 | |
|
287 | 287 | ancestors = self._remote.revs_from_revspec( |
|
288 | 288 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, |
|
289 | 289 | other_path=repo2.path) |
|
290 | 290 | return repo2[ancestors[0]].raw_id if ancestors else None |
|
291 | 291 | |
|
292 | 292 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
293 | 293 | if commit_id1 == commit_id2: |
|
294 | 294 | commits = [] |
|
295 | 295 | else: |
|
296 | 296 | if merge: |
|
297 | 297 | indexes = self._remote.revs_from_revspec( |
|
298 | 298 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", |
|
299 | 299 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) |
|
300 | 300 | else: |
|
301 | 301 | indexes = self._remote.revs_from_revspec( |
|
302 | 302 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, |
|
303 | 303 | commit_id1, other_path=repo2.path) |
|
304 | 304 | |
|
305 | 305 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) |
|
306 | 306 | for idx in indexes] |
|
307 | 307 | |
|
308 | 308 | return commits |
|
309 | 309 | |
|
310 | 310 | @staticmethod |
|
311 | 311 | def check_url(url, config): |
|
312 | 312 | """ |
|
313 | 313 | Function will check given url and try to verify if it's a valid |
|
314 | 314 | link. Sometimes it may happened that mercurial will issue basic |
|
315 | 315 | auth request that can cause whole API to hang when used from python |
|
316 | 316 | or other external calls. |
|
317 | 317 | |
|
318 | 318 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
319 | 319 | when the return code is non 200 |
|
320 | 320 | """ |
|
321 | 321 | # check first if it's not an local url |
|
322 | 322 | if os.path.isdir(url) or url.startswith('file:'): |
|
323 | 323 | return True |
|
324 | 324 | |
|
325 | 325 | # Request the _remote to verify the url |
|
326 | 326 | return connection.Hg.check_url(url, config.serialize()) |
|
327 | 327 | |
|
328 | 328 | @staticmethod |
|
329 | 329 | def is_valid_repository(path): |
|
330 | 330 | return os.path.isdir(os.path.join(path, '.hg')) |
|
331 | 331 | |
|
332 | 332 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): |
|
333 | 333 | """ |
|
334 | 334 | Function will check for mercurial repository in given path. If there |
|
335 | 335 | is no repository in that path it will raise an exception unless |
|
336 | 336 | `create` parameter is set to True - in that case repository would |
|
337 | 337 | be created. |
|
338 | 338 | |
|
339 | 339 | If `src_url` is given, would try to clone repository from the |
|
340 | 340 | location at given clone_point. Additionally it'll make update to |
|
341 | 341 | working copy accordingly to `do_workspace_checkout` flag. |
|
342 | 342 | """ |
|
343 | 343 | if create and os.path.exists(self.path): |
|
344 | 344 | raise RepositoryError( |
|
345 | 345 | "Cannot create repository at %s, location already exist" |
|
346 | 346 | % self.path) |
|
347 | 347 | |
|
348 | 348 | if src_url: |
|
349 | 349 | url = str(self._get_url(src_url)) |
|
350 | 350 | MercurialRepository.check_url(url, self.config) |
|
351 | 351 | |
|
352 | 352 | self._remote.clone(url, self.path, do_workspace_checkout) |
|
353 | 353 | |
|
354 | 354 | # Don't try to create if we've already cloned repo |
|
355 | 355 | create = False |
|
356 | 356 | |
|
357 | 357 | if create: |
|
358 | 358 | os.makedirs(self.path, mode=0o755) |
|
359 | 359 | |
|
360 | 360 | self._remote.localrepository(create) |
|
361 | 361 | |
|
362 | 362 | @LazyProperty |
|
363 | 363 | def in_memory_commit(self): |
|
364 | 364 | return MercurialInMemoryCommit(self) |
|
365 | 365 | |
|
366 | 366 | @LazyProperty |
|
367 | 367 | def description(self): |
|
368 | 368 | description = self._remote.get_config_value( |
|
369 | 369 | 'web', 'description', untrusted=True) |
|
370 | 370 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
371 | 371 | |
|
372 | 372 | @LazyProperty |
|
373 | 373 | def contact(self): |
|
374 | 374 | contact = ( |
|
375 | 375 | self._remote.get_config_value("web", "contact") or |
|
376 | 376 | self._remote.get_config_value("ui", "username")) |
|
377 | 377 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
378 | 378 | |
|
379 | 379 | @LazyProperty |
|
380 | 380 | def last_change(self): |
|
381 | 381 | """ |
|
382 | 382 | Returns last change made on this repository as |
|
383 | 383 | `datetime.datetime` object. |
|
384 | 384 | """ |
|
385 | 385 | try: |
|
386 | 386 | return self.get_commit().date |
|
387 | 387 | except RepositoryError: |
|
388 | 388 | tzoffset = makedate()[1] |
|
389 | 389 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
390 | 390 | |
|
391 | 391 | def _get_fs_mtime(self): |
|
392 | 392 | # fallback to filesystem |
|
393 | 393 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
394 | 394 | st_path = os.path.join(self.path, '.hg', "store") |
|
395 | 395 | if os.path.exists(cl_path): |
|
396 | 396 | return os.stat(cl_path).st_mtime |
|
397 | 397 | else: |
|
398 | 398 | return os.stat(st_path).st_mtime |
|
399 | 399 | |
|
400 | 400 | def _get_url(self, url): |
|
401 | 401 | """ |
|
402 | 402 | Returns normalized url. If schema is not given, would fall |
|
403 | 403 | to filesystem |
|
404 | 404 | (``file:///``) schema. |
|
405 | 405 | """ |
|
406 | 406 | url = url.encode('utf8') |
|
407 | 407 | if url != 'default' and '://' not in url: |
|
408 | 408 | url = "file:" + urllib.pathname2url(url) |
|
409 | 409 | return url |
|
410 | 410 | |
|
411 | 411 | def get_hook_location(self): |
|
412 | 412 | """ |
|
413 | 413 | returns absolute path to location where hooks are stored |
|
414 | 414 | """ |
|
415 | 415 | return os.path.join(self.path, '.hg', '.hgrc') |
|
416 | 416 | |
|
417 | 417 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None): |
|
418 | 418 | """ |
|
419 | 419 | Returns ``MercurialCommit`` object representing repository's |
|
420 | 420 | commit at the given `commit_id` or `commit_idx`. |
|
421 | 421 | """ |
|
422 | 422 | if self.is_empty(): |
|
423 | 423 | raise EmptyRepositoryError("There are no commits yet") |
|
424 | 424 | |
|
425 | 425 | if commit_id is not None: |
|
426 | 426 | self._validate_commit_id(commit_id) |
|
427 | 427 | try: |
|
428 | 428 | # we have cached idx, use it without contacting the remote |
|
429 | 429 | idx = self._commit_ids[commit_id] |
|
430 | 430 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) |
|
431 | 431 | except KeyError: |
|
432 | 432 | pass |
|
433 | 433 | |
|
434 | 434 | elif commit_idx is not None: |
|
435 | 435 | self._validate_commit_idx(commit_idx) |
|
436 | 436 | try: |
|
437 | 437 | _commit_id = self.commit_ids[commit_idx] |
|
438 | 438 | if commit_idx < 0: |
|
439 | 439 | commit_idx = self.commit_ids.index(_commit_id) |
|
440 | 440 | |
|
441 | 441 | return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load) |
|
442 | 442 | except IndexError: |
|
443 | 443 | commit_id = commit_idx |
|
444 | 444 | else: |
|
445 | 445 | commit_id = "tip" |
|
446 | 446 | |
|
447 | 447 | if isinstance(commit_id, unicode): |
|
448 | 448 | commit_id = safe_str(commit_id) |
|
449 | 449 | |
|
450 | 450 | try: |
|
451 | 451 | raw_id, idx = self._remote.lookup(commit_id, both=True) |
|
452 | 452 | except CommitDoesNotExistError: |
|
453 | 453 | msg = "Commit %s does not exist for %s" % (commit_id, self.name) |
|
454 | 454 | raise CommitDoesNotExistError(msg) |
|
455 | 455 | |
|
456 | 456 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) |
|
457 | 457 | |
|
458 | 458 | def get_commits( |
|
459 | 459 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
460 | 460 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): |
|
461 | 461 | """ |
|
462 | 462 | Returns generator of ``MercurialCommit`` objects from start to end |
|
463 | 463 | (both are inclusive) |
|
464 | 464 | |
|
465 | 465 | :param start_id: None, str(commit_id) |
|
466 | 466 | :param end_id: None, str(commit_id) |
|
467 | 467 | :param start_date: if specified, commits with commit date less than |
|
468 | 468 | ``start_date`` would be filtered out from returned set |
|
469 | 469 | :param end_date: if specified, commits with commit date greater than |
|
470 | 470 | ``end_date`` would be filtered out from returned set |
|
471 | 471 | :param branch_name: if specified, commits not reachable from given |
|
472 | 472 | branch would be filtered out from returned set |
|
473 | 473 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
474 | 474 | Mercurial evolve |
|
475 | 475 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
476 | 476 | exist. |
|
477 | 477 | :raise CommitDoesNotExistError: If commit for given ``start`` or |
|
478 | 478 | ``end`` could not be found. |
|
479 | 479 | """ |
|
480 | 480 | # actually we should check now if it's not an empty repo |
|
481 | branch_ancestors = False | |
|
482 | 481 | if self.is_empty(): |
|
483 | 482 | raise EmptyRepositoryError("There are no commits yet") |
|
484 | 483 | self._validate_branch_name(branch_name) |
|
485 | 484 | |
|
485 | branch_ancestors = False | |
|
486 | 486 | if start_id is not None: |
|
487 | 487 | self._validate_commit_id(start_id) |
|
488 | 488 | c_start = self.get_commit(commit_id=start_id) |
|
489 | 489 | start_pos = self._commit_ids[c_start.raw_id] |
|
490 | 490 | else: |
|
491 | 491 | start_pos = None |
|
492 | 492 | |
|
493 | 493 | if end_id is not None: |
|
494 | 494 | self._validate_commit_id(end_id) |
|
495 | 495 | c_end = self.get_commit(commit_id=end_id) |
|
496 | 496 | end_pos = max(0, self._commit_ids[c_end.raw_id]) |
|
497 | 497 | else: |
|
498 | 498 | end_pos = None |
|
499 | 499 | |
|
500 | 500 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
501 | 501 | raise RepositoryError( |
|
502 | 502 | "Start commit '%s' cannot be after end commit '%s'" % |
|
503 | 503 | (start_id, end_id)) |
|
504 | 504 | |
|
505 | 505 | if end_pos is not None: |
|
506 | 506 | end_pos += 1 |
|
507 | 507 | |
|
508 | 508 | commit_filter = [] |
|
509 | 509 | |
|
510 | 510 | if branch_name and not branch_ancestors: |
|
511 | 511 | commit_filter.append('branch("%s")' % (branch_name,)) |
|
512 | 512 | elif branch_name and branch_ancestors: |
|
513 | 513 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) |
|
514 | 514 | |
|
515 | 515 | if start_date and not end_date: |
|
516 | 516 | commit_filter.append('date(">%s")' % (start_date,)) |
|
517 | 517 | if end_date and not start_date: |
|
518 | 518 | commit_filter.append('date("<%s")' % (end_date,)) |
|
519 | 519 | if start_date and end_date: |
|
520 | 520 | commit_filter.append( |
|
521 | 521 | 'date(">%s") and date("<%s")' % (start_date, end_date)) |
|
522 | 522 | |
|
523 | 523 | if not show_hidden: |
|
524 | 524 | commit_filter.append('not obsolete()') |
|
525 | 525 | commit_filter.append('not hidden()') |
|
526 | 526 | |
|
527 | 527 | # TODO: johbo: Figure out a simpler way for this solution |
|
528 | 528 | collection_generator = CollectionGenerator |
|
529 | 529 | if commit_filter: |
|
530 | 530 | commit_filter = ' and '.join(map(safe_str, commit_filter)) |
|
531 | 531 | revisions = self._remote.rev_range([commit_filter]) |
|
532 | 532 | collection_generator = MercurialIndexBasedCollectionGenerator |
|
533 | 533 | else: |
|
534 | 534 | revisions = self.commit_ids |
|
535 | 535 | |
|
536 | 536 | if start_pos or end_pos: |
|
537 | 537 | revisions = revisions[start_pos:end_pos] |
|
538 | 538 | |
|
539 | 539 | return collection_generator(self, revisions, pre_load=pre_load) |
|
540 | 540 | |
|
541 | 541 | def pull(self, url, commit_ids=None): |
|
542 | 542 | """ |
|
543 | 543 | Pull changes from external location. |
|
544 | 544 | |
|
545 | 545 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
546 | 546 | which shall be pulled from the other repository. |
|
547 | 547 | """ |
|
548 | 548 | url = self._get_url(url) |
|
549 | 549 | self._remote.pull(url, commit_ids=commit_ids) |
|
550 | 550 | self._remote.invalidate_vcs_cache() |
|
551 | 551 | |
|
552 | 552 | def fetch(self, url, commit_ids=None): |
|
553 | 553 | """ |
|
554 | 554 | Backward compatibility with GIT fetch==pull |
|
555 | 555 | """ |
|
556 | 556 | return self.pull(url, commit_ids=commit_ids) |
|
557 | 557 | |
|
558 | 558 | def push(self, url): |
|
559 | 559 | url = self._get_url(url) |
|
560 | 560 | self._remote.sync_push(url) |
|
561 | 561 | |
|
562 | 562 | def _local_clone(self, clone_path): |
|
563 | 563 | """ |
|
564 | 564 | Create a local clone of the current repo. |
|
565 | 565 | """ |
|
566 | 566 | self._remote.clone(self.path, clone_path, update_after_clone=True, |
|
567 | 567 | hooks=False) |
|
568 | 568 | |
|
569 | 569 | def _update(self, revision, clean=False): |
|
570 | 570 | """ |
|
571 | 571 | Update the working copy to the specified revision. |
|
572 | 572 | """ |
|
573 | 573 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) |
|
574 | 574 | self._remote.update(revision, clean=clean) |
|
575 | 575 | |
|
576 | 576 | def _identify(self): |
|
577 | 577 | """ |
|
578 | 578 | Return the current state of the working directory. |
|
579 | 579 | """ |
|
580 | 580 | return self._remote.identify().strip().rstrip('+') |
|
581 | 581 | |
|
582 | 582 | def _heads(self, branch=None): |
|
583 | 583 | """ |
|
584 | 584 | Return the commit ids of the repository heads. |
|
585 | 585 | """ |
|
586 | 586 | return self._remote.heads(branch=branch).strip().split(' ') |
|
587 | 587 | |
|
588 | 588 | def _ancestor(self, revision1, revision2): |
|
589 | 589 | """ |
|
590 | 590 | Return the common ancestor of the two revisions. |
|
591 | 591 | """ |
|
592 | 592 | return self._remote.ancestor(revision1, revision2) |
|
593 | 593 | |
|
594 | 594 | def _local_push( |
|
595 | 595 | self, revision, repository_path, push_branches=False, |
|
596 | 596 | enable_hooks=False): |
|
597 | 597 | """ |
|
598 | 598 | Push the given revision to the specified repository. |
|
599 | 599 | |
|
600 | 600 | :param push_branches: allow to create branches in the target repo. |
|
601 | 601 | """ |
|
602 | 602 | self._remote.push( |
|
603 | 603 | [revision], repository_path, hooks=enable_hooks, |
|
604 | 604 | push_branches=push_branches) |
|
605 | 605 | |
|
606 | 606 | def _local_merge(self, target_ref, merge_message, user_name, user_email, |
|
607 | 607 | source_ref, use_rebase=False, dry_run=False): |
|
608 | 608 | """ |
|
609 | 609 | Merge the given source_revision into the checked out revision. |
|
610 | 610 | |
|
611 | 611 | Returns the commit id of the merge and a boolean indicating if the |
|
612 | 612 | commit needs to be pushed. |
|
613 | 613 | """ |
|
614 | 614 | self._update(target_ref.commit_id, clean=True) |
|
615 | 615 | |
|
616 | 616 | ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id) |
|
617 | 617 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) |
|
618 | 618 | |
|
619 | 619 | if ancestor == source_ref.commit_id: |
|
620 | 620 | # Nothing to do, the changes were already integrated |
|
621 | 621 | return target_ref.commit_id, False |
|
622 | 622 | |
|
623 | 623 | elif ancestor == target_ref.commit_id and is_the_same_branch: |
|
624 | 624 | # In this case we should force a commit message |
|
625 | 625 | return source_ref.commit_id, True |
|
626 | 626 | |
|
627 | 627 | if use_rebase: |
|
628 | 628 | try: |
|
629 | 629 | bookmark_name = 'rcbook%s%s' % (source_ref.commit_id, |
|
630 | 630 | target_ref.commit_id) |
|
631 | 631 | self.bookmark(bookmark_name, revision=source_ref.commit_id) |
|
632 | 632 | self._remote.rebase( |
|
633 | 633 | source=source_ref.commit_id, dest=target_ref.commit_id) |
|
634 | 634 | self._remote.invalidate_vcs_cache() |
|
635 | 635 | self._update(bookmark_name, clean=True) |
|
636 | 636 | return self._identify(), True |
|
637 | 637 | except RepositoryError: |
|
638 | 638 | # The rebase-abort may raise another exception which 'hides' |
|
639 | 639 | # the original one, therefore we log it here. |
|
640 | 640 | log.exception('Error while rebasing shadow repo during merge.') |
|
641 | 641 | |
|
642 | 642 | # Cleanup any rebase leftovers |
|
643 | 643 | self._remote.invalidate_vcs_cache() |
|
644 | 644 | self._remote.rebase(abort=True) |
|
645 | 645 | self._remote.invalidate_vcs_cache() |
|
646 | 646 | self._remote.update(clean=True) |
|
647 | 647 | raise |
|
648 | 648 | else: |
|
649 | 649 | try: |
|
650 | 650 | self._remote.merge(source_ref.commit_id) |
|
651 | 651 | self._remote.invalidate_vcs_cache() |
|
652 | 652 | self._remote.commit( |
|
653 | 653 | message=safe_str(merge_message), |
|
654 | 654 | username=safe_str('%s <%s>' % (user_name, user_email))) |
|
655 | 655 | self._remote.invalidate_vcs_cache() |
|
656 | 656 | return self._identify(), True |
|
657 | 657 | except RepositoryError: |
|
658 | 658 | # Cleanup any merge leftovers |
|
659 | 659 | self._remote.update(clean=True) |
|
660 | 660 | raise |
|
661 | 661 | |
|
662 | 662 | def _local_close(self, target_ref, user_name, user_email, |
|
663 | 663 | source_ref, close_message=''): |
|
664 | 664 | """ |
|
665 | 665 | Close the branch of the given source_revision |
|
666 | 666 | |
|
667 | 667 | Returns the commit id of the close and a boolean indicating if the |
|
668 | 668 | commit needs to be pushed. |
|
669 | 669 | """ |
|
670 | 670 | self._update(source_ref.commit_id) |
|
671 | 671 | message = close_message or "Closing branch: `{}`".format(source_ref.name) |
|
672 | 672 | try: |
|
673 | 673 | self._remote.commit( |
|
674 | 674 | message=safe_str(message), |
|
675 | 675 | username=safe_str('%s <%s>' % (user_name, user_email)), |
|
676 | 676 | close_branch=True) |
|
677 | 677 | self._remote.invalidate_vcs_cache() |
|
678 | 678 | return self._identify(), True |
|
679 | 679 | except RepositoryError: |
|
680 | 680 | # Cleanup any commit leftovers |
|
681 | 681 | self._remote.update(clean=True) |
|
682 | 682 | raise |
|
683 | 683 | |
|
684 | 684 | def _is_the_same_branch(self, target_ref, source_ref): |
|
685 | 685 | return ( |
|
686 | 686 | self._get_branch_name(target_ref) == |
|
687 | 687 | self._get_branch_name(source_ref)) |
|
688 | 688 | |
|
689 | 689 | def _get_branch_name(self, ref): |
|
690 | 690 | if ref.type == 'branch': |
|
691 | 691 | return ref.name |
|
692 | 692 | return self._remote.ctx_branch(ref.commit_id) |
|
693 | 693 | |
|
694 | 694 | def _maybe_prepare_merge_workspace( |
|
695 | 695 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): |
|
696 | 696 | shadow_repository_path = self._get_shadow_repository_path( |
|
697 | 697 | repo_id, workspace_id) |
|
698 | 698 | if not os.path.exists(shadow_repository_path): |
|
699 | 699 | self._local_clone(shadow_repository_path) |
|
700 | 700 | log.debug( |
|
701 | 701 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
702 | 702 | |
|
703 | 703 | return shadow_repository_path |
|
704 | 704 | |
|
705 | 705 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
706 | 706 | source_repo, source_ref, merge_message, |
|
707 | 707 | merger_name, merger_email, dry_run=False, |
|
708 | 708 | use_rebase=False, close_branch=False): |
|
709 | 709 | |
|
710 | 710 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
711 | 711 | 'rebase' if use_rebase else 'merge', dry_run) |
|
712 | 712 | if target_ref.commit_id not in self._heads(): |
|
713 | 713 | return MergeResponse( |
|
714 | 714 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, |
|
715 | 715 | metadata={'target_ref': target_ref}) |
|
716 | 716 | |
|
717 | 717 | try: |
|
718 | 718 | if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1: |
|
719 | 719 | heads = '\n,'.join(self._heads(target_ref.name)) |
|
720 | 720 | metadata = { |
|
721 | 721 | 'target_ref': target_ref, |
|
722 | 722 | 'source_ref': source_ref, |
|
723 | 723 | 'heads': heads |
|
724 | 724 | } |
|
725 | 725 | return MergeResponse( |
|
726 | 726 | False, False, None, |
|
727 | 727 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, |
|
728 | 728 | metadata=metadata) |
|
729 | 729 | except CommitDoesNotExistError: |
|
730 | 730 | log.exception('Failure when looking up branch heads on hg target') |
|
731 | 731 | return MergeResponse( |
|
732 | 732 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
733 | 733 | metadata={'target_ref': target_ref}) |
|
734 | 734 | |
|
735 | 735 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
736 | 736 | repo_id, workspace_id, target_ref, source_ref) |
|
737 | 737 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
738 | 738 | |
|
739 | 739 | log.debug('Pulling in target reference %s', target_ref) |
|
740 | 740 | self._validate_pull_reference(target_ref) |
|
741 | 741 | shadow_repo._local_pull(self.path, target_ref) |
|
742 | 742 | |
|
743 | 743 | try: |
|
744 | 744 | log.debug('Pulling in source reference %s', source_ref) |
|
745 | 745 | source_repo._validate_pull_reference(source_ref) |
|
746 | 746 | shadow_repo._local_pull(source_repo.path, source_ref) |
|
747 | 747 | except CommitDoesNotExistError: |
|
748 | 748 | log.exception('Failure when doing local pull on hg shadow repo') |
|
749 | 749 | return MergeResponse( |
|
750 | 750 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, |
|
751 | 751 | metadata={'source_ref': source_ref}) |
|
752 | 752 | |
|
753 | 753 | merge_ref = None |
|
754 | 754 | merge_commit_id = None |
|
755 | 755 | close_commit_id = None |
|
756 | 756 | merge_failure_reason = MergeFailureReason.NONE |
|
757 | 757 | metadata = {} |
|
758 | 758 | |
|
759 | 759 | # enforce that close branch should be used only in case we source from |
|
760 | 760 | # an actual Branch |
|
761 | 761 | close_branch = close_branch and source_ref.type == 'branch' |
|
762 | 762 | |
|
763 | 763 | # don't allow to close branch if source and target are the same |
|
764 | 764 | close_branch = close_branch and source_ref.name != target_ref.name |
|
765 | 765 | |
|
766 | 766 | needs_push_on_close = False |
|
767 | 767 | if close_branch and not use_rebase and not dry_run: |
|
768 | 768 | try: |
|
769 | 769 | close_commit_id, needs_push_on_close = shadow_repo._local_close( |
|
770 | 770 | target_ref, merger_name, merger_email, source_ref) |
|
771 | 771 | merge_possible = True |
|
772 | 772 | except RepositoryError: |
|
773 | 773 | log.exception('Failure when doing close branch on ' |
|
774 | 774 | 'shadow repo: %s', shadow_repo) |
|
775 | 775 | merge_possible = False |
|
776 | 776 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
777 | 777 | else: |
|
778 | 778 | merge_possible = True |
|
779 | 779 | |
|
780 | 780 | needs_push = False |
|
781 | 781 | if merge_possible: |
|
782 | 782 | try: |
|
783 | 783 | merge_commit_id, needs_push = shadow_repo._local_merge( |
|
784 | 784 | target_ref, merge_message, merger_name, merger_email, |
|
785 | 785 | source_ref, use_rebase=use_rebase, dry_run=dry_run) |
|
786 | 786 | merge_possible = True |
|
787 | 787 | |
|
788 | 788 | # read the state of the close action, if it |
|
789 | 789 | # maybe required a push |
|
790 | 790 | needs_push = needs_push or needs_push_on_close |
|
791 | 791 | |
|
792 | 792 | # Set a bookmark pointing to the merge commit. This bookmark |
|
793 | 793 | # may be used to easily identify the last successful merge |
|
794 | 794 | # commit in the shadow repository. |
|
795 | 795 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) |
|
796 | 796 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) |
|
797 | 797 | except SubrepoMergeError: |
|
798 | 798 | log.exception( |
|
799 | 799 | 'Subrepo merge error during local merge on hg shadow repo.') |
|
800 | 800 | merge_possible = False |
|
801 | 801 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED |
|
802 | 802 | needs_push = False |
|
803 | 803 | except RepositoryError: |
|
804 | 804 | log.exception('Failure when doing local merge on hg shadow repo') |
|
805 | 805 | merge_possible = False |
|
806 | 806 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
807 | 807 | needs_push = False |
|
808 | 808 | |
|
809 | 809 | if merge_possible and not dry_run: |
|
810 | 810 | if needs_push: |
|
811 | 811 | # In case the target is a bookmark, update it, so after pushing |
|
812 | 812 | # the bookmarks is also updated in the target. |
|
813 | 813 | if target_ref.type == 'book': |
|
814 | 814 | shadow_repo.bookmark( |
|
815 | 815 | target_ref.name, revision=merge_commit_id) |
|
816 | 816 | try: |
|
817 | 817 | shadow_repo_with_hooks = self._get_shadow_instance( |
|
818 | 818 | shadow_repository_path, |
|
819 | 819 | enable_hooks=True) |
|
820 | 820 | # This is the actual merge action, we push from shadow |
|
821 | 821 | # into origin. |
|
822 | 822 | # Note: the push_branches option will push any new branch |
|
823 | 823 | # defined in the source repository to the target. This may |
|
824 | 824 | # be dangerous as branches are permanent in Mercurial. |
|
825 | 825 | # This feature was requested in issue #441. |
|
826 | 826 | shadow_repo_with_hooks._local_push( |
|
827 | 827 | merge_commit_id, self.path, push_branches=True, |
|
828 | 828 | enable_hooks=True) |
|
829 | 829 | |
|
830 | 830 | # maybe we also need to push the close_commit_id |
|
831 | 831 | if close_commit_id: |
|
832 | 832 | shadow_repo_with_hooks._local_push( |
|
833 | 833 | close_commit_id, self.path, push_branches=True, |
|
834 | 834 | enable_hooks=True) |
|
835 | 835 | merge_succeeded = True |
|
836 | 836 | except RepositoryError: |
|
837 | 837 | log.exception( |
|
838 | 838 | 'Failure when doing local push from the shadow ' |
|
839 | 839 | 'repository to the target repository at %s.', self.path) |
|
840 | 840 | merge_succeeded = False |
|
841 | 841 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
842 | 842 | metadata['target'] = 'hg shadow repo' |
|
843 | 843 | metadata['merge_commit'] = merge_commit_id |
|
844 | 844 | else: |
|
845 | 845 | merge_succeeded = True |
|
846 | 846 | else: |
|
847 | 847 | merge_succeeded = False |
|
848 | 848 | |
|
849 | 849 | return MergeResponse( |
|
850 | 850 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, |
|
851 | 851 | metadata=metadata) |
|
852 | 852 | |
|
853 | 853 | def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): |
|
854 | 854 | config = self.config.copy() |
|
855 | 855 | if not enable_hooks: |
|
856 | 856 | config.clear_section('hooks') |
|
857 | 857 | return MercurialRepository(shadow_repository_path, config) |
|
858 | 858 | |
|
859 | 859 | def _validate_pull_reference(self, reference): |
|
860 | 860 | if not (reference.name in self.bookmarks or |
|
861 | 861 | reference.name in self.branches or |
|
862 | 862 | self.get_commit(reference.commit_id)): |
|
863 | 863 | raise CommitDoesNotExistError( |
|
864 | 864 | 'Unknown branch, bookmark or commit id') |
|
865 | 865 | |
|
866 | 866 | def _local_pull(self, repository_path, reference): |
|
867 | 867 | """ |
|
868 | 868 | Fetch a branch, bookmark or commit from a local repository. |
|
869 | 869 | """ |
|
870 | 870 | repository_path = os.path.abspath(repository_path) |
|
871 | 871 | if repository_path == self.path: |
|
872 | 872 | raise ValueError('Cannot pull from the same repository') |
|
873 | 873 | |
|
874 | 874 | reference_type_to_option_name = { |
|
875 | 875 | 'book': 'bookmark', |
|
876 | 876 | 'branch': 'branch', |
|
877 | 877 | } |
|
878 | 878 | option_name = reference_type_to_option_name.get( |
|
879 | 879 | reference.type, 'revision') |
|
880 | 880 | |
|
881 | 881 | if option_name == 'revision': |
|
882 | 882 | ref = reference.commit_id |
|
883 | 883 | else: |
|
884 | 884 | ref = reference.name |
|
885 | 885 | |
|
886 | 886 | options = {option_name: [ref]} |
|
887 | 887 | self._remote.pull_cmd(repository_path, hooks=False, **options) |
|
888 | 888 | self._remote.invalidate_vcs_cache() |
|
889 | 889 | |
|
890 | 890 | def bookmark(self, bookmark, revision=None): |
|
891 | 891 | if isinstance(bookmark, unicode): |
|
892 | 892 | bookmark = safe_str(bookmark) |
|
893 | 893 | self._remote.bookmark(bookmark, revision=revision) |
|
894 | 894 | self._remote.invalidate_vcs_cache() |
|
895 | 895 | |
|
896 | 896 | def get_path_permissions(self, username): |
|
897 | 897 | hgacl_file = os.path.join(self.path, '.hg/hgacl') |
|
898 | 898 | |
|
899 | 899 | def read_patterns(suffix): |
|
900 | 900 | svalue = None |
|
901 | 901 | for section, option in [ |
|
902 | 902 | ('narrowacl', username + suffix), |
|
903 | 903 | ('narrowacl', 'default' + suffix), |
|
904 | 904 | ('narrowhgacl', username + suffix), |
|
905 | 905 | ('narrowhgacl', 'default' + suffix) |
|
906 | 906 | ]: |
|
907 | 907 | try: |
|
908 | 908 | svalue = hgacl.get(section, option) |
|
909 | 909 | break # stop at the first value we find |
|
910 | 910 | except configparser.NoOptionError: |
|
911 | 911 | pass |
|
912 | 912 | if not svalue: |
|
913 | 913 | return None |
|
914 | 914 | result = ['/'] |
|
915 | 915 | for pattern in svalue.split(): |
|
916 | 916 | result.append(pattern) |
|
917 | 917 | if '*' not in pattern and '?' not in pattern: |
|
918 | 918 | result.append(pattern + '/*') |
|
919 | 919 | return result |
|
920 | 920 | |
|
921 | 921 | if os.path.exists(hgacl_file): |
|
922 | 922 | try: |
|
923 | 923 | hgacl = configparser.RawConfigParser() |
|
924 | 924 | hgacl.read(hgacl_file) |
|
925 | 925 | |
|
926 | 926 | includes = read_patterns('.includes') |
|
927 | 927 | excludes = read_patterns('.excludes') |
|
928 | 928 | return BasePathPermissionChecker.create_from_patterns( |
|
929 | 929 | includes, excludes) |
|
930 | 930 | except BaseException as e: |
|
931 | 931 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( |
|
932 | 932 | hgacl_file, self.name, e) |
|
933 | 933 | raise exceptions.RepositoryRequirementError(msg) |
|
934 | 934 | else: |
|
935 | 935 | return None |
|
936 | 936 | |
|
937 | 937 | |
|
938 | 938 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): |
|
939 | 939 | |
|
940 | 940 | def _commit_factory(self, commit_id): |
|
941 | 941 | return self.repo.get_commit( |
|
942 | 942 | commit_idx=commit_id, pre_load=self.pre_load) |
@@ -1,360 +1,360 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | SVN repository module |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import date_astimestamp |
|
33 | 33 | from rhodecode.lib.utils import safe_str, safe_unicode |
|
34 | 34 | from rhodecode.lib.vcs import connection, path as vcspath |
|
35 | 35 | from rhodecode.lib.vcs.backends import base |
|
36 | 36 | from rhodecode.lib.vcs.backends.svn.commit import ( |
|
37 | 37 | SubversionCommit, _date_from_svn_properties) |
|
38 | 38 | from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff |
|
39 | 39 | from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit |
|
40 | 40 | from rhodecode.lib.vcs.conf import settings |
|
41 | 41 | from rhodecode.lib.vcs.exceptions import ( |
|
42 | 42 | CommitDoesNotExistError, EmptyRepositoryError, RepositoryError, |
|
43 | 43 | VCSError, NodeDoesNotExistError) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class SubversionRepository(base.BaseRepository): |
|
50 | 50 | """ |
|
51 | 51 | Subversion backend implementation |
|
52 | 52 | |
|
53 | 53 | .. important:: |
|
54 | 54 | |
|
55 | 55 | It is very important to distinguish the commit index and the commit id |
|
56 | 56 | which is assigned by Subversion. The first one is always handled as an |
|
57 | 57 | `int` by this implementation. The commit id assigned by Subversion on |
|
58 | 58 | the other side will always be a `str`. |
|
59 | 59 | |
|
60 | 60 | There is a specific trap since the first commit will have the index |
|
61 | 61 | ``0`` but the svn id will be ``"1"``. |
|
62 | 62 | |
|
63 | 63 | """ |
|
64 | 64 | |
|
65 | 65 | # Note: Subversion does not really have a default branch name. |
|
66 | 66 | DEFAULT_BRANCH_NAME = None |
|
67 | 67 | |
|
68 | 68 | contact = base.BaseRepository.DEFAULT_CONTACT |
|
69 | 69 | description = base.BaseRepository.DEFAULT_DESCRIPTION |
|
70 | 70 | |
|
71 | 71 | def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False, |
|
72 | 72 | **kwargs): |
|
73 | 73 | self.path = safe_str(os.path.abspath(repo_path)) |
|
74 | 74 | self.config = config if config else self.get_default_config() |
|
75 | 75 | |
|
76 | 76 | self._init_repo(create, src_url) |
|
77 | 77 | |
|
78 | 78 | @LazyProperty |
|
79 | 79 | def _remote(self): |
|
80 | 80 | return connection.Svn(self.path, self.config) |
|
81 | 81 | |
|
82 | 82 | def _init_repo(self, create, src_url): |
|
83 | 83 | if create and os.path.exists(self.path): |
|
84 | 84 | raise RepositoryError( |
|
85 | 85 | "Cannot create repository at %s, location already exist" |
|
86 | 86 | % self.path) |
|
87 | 87 | |
|
88 | 88 | if create: |
|
89 | 89 | self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION) |
|
90 | 90 | if src_url: |
|
91 | 91 | src_url = _sanitize_url(src_url) |
|
92 | 92 | self._remote.import_remote_repository(src_url) |
|
93 | 93 | else: |
|
94 | 94 | self._check_path() |
|
95 | 95 | |
|
96 | 96 | @LazyProperty |
|
97 | 97 | def commit_ids(self): |
|
98 | 98 | head = self._remote.lookup(None) |
|
99 | 99 | return [str(r) for r in xrange(1, head + 1)] |
|
100 | 100 | |
|
101 | 101 | def run_svn_command(self, cmd, **opts): |
|
102 | 102 | """ |
|
103 | 103 | Runs given ``cmd`` as svn command and returns tuple |
|
104 | 104 | (stdout, stderr). |
|
105 | 105 | |
|
106 | 106 | :param cmd: full svn command to be executed |
|
107 | 107 | :param opts: env options to pass into Subprocess command |
|
108 | 108 | """ |
|
109 | 109 | if not isinstance(cmd, list): |
|
110 | 110 | raise ValueError('cmd must be a list, got %s instead' % type(cmd)) |
|
111 | 111 | |
|
112 | 112 | skip_stderr_log = opts.pop('skip_stderr_log', False) |
|
113 | 113 | out, err = self._remote.run_svn_command(cmd, **opts) |
|
114 | 114 | if err and not skip_stderr_log: |
|
115 | 115 | log.debug('Stderr output of svn command "%s":\n%s', cmd, err) |
|
116 | 116 | return out, err |
|
117 | 117 | |
|
118 | 118 | @LazyProperty |
|
119 | 119 | def branches(self): |
|
120 | 120 | return self._tags_or_branches('vcs_svn_branch') |
|
121 | 121 | |
|
122 | 122 | @LazyProperty |
|
123 | 123 | def branches_closed(self): |
|
124 | 124 | return {} |
|
125 | 125 | |
|
126 | 126 | @LazyProperty |
|
127 | 127 | def bookmarks(self): |
|
128 | 128 | return {} |
|
129 | 129 | |
|
130 | 130 | @LazyProperty |
|
131 | 131 | def branches_all(self): |
|
132 | 132 | # TODO: johbo: Implement proper branch support |
|
133 | 133 | all_branches = {} |
|
134 | 134 | all_branches.update(self.branches) |
|
135 | 135 | all_branches.update(self.branches_closed) |
|
136 | 136 | return all_branches |
|
137 | 137 | |
|
138 | 138 | @LazyProperty |
|
139 | 139 | def tags(self): |
|
140 | 140 | return self._tags_or_branches('vcs_svn_tag') |
|
141 | 141 | |
|
142 | 142 | def _tags_or_branches(self, config_section): |
|
143 | 143 | found_items = {} |
|
144 | 144 | |
|
145 | 145 | if self.is_empty(): |
|
146 | 146 | return {} |
|
147 | 147 | |
|
148 | 148 | for pattern in self._patterns_from_section(config_section): |
|
149 | 149 | pattern = vcspath.sanitize(pattern) |
|
150 | 150 | tip = self.get_commit() |
|
151 | 151 | try: |
|
152 | 152 | if pattern.endswith('*'): |
|
153 | 153 | basedir = tip.get_node(vcspath.dirname(pattern)) |
|
154 | 154 | directories = basedir.dirs |
|
155 | 155 | else: |
|
156 | 156 | directories = (tip.get_node(pattern), ) |
|
157 | 157 | except NodeDoesNotExistError: |
|
158 | 158 | continue |
|
159 | 159 | found_items.update( |
|
160 | 160 | (safe_unicode(n.path), |
|
161 | 161 | self.commit_ids[-1]) |
|
162 | 162 | for n in directories) |
|
163 | 163 | |
|
164 | 164 | def get_name(item): |
|
165 | 165 | return item[0] |
|
166 | 166 | |
|
167 | 167 | return OrderedDict(sorted(found_items.items(), key=get_name)) |
|
168 | 168 | |
|
169 | 169 | def _patterns_from_section(self, section): |
|
170 | 170 | return (pattern for key, pattern in self.config.items(section)) |
|
171 | 171 | |
|
172 | 172 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
173 | 173 | if self != repo2: |
|
174 | 174 | raise ValueError( |
|
175 | 175 | "Subversion does not support getting common ancestor of" |
|
176 | 176 | " different repositories.") |
|
177 | 177 | |
|
178 | 178 | if int(commit_id1) < int(commit_id2): |
|
179 | 179 | return commit_id1 |
|
180 | 180 | return commit_id2 |
|
181 | 181 | |
|
182 | 182 | def verify(self): |
|
183 | 183 | verify = self._remote.verify() |
|
184 | 184 | |
|
185 | 185 | self._remote.invalidate_vcs_cache() |
|
186 | 186 | return verify |
|
187 | 187 | |
|
188 | 188 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
189 | 189 | # TODO: johbo: Implement better comparison, this is a very naive |
|
190 | 190 | # version which does not allow to compare branches, tags or folders |
|
191 | 191 | # at all. |
|
192 | 192 | if repo2 != self: |
|
193 | 193 | raise ValueError( |
|
194 | 194 | "Subversion does not support comparison of of different " |
|
195 | 195 | "repositories.") |
|
196 | 196 | |
|
197 | 197 | if commit_id1 == commit_id2: |
|
198 | 198 | return [] |
|
199 | 199 | |
|
200 | 200 | commit_idx1 = self._get_commit_idx(commit_id1) |
|
201 | 201 | commit_idx2 = self._get_commit_idx(commit_id2) |
|
202 | 202 | |
|
203 | 203 | commits = [ |
|
204 | 204 | self.get_commit(commit_idx=idx) |
|
205 | 205 | for idx in range(commit_idx1 + 1, commit_idx2 + 1)] |
|
206 | 206 | |
|
207 | 207 | return commits |
|
208 | 208 | |
|
209 | 209 | def _get_commit_idx(self, commit_id): |
|
210 | 210 | try: |
|
211 | 211 | svn_rev = int(commit_id) |
|
212 | 212 | except: |
|
213 | 213 | # TODO: johbo: this might be only one case, HEAD, check this |
|
214 | 214 | svn_rev = self._remote.lookup(commit_id) |
|
215 | 215 | commit_idx = svn_rev - 1 |
|
216 | 216 | if commit_idx >= len(self.commit_ids): |
|
217 | 217 | raise CommitDoesNotExistError( |
|
218 | 218 | "Commit at index %s does not exist." % (commit_idx, )) |
|
219 | 219 | return commit_idx |
|
220 | 220 | |
|
221 | 221 | @staticmethod |
|
222 | 222 | def check_url(url, config): |
|
223 | 223 | """ |
|
224 | 224 | Check if `url` is a valid source to import a Subversion repository. |
|
225 | 225 | """ |
|
226 | 226 | # convert to URL if it's a local directory |
|
227 | 227 | if os.path.isdir(url): |
|
228 | 228 | url = 'file://' + urllib.pathname2url(url) |
|
229 | 229 | return connection.Svn.check_url(url, config.serialize()) |
|
230 | 230 | |
|
231 | 231 | @staticmethod |
|
232 | 232 | def is_valid_repository(path): |
|
233 | 233 | try: |
|
234 | 234 | SubversionRepository(path) |
|
235 | 235 | return True |
|
236 | 236 | except VCSError: |
|
237 | 237 | pass |
|
238 | 238 | return False |
|
239 | 239 | |
|
240 | 240 | def _check_path(self): |
|
241 | 241 | if not os.path.exists(self.path): |
|
242 | 242 | raise VCSError('Path "%s" does not exist!' % (self.path, )) |
|
243 | 243 | if not self._remote.is_path_valid_repository(self.path): |
|
244 | 244 | raise VCSError( |
|
245 | 245 | 'Path "%s" does not contain a Subversion repository' % |
|
246 | 246 | (self.path, )) |
|
247 | 247 | |
|
248 | 248 | @LazyProperty |
|
249 | 249 | def last_change(self): |
|
250 | 250 | """ |
|
251 | 251 | Returns last change made on this repository as |
|
252 | 252 | `datetime.datetime` object. |
|
253 | 253 | """ |
|
254 | 254 | # Subversion always has a first commit which has id "0" and contains |
|
255 | 255 | # what we are looking for. |
|
256 | 256 | last_id = len(self.commit_ids) |
|
257 | 257 | properties = self._remote.revision_properties(last_id) |
|
258 | 258 | return _date_from_svn_properties(properties) |
|
259 | 259 | |
|
260 | 260 | @LazyProperty |
|
261 | 261 | def in_memory_commit(self): |
|
262 | 262 | return SubversionInMemoryCommit(self) |
|
263 | 263 | |
|
264 | 264 | def get_hook_location(self): |
|
265 | 265 | """ |
|
266 | 266 | returns absolute path to location where hooks are stored |
|
267 | 267 | """ |
|
268 | 268 | return os.path.join(self.path, 'hooks') |
|
269 | 269 | |
|
270 | 270 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None): |
|
271 | 271 | if self.is_empty(): |
|
272 | 272 | raise EmptyRepositoryError("There are no commits yet") |
|
273 | 273 | if commit_id is not None: |
|
274 | 274 | self._validate_commit_id(commit_id) |
|
275 | 275 | elif commit_idx is not None: |
|
276 | 276 | self._validate_commit_idx(commit_idx) |
|
277 | 277 | try: |
|
278 | 278 | commit_id = self.commit_ids[commit_idx] |
|
279 | 279 | except IndexError: |
|
280 | raise CommitDoesNotExistError | |
|
280 | raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx)) | |
|
281 | 281 | |
|
282 | 282 | commit_id = self._sanitize_commit_id(commit_id) |
|
283 | 283 | commit = SubversionCommit(repository=self, commit_id=commit_id) |
|
284 | 284 | return commit |
|
285 | 285 | |
|
286 | 286 | def get_commits( |
|
287 | 287 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
288 | 288 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): |
|
289 | 289 | if self.is_empty(): |
|
290 | 290 | raise EmptyRepositoryError("There are no commit_ids yet") |
|
291 | 291 | self._validate_branch_name(branch_name) |
|
292 | 292 | |
|
293 | 293 | if start_id is not None: |
|
294 | 294 | self._validate_commit_id(start_id) |
|
295 | 295 | if end_id is not None: |
|
296 | 296 | self._validate_commit_id(end_id) |
|
297 | 297 | |
|
298 | 298 | start_raw_id = self._sanitize_commit_id(start_id) |
|
299 | 299 | start_pos = self.commit_ids.index(start_raw_id) if start_id else None |
|
300 | 300 | end_raw_id = self._sanitize_commit_id(end_id) |
|
301 | 301 | end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None |
|
302 | 302 | |
|
303 | 303 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
304 | 304 | raise RepositoryError( |
|
305 | 305 | "Start commit '%s' cannot be after end commit '%s'" % |
|
306 | 306 | (start_id, end_id)) |
|
307 | 307 | if end_pos is not None: |
|
308 | 308 | end_pos += 1 |
|
309 | 309 | |
|
310 | 310 | # Date based filtering |
|
311 | 311 | if start_date or end_date: |
|
312 | 312 | start_raw_id, end_raw_id = self._remote.lookup_interval( |
|
313 | 313 | date_astimestamp(start_date) if start_date else None, |
|
314 | 314 | date_astimestamp(end_date) if end_date else None) |
|
315 | 315 | start_pos = start_raw_id - 1 |
|
316 | 316 | end_pos = end_raw_id |
|
317 | 317 | |
|
318 | 318 | commit_ids = self.commit_ids |
|
319 | 319 | |
|
320 | 320 | # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here |
|
321 | 321 | if branch_name not in [None, self.DEFAULT_BRANCH_NAME]: |
|
322 | 322 | svn_rev = long(self.commit_ids[-1]) |
|
323 | 323 | commit_ids = self._remote.node_history( |
|
324 | 324 | path=branch_name, revision=svn_rev, limit=None) |
|
325 | 325 | commit_ids = [str(i) for i in reversed(commit_ids)] |
|
326 | 326 | |
|
327 | 327 | if start_pos or end_pos: |
|
328 | 328 | commit_ids = commit_ids[start_pos:end_pos] |
|
329 | 329 | return base.CollectionGenerator(self, commit_ids, pre_load=pre_load) |
|
330 | 330 | |
|
331 | 331 | def _sanitize_commit_id(self, commit_id): |
|
332 | 332 | if commit_id and commit_id.isdigit(): |
|
333 | 333 | if int(commit_id) <= len(self.commit_ids): |
|
334 | 334 | return commit_id |
|
335 | 335 | else: |
|
336 | 336 | raise CommitDoesNotExistError( |
|
337 | 337 | "Commit %s does not exist." % (commit_id, )) |
|
338 | 338 | if commit_id not in [ |
|
339 | 339 | None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]: |
|
340 | 340 | raise CommitDoesNotExistError( |
|
341 | 341 | "Commit id %s not understood." % (commit_id, )) |
|
342 | 342 | svn_rev = self._remote.lookup('HEAD') |
|
343 | 343 | return str(svn_rev) |
|
344 | 344 | |
|
345 | 345 | def get_diff( |
|
346 | 346 | self, commit1, commit2, path=None, ignore_whitespace=False, |
|
347 | 347 | context=3, path1=None): |
|
348 | 348 | self._validate_diff_commits(commit1, commit2) |
|
349 | 349 | svn_rev1 = long(commit1.raw_id) |
|
350 | 350 | svn_rev2 = long(commit2.raw_id) |
|
351 | 351 | diff = self._remote.diff( |
|
352 | 352 | svn_rev1, svn_rev2, path1=path1, path2=path, |
|
353 | 353 | ignore_whitespace=ignore_whitespace, context=context) |
|
354 | 354 | return SubversionDiff(diff) |
|
355 | 355 | |
|
356 | 356 | |
|
357 | 357 | def _sanitize_url(url): |
|
358 | 358 | if '://' not in url: |
|
359 | 359 | url = 'file://' + urllib.pathname2url(url) |
|
360 | 360 | return url |
@@ -1,133 +1,133 b'' | |||
|
1 | 1 | // Global keyboard bindings |
|
2 | 2 | |
|
3 | 3 | function setRCMouseBindings(repoName, repoLandingRev) { |
|
4 | 4 | |
|
5 | 5 | /** custom callback for supressing mousetrap from firing */ |
|
6 | 6 | Mousetrap.stopCallback = function(e, element) { |
|
7 | 7 | // if the element has the class "mousetrap" then no need to stop |
|
8 | 8 | if ((' ' + element.className + ' ').indexOf(' mousetrap ') > -1) { |
|
9 | 9 | return false; |
|
10 | 10 | } |
|
11 | 11 | |
|
12 | 12 | // stop for input, select, and textarea |
|
13 | 13 | return element.tagName == 'INPUT' || element.tagName == 'SELECT' || element.tagName == 'TEXTAREA' || element.isContentEditable; |
|
14 | 14 | }; |
|
15 | 15 | |
|
16 | 16 | // general help "?" |
|
17 | 17 | Mousetrap.bind(['?'], function(e) { |
|
18 | 18 | $('#help_kb').modal({}); |
|
19 | 19 | }); |
|
20 | 20 | |
|
21 | 21 | // / open the quick filter |
|
22 | 22 | Mousetrap.bind(['/'], function(e) { |
|
23 | 23 | $('#main_filter').get(0).focus(); |
|
24 | 24 | |
|
25 | 25 | // return false to prevent default browser behavior |
|
26 | 26 | // and stop event from bubbling |
|
27 | 27 | return false; |
|
28 | 28 | }); |
|
29 | 29 | |
|
30 | 30 | // ctrl/command+b, show the the main bar |
|
31 | 31 | Mousetrap.bind(['command+b', 'ctrl+b'], function(e) { |
|
32 | 32 | var $headerInner = $('#header-inner'), |
|
33 | 33 | $content = $('#content'); |
|
34 | 34 | if ($headerInner.hasClass('hover') && $content.hasClass('hover')) { |
|
35 | 35 | $headerInner.removeClass('hover'); |
|
36 | 36 | $content.removeClass('hover'); |
|
37 | 37 | } else { |
|
38 | 38 | $headerInner.addClass('hover'); |
|
39 | 39 | $content.addClass('hover'); |
|
40 | 40 | } |
|
41 | 41 | return false; |
|
42 | 42 | }); |
|
43 | 43 | |
|
44 | 44 | // general nav g + action |
|
45 | 45 | Mousetrap.bind(['g h'], function(e) { |
|
46 | 46 | window.location = pyroutes.url('home'); |
|
47 | 47 | }); |
|
48 | 48 | Mousetrap.bind(['g g'], function(e) { |
|
49 | 49 | window.location = pyroutes.url('gists_show', {'private': 1}); |
|
50 | 50 | }); |
|
51 | 51 | Mousetrap.bind(['g G'], function(e) { |
|
52 | 52 | window.location = pyroutes.url('gists_show', {'public': 1}); |
|
53 | 53 | }); |
|
54 | 54 | |
|
55 | 55 | Mousetrap.bind(['g 0'], function(e) { |
|
56 | 56 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 0}); |
|
57 | 57 | }); |
|
58 | 58 | Mousetrap.bind(['g 1'], function(e) { |
|
59 | 59 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 1}); |
|
60 | 60 | }); |
|
61 | 61 | Mousetrap.bind(['g 2'], function(e) { |
|
62 | 62 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 2}); |
|
63 | 63 | }); |
|
64 | 64 | Mousetrap.bind(['g 3'], function(e) { |
|
65 | 65 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 3}); |
|
66 | 66 | }); |
|
67 | 67 | Mousetrap.bind(['g 4'], function(e) { |
|
68 | 68 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 4}); |
|
69 | 69 | }); |
|
70 | 70 | Mousetrap.bind(['g 5'], function(e) { |
|
71 | 71 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 5}); |
|
72 | 72 | }); |
|
73 | 73 | Mousetrap.bind(['g 6'], function(e) { |
|
74 | 74 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 6}); |
|
75 | 75 | }); |
|
76 | 76 | Mousetrap.bind(['g 7'], function(e) { |
|
77 | 77 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 7}); |
|
78 | 78 | }); |
|
79 | 79 | Mousetrap.bind(['g 8'], function(e) { |
|
80 | 80 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 8}); |
|
81 | 81 | }); |
|
82 | 82 | Mousetrap.bind(['g 9'], function(e) { |
|
83 | 83 | window.location = pyroutes.url('my_account_goto_bookmark', {'bookmark_id': 9}); |
|
84 | 84 | }); |
|
85 | 85 | |
|
86 | 86 | Mousetrap.bind(['n g'], function(e) { |
|
87 | 87 | window.location = pyroutes.url('gists_new'); |
|
88 | 88 | }); |
|
89 | 89 | Mousetrap.bind(['n r'], function(e) { |
|
90 | 90 | window.location = pyroutes.url('repo_new'); |
|
91 | 91 | }); |
|
92 | 92 | |
|
93 | 93 | if (repoName && repoName !== '') { |
|
94 | 94 | // nav in repo context |
|
95 | 95 | Mousetrap.bind(['g s'], function(e) { |
|
96 | 96 | window.location = pyroutes.url( |
|
97 | 97 | 'repo_summary', {'repo_name': repoName}); |
|
98 | 98 | }); |
|
99 | 99 | Mousetrap.bind(['g c'], function(e) { |
|
100 | 100 | window.location = pyroutes.url( |
|
101 |
'repo_c |
|
|
101 | 'repo_commits', {'repo_name': repoName}); | |
|
102 | 102 | }); |
|
103 | 103 | Mousetrap.bind(['g F'], function(e) { |
|
104 | 104 | window.location = pyroutes.url( |
|
105 | 105 | 'repo_files', |
|
106 | 106 | { |
|
107 | 107 | 'repo_name': repoName, |
|
108 | 108 | 'commit_id': repoLandingRev, |
|
109 | 109 | 'f_path': '', |
|
110 | 110 | 'search': '1' |
|
111 | 111 | }); |
|
112 | 112 | }); |
|
113 | 113 | Mousetrap.bind(['g f'], function(e) { |
|
114 | 114 | window.location = pyroutes.url( |
|
115 | 115 | 'repo_files', |
|
116 | 116 | { |
|
117 | 117 | 'repo_name': repoName, |
|
118 | 118 | 'commit_id': repoLandingRev, |
|
119 | 119 | 'f_path': '' |
|
120 | 120 | }); |
|
121 | 121 | }); |
|
122 | 122 | Mousetrap.bind(['g o'], function(e) { |
|
123 | 123 | window.location = pyroutes.url( |
|
124 | 124 | 'edit_repo', {'repo_name': repoName}); |
|
125 | 125 | }); |
|
126 | 126 | Mousetrap.bind(['g O'], function(e) { |
|
127 | 127 | window.location = pyroutes.url( |
|
128 | 128 | 'edit_repo_perms', {'repo_name': repoName}); |
|
129 | 129 | }); |
|
130 | 130 | } |
|
131 | 131 | } |
|
132 | 132 | |
|
133 | 133 | setRCMouseBindings(templateContext.repo_name, templateContext.repo_landing_commit); |
@@ -1,371 +1,374 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
16 | 16 | pyroutes.register('robots', '/robots.txt', []); |
|
17 | 17 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
18 | 18 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
19 | 19 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
20 | 20 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
21 | 21 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
22 | 22 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
23 | 23 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
24 | 24 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
25 | 25 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
26 | 26 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
27 | 27 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
28 | 28 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
29 | 29 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
30 | 30 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
31 | 31 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
32 | 32 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
33 | 33 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
34 | 34 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
35 | 35 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
36 | 36 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
37 | 37 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
38 | 38 | pyroutes.register('admin_home', '/_admin', []); |
|
39 | 39 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
40 | 40 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
41 | 41 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
42 | 42 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
43 | 43 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
44 | 44 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
45 | 45 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
46 | 46 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
47 | 47 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
48 | 48 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
49 | 49 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); |
|
50 | 50 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
51 | 51 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
52 | 52 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
53 | 53 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
54 | 54 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
55 | 55 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
56 | 56 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
57 | 57 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
58 | 58 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
59 | 59 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
60 | 60 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
61 | 61 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
62 | 62 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
63 | 63 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
64 | 64 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
65 | 65 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
66 | 66 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
67 | 67 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
68 | 68 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
69 | 69 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
70 | 70 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
71 | 71 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
72 | 72 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
73 | 73 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
74 | 74 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
75 | 75 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
76 | 76 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
77 | 77 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
78 | 78 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
79 | 79 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
80 | 80 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
81 | 81 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
82 | 82 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
83 | 83 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
84 | 84 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
85 | 85 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
86 | 86 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
87 | 87 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
88 | 88 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
89 | 89 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
90 | 90 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
91 | 91 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
92 | 92 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
93 | 93 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
94 | 94 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
95 | 95 | pyroutes.register('users', '/_admin/users', []); |
|
96 | 96 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
97 | 97 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
98 | 98 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
99 | 99 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
100 | 100 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
101 | 101 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
102 | 102 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
103 | 103 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
104 | 104 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
105 | 105 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']); |
|
106 | 106 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']); |
|
107 | 107 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
108 | 108 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
109 | 109 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
110 | 110 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
111 | 111 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
112 | 112 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
113 | 113 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
114 | 114 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
115 | 115 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
116 | 116 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
117 | 117 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
118 | 118 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
119 | 119 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
120 | 120 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
121 | 121 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
122 | 122 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
123 | 123 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
124 | 124 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); |
|
125 | 125 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); |
|
126 | 126 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
127 | 127 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
128 | 128 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
129 | 129 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
130 | 130 | pyroutes.register('repos', '/_admin/repos', []); |
|
131 | 131 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
132 | 132 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
133 | 133 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
134 | 134 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []); |
|
135 | 135 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
136 | 136 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
137 | 137 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
138 | 138 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
139 | 139 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
140 | 140 | pyroutes.register('upload_file', '/_file_store/upload', []); |
|
141 | 141 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); |
|
142 | 142 | pyroutes.register('logout', '/_admin/logout', []); |
|
143 | 143 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
144 | 144 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
145 | 145 | pyroutes.register('home', '/', []); |
|
146 | 146 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
147 | 147 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
148 | 148 | pyroutes.register('repo_list_data', '/_repos', []); |
|
149 | 149 | pyroutes.register('repo_group_list_data', '/_repo_groups', []); |
|
150 | 150 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
151 | 151 | pyroutes.register('markup_preview', '/_markup_preview', []); |
|
152 | 152 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); |
|
153 | 153 | pyroutes.register('journal', '/_admin/journal', []); |
|
154 | 154 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
155 | 155 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
156 | 156 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
157 | 157 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
158 | 158 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
159 | 159 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
160 | 160 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
161 | 161 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
162 | 162 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
163 | 163 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
164 | 164 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
165 | 165 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
166 | 166 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
167 | 167 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
168 | 168 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
169 | 169 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
170 | 170 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
171 | 171 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
172 | 172 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
173 | 173 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
174 | 174 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
175 | 175 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
176 | 176 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
177 | 177 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
178 | 178 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
179 | 179 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
180 | 180 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
181 | 181 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
182 | 182 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
183 | 183 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
184 | 184 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
185 | 185 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
186 | 186 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
187 | 187 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
188 | 188 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
189 | 189 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
190 | 190 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
191 | 191 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
192 | 192 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
193 | 193 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
194 | 194 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
195 | 195 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
196 | 196 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
197 | 197 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
198 | 198 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
199 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
|
199 | 200 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
200 | 201 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
201 | 202 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
202 | 203 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
204 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']); | |
|
205 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
|
206 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']); | |
|
207 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
|
203 | 208 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
204 | 209 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
205 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); | |
|
206 | pyroutes.register('repo_changelog_elements_file', '/%(repo_name)s/changelog_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
|
207 | 210 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
208 | 211 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
209 | 212 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
210 | 213 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
211 | 214 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
212 | 215 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
213 | 216 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
214 | 217 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
215 | 218 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
216 | 219 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
217 | 220 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
218 | 221 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
219 | 222 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
220 | 223 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); |
|
221 | 224 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
222 | 225 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
223 | 226 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
224 | 227 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
225 | 228 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
226 | 229 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
227 | 230 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
228 | 231 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
229 | 232 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
230 | 233 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); |
|
231 | 234 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
232 | 235 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
233 | 236 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
234 | 237 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
235 | 238 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); |
|
236 | 239 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
237 | 240 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
238 | 241 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
239 | 242 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
240 | 243 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
241 | 244 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
242 | 245 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
243 | 246 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); |
|
244 | 247 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
245 | 248 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
246 | 249 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
247 | 250 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
248 | 251 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
249 | 252 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
250 | 253 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
251 | 254 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
252 | 255 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
253 | 256 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
254 | 257 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
255 | 258 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
256 | 259 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
257 | 260 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
258 | 261 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
259 | 262 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
260 | 263 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
261 | 264 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
262 | 265 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
263 | 266 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
264 | 267 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
265 | 268 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
266 | 269 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
267 | 270 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
268 | 271 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
269 | 272 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
270 | 273 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
271 | 274 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
272 | 275 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
273 | 276 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
274 | 277 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
275 | 278 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
276 | 279 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
277 | 280 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
278 | 281 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
279 | 282 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
280 | 283 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
281 | 284 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
282 | 285 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
283 | 286 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
284 | 287 | pyroutes.register('search', '/_admin/search', []); |
|
285 | 288 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']); |
|
286 | 289 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']); |
|
287 | 290 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']); |
|
288 | 291 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
289 | 292 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); |
|
290 | 293 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
291 | 294 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
292 | 295 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
293 | 296 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
294 | 297 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
295 | 298 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
296 | 299 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
297 | 300 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
298 | 301 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
299 | 302 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
300 | 303 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
301 | 304 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
302 | 305 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
303 | 306 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
304 | 307 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
305 | 308 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
306 | 309 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []); |
|
307 | 310 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []); |
|
308 | 311 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); |
|
309 | 312 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
310 | 313 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
311 | 314 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
312 | 315 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
313 | 316 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
314 | 317 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
315 | 318 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
316 | 319 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
317 | 320 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
318 | 321 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
319 | 322 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
320 | 323 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
321 | 324 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
322 | 325 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
323 | 326 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
324 | 327 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
325 | 328 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
326 | 329 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
327 | 330 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
328 | 331 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
329 | 332 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
330 | 333 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
331 | 334 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
332 | 335 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
333 | 336 | pyroutes.register('apiv2', '/_admin/api', []); |
|
334 | 337 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); |
|
335 | 338 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); |
|
336 | 339 | pyroutes.register('login', '/_admin/login', []); |
|
337 | 340 | pyroutes.register('register', '/_admin/register', []); |
|
338 | 341 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); |
|
339 | 342 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); |
|
340 | 343 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
341 | 344 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); |
|
342 | 345 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
343 | 346 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
344 | 347 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []); |
|
345 | 348 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []); |
|
346 | 349 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []); |
|
347 | 350 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []); |
|
348 | 351 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']); |
|
349 | 352 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']); |
|
350 | 353 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']); |
|
351 | 354 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']); |
|
352 | 355 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []); |
|
353 | 356 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']); |
|
354 | 357 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); |
|
355 | 358 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); |
|
356 | 359 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
357 | 360 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
358 | 361 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); |
|
359 | 362 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); |
|
360 | 363 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); |
|
361 | 364 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); |
|
362 | 365 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); |
|
363 | 366 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); |
|
364 | 367 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); |
|
365 | 368 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); |
|
366 | 369 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); |
|
367 | 370 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); |
|
368 | 371 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); |
|
369 | 372 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); |
|
370 | 373 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
371 | 374 | } |
@@ -1,189 +1,189 b'' | |||
|
1 | 1 | // # Copyright (C) 2016-2019 RhodeCode GmbH |
|
2 | 2 | // # |
|
3 | 3 | // # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | // # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | // # (only), as published by the Free Software Foundation. |
|
6 | 6 | // # |
|
7 | 7 | // # This program is distributed in the hope that it will be useful, |
|
8 | 8 | // # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | // # GNU General Public License for more details. |
|
11 | 11 | // # |
|
12 | 12 | // # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | // # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | // # |
|
15 | 15 | // # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | // # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | // # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | var CommitsController = function () { |
|
21 | 21 | var self = this; |
|
22 | 22 | this.$graphCanvas = $('#graph_canvas'); |
|
23 | 23 | this.$commitCounter = $('#commit-counter'); |
|
24 | 24 | |
|
25 | 25 | this.getCurrentGraphData = function () { |
|
26 | 26 | // raw form |
|
27 | 27 | return self.$graphCanvas.data('commits'); |
|
28 | 28 | }; |
|
29 | 29 | |
|
30 | 30 | this.setLabelText = function (graphData) { |
|
31 | 31 | var shown = $('.commit_hash').length; |
|
32 | 32 | var total = self.$commitCounter.data('total'); |
|
33 | 33 | |
|
34 | 34 | if (shown == 1) { |
|
35 | 35 | var text = _gettext('showing {0} out of {1} commit').format(shown, total); |
|
36 | 36 | } else { |
|
37 | 37 | var text = _gettext('showing {0} out of {1} commits').format(shown, total); |
|
38 | 38 | } |
|
39 | 39 | self.$commitCounter.html(text) |
|
40 | 40 | }; |
|
41 | 41 | |
|
42 | 42 | this.reloadGraph = function (chunk) { |
|
43 | 43 | chunk = chunk || 'next'; |
|
44 | 44 | |
|
45 | 45 | // reset state on re-render ! |
|
46 | 46 | self.$graphCanvas.html(''); |
|
47 | 47 | |
|
48 | 48 | var edgeData = $("[data-graph]").data('graph') || this.$graphCanvas.data('graph') || []; |
|
49 | 49 | |
|
50 | 50 | // Determine max number of edges per row in graph |
|
51 | 51 | var edgeCount = 1; |
|
52 | 52 | $.each(edgeData, function (i, item) { |
|
53 | 53 | $.each(item[2], function (key, value) { |
|
54 | 54 | if (value[1] > edgeCount) { |
|
55 | 55 | edgeCount = value[1]; |
|
56 | 56 | } |
|
57 | 57 | }); |
|
58 | 58 | }); |
|
59 | 59 | |
|
60 | 60 | var x_step = Math.min(10, Math.floor(86 / edgeCount)); |
|
61 | 61 | var graph_options = { |
|
62 | 62 | width: 100, |
|
63 | 63 | height: $('#changesets').find('.commits-range').height(), |
|
64 | 64 | x_step: x_step, |
|
65 | 65 | y_step: 42, |
|
66 | 66 | dotRadius: 3.5, |
|
67 | 67 | lineWidth: 2.5 |
|
68 | 68 | }; |
|
69 | 69 | |
|
70 | 70 | var prevCommitsData = this.$graphCanvas.data('commits') || []; |
|
71 | 71 | var nextCommitsData = $("[data-graph]").data('commits') || []; |
|
72 | 72 | |
|
73 | 73 | if (chunk == 'next') { |
|
74 | 74 | var commitData = $.merge(prevCommitsData, nextCommitsData); |
|
75 | 75 | } else { |
|
76 | 76 | var commitData = $.merge(nextCommitsData, prevCommitsData); |
|
77 | 77 | } |
|
78 | 78 | |
|
79 | 79 | this.$graphCanvas.data('graph', edgeData); |
|
80 | 80 | this.$graphCanvas.data('commits', commitData); |
|
81 | 81 | |
|
82 | 82 | // destroy dynamic loaded graph |
|
83 | 83 | $("[data-graph]").remove(); |
|
84 | 84 | |
|
85 | 85 | this.$graphCanvas.commits(graph_options); |
|
86 | 86 | |
|
87 | 87 | this.setLabelText(edgeData); |
|
88 | 88 | if ($('.load-more-commits').find('.prev-commits').get(0)) { |
|
89 | 89 | var padding = 75; |
|
90 | 90 | |
|
91 | 91 | } else { |
|
92 | 92 | var padding = 43; |
|
93 | 93 | } |
|
94 | 94 | $('#graph_nodes').css({'padding-top': padding}); |
|
95 | 95 | }; |
|
96 | 96 | |
|
97 | 97 | this.getChunkUrl = function (page, chunk, branch, commit_id, f_path) { |
|
98 | 98 | var urlData = { |
|
99 | 99 | 'repo_name': templateContext.repo_name, |
|
100 | 100 | 'page': page, |
|
101 | 101 | 'chunk': chunk |
|
102 | 102 | }; |
|
103 | 103 | |
|
104 | 104 | if (branch !== undefined && branch !== '') { |
|
105 | 105 | urlData['branch'] = branch; |
|
106 | 106 | } |
|
107 | 107 | if (commit_id !== undefined && commit_id !== '') { |
|
108 | 108 | urlData['commit_id'] = commit_id; |
|
109 | 109 | } |
|
110 | 110 | if (f_path !== undefined && f_path !== '') { |
|
111 | 111 | urlData['f_path'] = f_path; |
|
112 | 112 | } |
|
113 | 113 | |
|
114 | 114 | if (urlData['commit_id'] && urlData['f_path']) { |
|
115 |
return pyroutes.url('repo_c |
|
|
115 | return pyroutes.url('repo_commits_elements_file', urlData); | |
|
116 | 116 | } |
|
117 | 117 | else { |
|
118 |
return pyroutes.url('repo_c |
|
|
118 | return pyroutes.url('repo_commits_elements', urlData); | |
|
119 | 119 | } |
|
120 | 120 | |
|
121 | 121 | }; |
|
122 | 122 | |
|
123 | 123 | this.loadNext = function (node, page, branch, commit_id, f_path) { |
|
124 | 124 | var loadUrl = this.getChunkUrl(page, 'next', branch, commit_id, f_path); |
|
125 | 125 | var postData = {'graph': JSON.stringify(this.getCurrentGraphData())}; |
|
126 | 126 | |
|
127 | 127 | $.post(loadUrl, postData, function (data) { |
|
128 | 128 | $(node).closest('tbody').append(data); |
|
129 | 129 | $(node).closest('td').remove(); |
|
130 | 130 | self.reloadGraph('next'); |
|
131 | 131 | }) |
|
132 | 132 | }; |
|
133 | 133 | |
|
134 | 134 | this.loadPrev = function (node, page, branch, commit_id, f_path) { |
|
135 | 135 | var loadUrl = this.getChunkUrl(page, 'prev', branch, commit_id, f_path); |
|
136 | 136 | var postData = {'graph': JSON.stringify(this.getCurrentGraphData())}; |
|
137 | 137 | |
|
138 | 138 | $.post(loadUrl, postData, function (data) { |
|
139 | 139 | $(node).closest('tbody').prepend(data); |
|
140 | 140 | $(node).closest('td').remove(); |
|
141 | 141 | self.reloadGraph('prev'); |
|
142 | 142 | }) |
|
143 | 143 | }; |
|
144 | 144 | |
|
145 | 145 | this.expandCommit = function (node, reloadGraph) { |
|
146 | 146 | reloadGraph = reloadGraph || false; |
|
147 | 147 | |
|
148 | 148 | var target_expand = $(node); |
|
149 | 149 | var cid = target_expand.data('commitId'); |
|
150 | 150 | |
|
151 | 151 | if (target_expand.hasClass('open')) { |
|
152 | 152 | $('#c-' + cid).css({ |
|
153 | 153 | 'height': '1.5em', |
|
154 | 154 | 'white-space': 'nowrap', |
|
155 | 155 | 'text-overflow': 'ellipsis', |
|
156 | 156 | 'overflow': 'hidden' |
|
157 | 157 | }); |
|
158 | 158 | $('#t-' + cid).css({ |
|
159 | 159 | 'height': 'auto', |
|
160 | 160 | 'line-height': '.9em', |
|
161 | 161 | 'text-overflow': 'ellipsis', |
|
162 | 162 | 'overflow': 'hidden', |
|
163 | 163 | 'white-space': 'nowrap' |
|
164 | 164 | }); |
|
165 | 165 | target_expand.removeClass('open'); |
|
166 | 166 | } |
|
167 | 167 | else { |
|
168 | 168 | $('#c-' + cid).css({ |
|
169 | 169 | 'height': 'auto', |
|
170 | 170 | 'white-space': 'pre-line', |
|
171 | 171 | 'text-overflow': 'initial', |
|
172 | 172 | 'overflow': 'visible' |
|
173 | 173 | }); |
|
174 | 174 | $('#t-' + cid).css({ |
|
175 | 175 | 'height': 'auto', |
|
176 | 176 | 'max-height': 'none', |
|
177 | 177 | 'text-overflow': 'initial', |
|
178 | 178 | 'overflow': 'visible', |
|
179 | 179 | 'white-space': 'normal' |
|
180 | 180 | }); |
|
181 | 181 | target_expand.addClass('open'); |
|
182 | 182 | } |
|
183 | 183 | |
|
184 | 184 | if (reloadGraph) { |
|
185 | 185 | // redraw the graph |
|
186 | 186 | self.reloadGraph(); |
|
187 | 187 | } |
|
188 | 188 | } |
|
189 | 189 | }; |
@@ -1,965 +1,965 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%inherit file="root.mako"/> |
|
3 | 3 | |
|
4 | 4 | <%include file="/ejs_templates/templates.html"/> |
|
5 | 5 | |
|
6 | 6 | <div class="outerwrapper"> |
|
7 | 7 | <!-- HEADER --> |
|
8 | 8 | <div class="header"> |
|
9 | 9 | <div id="header-inner" class="wrapper"> |
|
10 | 10 | <div id="logo"> |
|
11 | 11 | <div class="logo-wrapper"> |
|
12 | 12 | <a href="${h.route_path('home')}"><img src="${h.asset('images/rhodecode-logo-white-60x60.png')}" alt="RhodeCode"/></a> |
|
13 | 13 | </div> |
|
14 | 14 | % if c.rhodecode_name: |
|
15 | 15 | <div class="branding"> |
|
16 | 16 | <a href="${h.route_path('home')}">${h.branding(c.rhodecode_name)}</a> |
|
17 | 17 | </div> |
|
18 | 18 | % endif |
|
19 | 19 | </div> |
|
20 | 20 | <!-- MENU BAR NAV --> |
|
21 | 21 | ${self.menu_bar_nav()} |
|
22 | 22 | <!-- END MENU BAR NAV --> |
|
23 | 23 | </div> |
|
24 | 24 | </div> |
|
25 | 25 | ${self.menu_bar_subnav()} |
|
26 | 26 | <!-- END HEADER --> |
|
27 | 27 | |
|
28 | 28 | <!-- CONTENT --> |
|
29 | 29 | <div id="content" class="wrapper"> |
|
30 | 30 | |
|
31 | 31 | <rhodecode-toast id="notifications"></rhodecode-toast> |
|
32 | 32 | |
|
33 | 33 | <div class="main"> |
|
34 | 34 | ${next.main()} |
|
35 | 35 | </div> |
|
36 | 36 | </div> |
|
37 | 37 | <!-- END CONTENT --> |
|
38 | 38 | |
|
39 | 39 | </div> |
|
40 | 40 | <!-- FOOTER --> |
|
41 | 41 | <div id="footer"> |
|
42 | 42 | <div id="footer-inner" class="title wrapper"> |
|
43 | 43 | <div> |
|
44 | 44 | <p class="footer-link-right"> |
|
45 | 45 | % if c.visual.show_version: |
|
46 | 46 | RhodeCode Enterprise ${c.rhodecode_version} ${c.rhodecode_edition} |
|
47 | 47 | % endif |
|
48 | 48 | © 2010-${h.datetime.today().year}, <a href="${h.route_url('rhodecode_official')}" target="_blank">RhodeCode GmbH</a>. All rights reserved. |
|
49 | 49 | % if c.visual.rhodecode_support_url: |
|
50 | 50 | <a href="${c.visual.rhodecode_support_url}" target="_blank">${_('Support')}</a> |
|
51 | 51 | % endif |
|
52 | 52 | </p> |
|
53 | 53 | <% sid = 'block' if request.GET.get('showrcid') else 'none' %> |
|
54 | 54 | <p class="server-instance" style="display:${sid}"> |
|
55 | 55 | ## display hidden instance ID if specially defined |
|
56 | 56 | % if c.rhodecode_instanceid: |
|
57 | 57 | ${_('RhodeCode instance id: {}').format(c.rhodecode_instanceid)} |
|
58 | 58 | % endif |
|
59 | 59 | </p> |
|
60 | 60 | </div> |
|
61 | 61 | </div> |
|
62 | 62 | </div> |
|
63 | 63 | |
|
64 | 64 | <!-- END FOOTER --> |
|
65 | 65 | |
|
66 | 66 | ### MAKO DEFS ### |
|
67 | 67 | |
|
68 | 68 | <%def name="menu_bar_subnav()"> |
|
69 | 69 | </%def> |
|
70 | 70 | |
|
71 | 71 | <%def name="breadcrumbs(class_='breadcrumbs')"> |
|
72 | 72 | <div class="${class_}"> |
|
73 | 73 | ${self.breadcrumbs_links()} |
|
74 | 74 | </div> |
|
75 | 75 | </%def> |
|
76 | 76 | |
|
77 | 77 | <%def name="admin_menu(active=None)"> |
|
78 | 78 | <% |
|
79 | 79 | def is_active(selected): |
|
80 | 80 | if selected == active: |
|
81 | 81 | return "active" |
|
82 | 82 | %> |
|
83 | 83 | |
|
84 | 84 | <div id="context-bar"> |
|
85 | 85 | <div class="wrapper"> |
|
86 | 86 | <div class="title"> |
|
87 | 87 | <div class="title-content"> |
|
88 | 88 | <div class="title-main"> |
|
89 | 89 | % if c.is_super_admin: |
|
90 | 90 | ${_('Super Admin Panel')} |
|
91 | 91 | % else: |
|
92 | 92 | ${_('Delegated Admin Panel')} |
|
93 | 93 | % endif |
|
94 | 94 | </div> |
|
95 | 95 | </div> |
|
96 | 96 | </div> |
|
97 | 97 | |
|
98 | 98 | <ul id="context-pages" class="navigation horizontal-list"> |
|
99 | 99 | |
|
100 | 100 | ## super admin case |
|
101 | 101 | % if c.is_super_admin: |
|
102 | 102 | <li class="${is_active('audit_logs')}"><a href="${h.route_path('admin_audit_logs')}">${_('Admin audit logs')}</a></li> |
|
103 | 103 | <li class="${is_active('repositories')}"><a href="${h.route_path('repos')}">${_('Repositories')}</a></li> |
|
104 | 104 | <li class="${is_active('repository_groups')}"><a href="${h.route_path('repo_groups')}">${_('Repository groups')}</a></li> |
|
105 | 105 | <li class="${is_active('users')}"><a href="${h.route_path('users')}">${_('Users')}</a></li> |
|
106 | 106 | <li class="${is_active('user_groups')}"><a href="${h.route_path('user_groups')}">${_('User groups')}</a></li> |
|
107 | 107 | <li class="${is_active('permissions')}"><a href="${h.route_path('admin_permissions_application')}">${_('Permissions')}</a></li> |
|
108 | 108 | <li class="${is_active('authentication')}"><a href="${h.route_path('auth_home', traverse='')}">${_('Authentication')}</a></li> |
|
109 | 109 | <li class="${is_active('integrations')}"><a href="${h.route_path('global_integrations_home')}">${_('Integrations')}</a></li> |
|
110 | 110 | <li class="${is_active('defaults')}"><a href="${h.route_path('admin_defaults_repositories')}">${_('Defaults')}</a></li> |
|
111 | 111 | <li class="${is_active('settings')}"><a href="${h.route_path('admin_settings')}">${_('Settings')}</a></li> |
|
112 | 112 | |
|
113 | 113 | ## delegated admin |
|
114 | 114 | % elif c.is_delegated_admin: |
|
115 | 115 | <% |
|
116 | 116 | repositories=c.auth_user.repositories_admin or c.can_create_repo |
|
117 | 117 | repository_groups=c.auth_user.repository_groups_admin or c.can_create_repo_group |
|
118 | 118 | user_groups=c.auth_user.user_groups_admin or c.can_create_user_group |
|
119 | 119 | %> |
|
120 | 120 | |
|
121 | 121 | %if repositories: |
|
122 | 122 | <li class="${is_active('repositories')} local-admin-repos"><a href="${h.route_path('repos')}">${_('Repositories')}</a></li> |
|
123 | 123 | %endif |
|
124 | 124 | %if repository_groups: |
|
125 | 125 | <li class="${is_active('repository_groups')} local-admin-repo-groups"><a href="${h.route_path('repo_groups')}">${_('Repository groups')}</a></li> |
|
126 | 126 | %endif |
|
127 | 127 | %if user_groups: |
|
128 | 128 | <li class="${is_active('user_groups')} local-admin-user-groups"><a href="${h.route_path('user_groups')}">${_('User groups')}</a></li> |
|
129 | 129 | %endif |
|
130 | 130 | % endif |
|
131 | 131 | </ul> |
|
132 | 132 | |
|
133 | 133 | </div> |
|
134 | 134 | <div class="clear"></div> |
|
135 | 135 | </div> |
|
136 | 136 | </%def> |
|
137 | 137 | |
|
138 | 138 | <%def name="dt_info_panel(elements)"> |
|
139 | 139 | <dl class="dl-horizontal"> |
|
140 | 140 | %for dt, dd, title, show_items in elements: |
|
141 | 141 | <dt>${dt}:</dt> |
|
142 | 142 | <dd title="${h.tooltip(title)}"> |
|
143 | 143 | %if callable(dd): |
|
144 | 144 | ## allow lazy evaluation of elements |
|
145 | 145 | ${dd()} |
|
146 | 146 | %else: |
|
147 | 147 | ${dd} |
|
148 | 148 | %endif |
|
149 | 149 | %if show_items: |
|
150 | 150 | <span class="btn-collapse" data-toggle="item-${h.md5_safe(dt)[:6]}-details">${_('Show More')} </span> |
|
151 | 151 | %endif |
|
152 | 152 | </dd> |
|
153 | 153 | |
|
154 | 154 | %if show_items: |
|
155 | 155 | <div class="collapsable-content" data-toggle="item-${h.md5_safe(dt)[:6]}-details" style="display: none"> |
|
156 | 156 | %for item in show_items: |
|
157 | 157 | <dt></dt> |
|
158 | 158 | <dd>${item}</dd> |
|
159 | 159 | %endfor |
|
160 | 160 | </div> |
|
161 | 161 | %endif |
|
162 | 162 | |
|
163 | 163 | %endfor |
|
164 | 164 | </dl> |
|
165 | 165 | </%def> |
|
166 | 166 | |
|
167 | 167 | <%def name="gravatar(email, size=16)"> |
|
168 | 168 | <% |
|
169 | 169 | if (size > 16): |
|
170 | 170 | gravatar_class = 'gravatar gravatar-large' |
|
171 | 171 | else: |
|
172 | 172 | gravatar_class = 'gravatar' |
|
173 | 173 | %> |
|
174 | 174 | <%doc> |
|
175 | 175 | TODO: johbo: For now we serve double size images to make it smooth |
|
176 | 176 | for retina. This is how it worked until now. Should be replaced |
|
177 | 177 | with a better solution at some point. |
|
178 | 178 | </%doc> |
|
179 | 179 | <img class="${gravatar_class}" src="${h.gravatar_url(email, size * 2)}" height="${size}" width="${size}"> |
|
180 | 180 | </%def> |
|
181 | 181 | |
|
182 | 182 | |
|
183 | 183 | <%def name="gravatar_with_user(contact, size=16, show_disabled=False)"> |
|
184 | 184 | <% email = h.email_or_none(contact) %> |
|
185 | 185 | <div class="rc-user tooltip" title="${h.tooltip(h.author_string(email))}"> |
|
186 | 186 | ${self.gravatar(email, size)} |
|
187 | 187 | <span class="${'user user-disabled' if show_disabled else 'user'}"> ${h.link_to_user(contact)}</span> |
|
188 | 188 | </div> |
|
189 | 189 | </%def> |
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | <%def name="repo_page_title(repo_instance)"> |
|
193 | 193 | <div class="title-content repo-title"> |
|
194 | 194 | |
|
195 | 195 | <div class="title-main"> |
|
196 | 196 | ## SVN/HG/GIT icons |
|
197 | 197 | %if h.is_hg(repo_instance): |
|
198 | 198 | <i class="icon-hg"></i> |
|
199 | 199 | %endif |
|
200 | 200 | %if h.is_git(repo_instance): |
|
201 | 201 | <i class="icon-git"></i> |
|
202 | 202 | %endif |
|
203 | 203 | %if h.is_svn(repo_instance): |
|
204 | 204 | <i class="icon-svn"></i> |
|
205 | 205 | %endif |
|
206 | 206 | |
|
207 | 207 | ## public/private |
|
208 | 208 | %if repo_instance.private: |
|
209 | 209 | <i class="icon-repo-private"></i> |
|
210 | 210 | %else: |
|
211 | 211 | <i class="icon-repo-public"></i> |
|
212 | 212 | %endif |
|
213 | 213 | |
|
214 | 214 | ## repo name with group name |
|
215 | 215 | ${h.breadcrumb_repo_link(repo_instance)} |
|
216 | 216 | |
|
217 | 217 | ## Context Actions |
|
218 | 218 | <div class="pull-right"> |
|
219 | 219 | %if c.rhodecode_user.username != h.DEFAULT_USER: |
|
220 | 220 | <a href="${h.route_path('atom_feed_home', repo_name=c.rhodecode_db_repo.repo_name, _query=dict(auth_token=c.rhodecode_user.feed_token))}" title="${_('RSS Feed')}" class="btn btn-sm"><i class="icon-rss-sign"></i>RSS</a> |
|
221 | 221 | |
|
222 | 222 | <a href="#WatchRepo" onclick="toggleFollowingRepo(this, templateContext.repo_id); return false" title="${_('Watch this Repository and actions on it in your personalized journal')}" class="btn btn-sm ${('watching' if c.repository_is_user_following else '')}"> |
|
223 | 223 | % if c.repository_is_user_following: |
|
224 | 224 | <i class="icon-eye-off"></i>${_('Unwatch')} |
|
225 | 225 | % else: |
|
226 | 226 | <i class="icon-eye"></i>${_('Watch')} |
|
227 | 227 | % endif |
|
228 | 228 | |
|
229 | 229 | </a> |
|
230 | 230 | %else: |
|
231 | 231 | <a href="${h.route_path('atom_feed_home', repo_name=c.rhodecode_db_repo.repo_name)}" title="${_('RSS Feed')}" class="btn btn-sm"><i class="icon-rss-sign"></i>RSS</a> |
|
232 | 232 | %endif |
|
233 | 233 | </div> |
|
234 | 234 | |
|
235 | 235 | </div> |
|
236 | 236 | |
|
237 | 237 | ## FORKED |
|
238 | 238 | %if repo_instance.fork: |
|
239 | 239 | <p class="discreet"> |
|
240 | 240 | <i class="icon-code-fork"></i> ${_('Fork of')} |
|
241 | 241 | ${h.link_to_if(c.has_origin_repo_read_perm,repo_instance.fork.repo_name, h.route_path('repo_summary', repo_name=repo_instance.fork.repo_name))} |
|
242 | 242 | </p> |
|
243 | 243 | %endif |
|
244 | 244 | |
|
245 | 245 | ## IMPORTED FROM REMOTE |
|
246 | 246 | %if repo_instance.clone_uri: |
|
247 | 247 | <p class="discreet"> |
|
248 | 248 | <i class="icon-code-fork"></i> ${_('Clone from')} |
|
249 | 249 | <a href="${h.safe_str(h.hide_credentials(repo_instance.clone_uri))}">${h.hide_credentials(repo_instance.clone_uri)}</a> |
|
250 | 250 | </p> |
|
251 | 251 | %endif |
|
252 | 252 | |
|
253 | 253 | ## LOCKING STATUS |
|
254 | 254 | %if repo_instance.locked[0]: |
|
255 | 255 | <p class="locking_locked discreet"> |
|
256 | 256 | <i class="icon-repo-lock"></i> |
|
257 | 257 | ${_('Repository locked by %(user)s') % {'user': h.person_by_id(repo_instance.locked[0])}} |
|
258 | 258 | </p> |
|
259 | 259 | %elif repo_instance.enable_locking: |
|
260 | 260 | <p class="locking_unlocked discreet"> |
|
261 | 261 | <i class="icon-repo-unlock"></i> |
|
262 | 262 | ${_('Repository not locked. Pull repository to lock it.')} |
|
263 | 263 | </p> |
|
264 | 264 | %endif |
|
265 | 265 | |
|
266 | 266 | </div> |
|
267 | 267 | </%def> |
|
268 | 268 | |
|
269 | 269 | <%def name="repo_menu(active=None)"> |
|
270 | 270 | <% |
|
271 | 271 | def is_active(selected): |
|
272 | 272 | if selected == active: |
|
273 | 273 | return "active" |
|
274 | 274 | %> |
|
275 | 275 | |
|
276 | 276 | <!--- REPO CONTEXT BAR --> |
|
277 | 277 | <div id="context-bar"> |
|
278 | 278 | <div class="wrapper"> |
|
279 | 279 | |
|
280 | 280 | <div class="title"> |
|
281 | 281 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
282 | 282 | </div> |
|
283 | 283 | |
|
284 | 284 | <ul id="context-pages" class="navigation horizontal-list"> |
|
285 | 285 | <li class="${is_active('summary')}"><a class="menulink" href="${h.route_path('repo_summary', repo_name=c.repo_name)}"><div class="menulabel">${_('Summary')}</div></a></li> |
|
286 |
<li class="${is_active('c |
|
|
286 | <li class="${is_active('commits')}"><a class="menulink" href="${h.route_path('repo_commits', repo_name=c.repo_name)}"><div class="menulabel">${_('Commits')}</div></a></li> | |
|
287 | 287 | <li class="${is_active('files')}"><a class="menulink" href="${h.route_path('repo_files', repo_name=c.repo_name, commit_id=c.rhodecode_db_repo.landing_rev[1], f_path='')}"><div class="menulabel">${_('Files')}</div></a></li> |
|
288 | 288 | <li class="${is_active('compare')}"><a class="menulink" href="${h.route_path('repo_compare_select',repo_name=c.repo_name)}"><div class="menulabel">${_('Compare')}</div></a></li> |
|
289 | 289 | |
|
290 | 290 | ## TODO: anderson: ideally it would have a function on the scm_instance "enable_pullrequest() and enable_fork()" |
|
291 | 291 | %if c.rhodecode_db_repo.repo_type in ['git','hg']: |
|
292 | 292 | <li class="${is_active('showpullrequest')}"> |
|
293 | 293 | <a class="menulink" href="${h.route_path('pullrequest_show_all', repo_name=c.repo_name)}" title="${h.tooltip(_('Show Pull Requests for %s') % c.repo_name)}"> |
|
294 | 294 | <div class="menulabel"> |
|
295 | 295 | %if c.repository_pull_requests == 1: |
|
296 | 296 | ${c.repository_pull_requests} ${_('Pull Request')} |
|
297 | 297 | %else: |
|
298 | 298 | ${c.repository_pull_requests} ${_('Pull Requests')} |
|
299 | 299 | %endif |
|
300 | 300 | </div> |
|
301 | 301 | </a> |
|
302 | 302 | </li> |
|
303 | 303 | %endif |
|
304 | 304 | |
|
305 | 305 | <li class="${is_active('artifacts')}"><a class="menulink" href="${h.route_path('repo_artifacts_list',repo_name=c.repo_name)}"><div class="menulabel">${_('Artifacts')} (BETA)</div></a></li> |
|
306 | 306 | |
|
307 | 307 | %if h.HasRepoPermissionAll('repository.admin')(c.repo_name): |
|
308 | 308 | <li class="${is_active('settings')}"><a class="menulink" href="${h.route_path('edit_repo',repo_name=c.repo_name)}"><div class="menulabel">${_('Repository Settings')}</div></a></li> |
|
309 | 309 | %endif |
|
310 | 310 | |
|
311 | 311 | ## determine if we have "any" option available |
|
312 | 312 | <% |
|
313 | 313 | can_lock = h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name) and c.rhodecode_db_repo.enable_locking |
|
314 | 314 | has_actions = (c.rhodecode_user.username != h.DEFAULT_USER and c.rhodecode_db_repo.repo_type in ['git','hg'] ) or can_lock |
|
315 | 315 | %> |
|
316 | 316 | <li class="${is_active('options')}"> |
|
317 | 317 | % if has_actions: |
|
318 | 318 | <a class="menulink dropdown"> |
|
319 | 319 | <div class="menulabel">${_('Options')}<div class="show_more"></div></div> |
|
320 | 320 | </a> |
|
321 | 321 | <ul class="submenu"> |
|
322 | 322 | <li><a href="${h.route_path('repo_fork_new',repo_name=c.repo_name)}">${_('Fork this repository')}</a></li> |
|
323 | 323 | <li><a href="${h.route_path('pullrequest_new',repo_name=c.repo_name)}">${_('Create Pull Request')}</a></li> |
|
324 | 324 | %if can_lock: |
|
325 | 325 | %if c.rhodecode_db_repo.locked[0]: |
|
326 | 326 | <li><a class="locking_del" href="${h.route_path('repo_edit_toggle_locking',repo_name=c.repo_name)}">${_('Unlock Repository')}</a></li> |
|
327 | 327 | %else: |
|
328 | 328 | <li><a class="locking_add" href="${h.route_path('repo_edit_toggle_locking',repo_name=c.repo_name)}">${_('Lock Repository')}</a></li> |
|
329 | 329 | %endif |
|
330 | 330 | %endif |
|
331 | 331 | </ul> |
|
332 | 332 | % else: |
|
333 | 333 | <a class="menulink disabled"> |
|
334 | 334 | <div class="menulabel">${_('Options')}<div class="show_more"></div></div> |
|
335 | 335 | </a> |
|
336 | 336 | % endif |
|
337 | 337 | </li> |
|
338 | 338 | |
|
339 | 339 | </ul> |
|
340 | 340 | </div> |
|
341 | 341 | <div class="clear"></div> |
|
342 | 342 | </div> |
|
343 | 343 | % if c.rhodecode_db_repo.archived: |
|
344 | 344 | <div class="alert alert-warning text-center"> |
|
345 | 345 | <strong>${_('This repository has been archived. It is now read-only.')}</strong> |
|
346 | 346 | </div> |
|
347 | 347 | % endif |
|
348 | 348 | <!--- REPO END CONTEXT BAR --> |
|
349 | 349 | |
|
350 | 350 | </%def> |
|
351 | 351 | |
|
352 | 352 | <%def name="repo_group_page_title(repo_group_instance)"> |
|
353 | 353 | <div class="title-content"> |
|
354 | 354 | <div class="title-main"> |
|
355 | 355 | ## Repository Group icon |
|
356 | 356 | <i class="icon-repo-group"></i> |
|
357 | 357 | |
|
358 | 358 | ## repo name with group name |
|
359 | 359 | ${h.breadcrumb_repo_group_link(repo_group_instance)} |
|
360 | 360 | </div> |
|
361 | 361 | |
|
362 | 362 | <%namespace name="dt" file="/data_table/_dt_elements.mako"/> |
|
363 | 363 | <div class="repo-group-desc discreet"> |
|
364 | 364 | ${dt.repo_group_desc(repo_group_instance.description_safe, repo_group_instance.personal, c.visual.stylify_metatags)} |
|
365 | 365 | </div> |
|
366 | 366 | |
|
367 | 367 | </div> |
|
368 | 368 | </%def> |
|
369 | 369 | |
|
370 | 370 | <%def name="repo_group_menu(active=None)"> |
|
371 | 371 | <% |
|
372 | 372 | def is_active(selected): |
|
373 | 373 | if selected == active: |
|
374 | 374 | return "active" |
|
375 | 375 | |
|
376 | 376 | gr_name = c.repo_group.group_name if c.repo_group else None |
|
377 | 377 | # create repositories with write permission on group is set to true |
|
378 | 378 | create_on_write = h.HasPermissionAny('hg.create.write_on_repogroup.true')() |
|
379 | 379 | group_admin = h.HasRepoGroupPermissionAny('group.admin')(gr_name, 'group admin index page') |
|
380 | 380 | group_write = h.HasRepoGroupPermissionAny('group.write')(gr_name, 'can write into group index page') |
|
381 | 381 | |
|
382 | 382 | %> |
|
383 | 383 | |
|
384 | 384 | <!--- REPO GROUP CONTEXT BAR --> |
|
385 | 385 | <div id="context-bar"> |
|
386 | 386 | <div class="wrapper"> |
|
387 | 387 | <div class="title"> |
|
388 | 388 | ${self.repo_group_page_title(c.repo_group)} |
|
389 | 389 | </div> |
|
390 | 390 | |
|
391 | 391 | <ul id="context-pages" class="navigation horizontal-list"> |
|
392 | 392 | <li class="${is_active('home')}"><a class="menulink" href="${h.route_path('repo_group_home', repo_group_name=c.repo_group.group_name)}"><div class="menulabel">${_('Group Home')}</div></a></li> |
|
393 | 393 | % if c.is_super_admin or group_admin: |
|
394 | 394 | <li class="${is_active('settings')}"><a class="menulink" href="${h.route_path('edit_repo_group',repo_group_name=c.repo_group.group_name)}" title="${_('You have admin right to this group, and can edit it')}"><div class="menulabel">${_('Group Settings')}</div></a></li> |
|
395 | 395 | % endif |
|
396 | 396 | ## determine if we have "any" option available |
|
397 | 397 | <% |
|
398 | 398 | can_create_repos = c.is_super_admin or group_admin or (group_write and create_on_write) |
|
399 | 399 | can_create_repo_groups = c.is_super_admin or group_admin |
|
400 | 400 | has_actions = can_create_repos or can_create_repo_groups |
|
401 | 401 | %> |
|
402 | 402 | <li class="${is_active('options')}"> |
|
403 | 403 | % if has_actions: |
|
404 | 404 | <a class="menulink dropdown"> |
|
405 | 405 | <div class="menulabel">${_('Options')} <div class="show_more"></div></div> |
|
406 | 406 | </a> |
|
407 | 407 | <ul class="submenu"> |
|
408 | 408 | %if can_create_repos: |
|
409 | 409 | <li><a href="${h.route_path('repo_new',_query=dict(parent_group=c.repo_group.group_id))}">${_('Add Repository')}</a></li> |
|
410 | 410 | %endif |
|
411 | 411 | %if can_create_repo_groups: |
|
412 | 412 | <li><a href="${h.route_path('repo_group_new',_query=dict(parent_group=c.repo_group.group_id))}">${_(u'Add Repository Group')}</a></li> |
|
413 | 413 | %endif |
|
414 | 414 | </ul> |
|
415 | 415 | % else: |
|
416 | 416 | <a class="menulink disabled"> |
|
417 | 417 | <div class="menulabel">${_('Options')} <div class="show_more"></div></div> |
|
418 | 418 | </a> |
|
419 | 419 | % endif |
|
420 | 420 | </li> |
|
421 | 421 | </ul> |
|
422 | 422 | </div> |
|
423 | 423 | <div class="clear"></div> |
|
424 | 424 | </div> |
|
425 | 425 | |
|
426 | 426 | <!--- REPO GROUP CONTEXT BAR --> |
|
427 | 427 | |
|
428 | 428 | </%def> |
|
429 | 429 | |
|
430 | 430 | |
|
431 | 431 | <%def name="usermenu(active=False)"> |
|
432 | 432 | ## USER MENU |
|
433 | 433 | <li id="quick_login_li" class="${'active' if active else ''}"> |
|
434 | 434 | % if c.rhodecode_user.username == h.DEFAULT_USER: |
|
435 | 435 | <a id="quick_login_link" class="menulink childs" href="${h.route_path('login', _query={'came_from': h.current_route_path(request)})}"> |
|
436 | 436 | ${gravatar(c.rhodecode_user.email, 20)} |
|
437 | 437 | <span class="user"> |
|
438 | 438 | <span>${_('Sign in')}</span> |
|
439 | 439 | </span> |
|
440 | 440 | </a> |
|
441 | 441 | % else: |
|
442 | 442 | ## logged in user |
|
443 | 443 | <a id="quick_login_link" class="menulink childs"> |
|
444 | 444 | ${gravatar(c.rhodecode_user.email, 20)} |
|
445 | 445 | <span class="user"> |
|
446 | 446 | <span class="menu_link_user">${c.rhodecode_user.username}</span> |
|
447 | 447 | <div class="show_more"></div> |
|
448 | 448 | </span> |
|
449 | 449 | </a> |
|
450 | 450 | ## subnav with menu for logged in user |
|
451 | 451 | <div class="user-menu submenu"> |
|
452 | 452 | <div id="quick_login"> |
|
453 | 453 | %if c.rhodecode_user.username != h.DEFAULT_USER: |
|
454 | 454 | <div class=""> |
|
455 | 455 | <div class="big_gravatar">${gravatar(c.rhodecode_user.email, 48)}</div> |
|
456 | 456 | <div class="full_name">${c.rhodecode_user.full_name_or_username}</div> |
|
457 | 457 | <div class="email">${c.rhodecode_user.email}</div> |
|
458 | 458 | </div> |
|
459 | 459 | <div class=""> |
|
460 | 460 | <ol class="links"> |
|
461 | 461 | <li>${h.link_to(_(u'My account'),h.route_path('my_account_profile'))}</li> |
|
462 | 462 | % if c.rhodecode_user.personal_repo_group: |
|
463 | 463 | <li>${h.link_to(_(u'My personal group'), h.route_path('repo_group_home', repo_group_name=c.rhodecode_user.personal_repo_group.group_name))}</li> |
|
464 | 464 | % endif |
|
465 | 465 | <li>${h.link_to(_(u'Pull Requests'), h.route_path('my_account_pullrequests'))}</li> |
|
466 | 466 | ## bookmark-items |
|
467 | 467 | <li class="bookmark-items"> |
|
468 | 468 | ${_('Bookmarks')} |
|
469 | 469 | <div class="pull-right"> |
|
470 | 470 | <a href="${h.route_path('my_account_bookmarks')}">${_('Manage')}</a> |
|
471 | 471 | </div> |
|
472 | 472 | </li> |
|
473 | 473 | % if not c.bookmark_items: |
|
474 | 474 | <li> |
|
475 | 475 | <a href="${h.route_path('my_account_bookmarks')}">${_('No Bookmarks yet.')}</a> |
|
476 | 476 | </li> |
|
477 | 477 | % endif |
|
478 | 478 | % for item in c.bookmark_items: |
|
479 | 479 | <li> |
|
480 | 480 | % if item.repository: |
|
481 | 481 | <div> |
|
482 | 482 | <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}"> |
|
483 | 483 | <code>${item.position}</code> |
|
484 | 484 | % if item.repository.repo_type == 'hg': |
|
485 | 485 | <i class="icon-hg" title="${_('Repository')}" style="font-size: 16px"></i> |
|
486 | 486 | % elif item.repository.repo_type == 'git': |
|
487 | 487 | <i class="icon-git" title="${_('Repository')}" style="font-size: 16px"></i> |
|
488 | 488 | % elif item.repository.repo_type == 'svn': |
|
489 | 489 | <i class="icon-svn" title="${_('Repository')}" style="font-size: 16px"></i> |
|
490 | 490 | % endif |
|
491 | 491 | ${(item.title or h.shorter(item.repository.repo_name, 30))} |
|
492 | 492 | </a> |
|
493 | 493 | </div> |
|
494 | 494 | % elif item.repository_group: |
|
495 | 495 | <div> |
|
496 | 496 | <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}"> |
|
497 | 497 | <code>${item.position}</code> |
|
498 | 498 | <i class="icon-repo-group" title="${_('Repository group')}" style="font-size: 14px"></i> |
|
499 | 499 | ${(item.title or h.shorter(item.repository_group.group_name, 30))} |
|
500 | 500 | </a> |
|
501 | 501 | </div> |
|
502 | 502 | % else: |
|
503 | 503 | <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}"> |
|
504 | 504 | <code>${item.position}</code> |
|
505 | 505 | ${item.title} |
|
506 | 506 | </a> |
|
507 | 507 | % endif |
|
508 | 508 | </li> |
|
509 | 509 | % endfor |
|
510 | 510 | |
|
511 | 511 | <li class="logout"> |
|
512 | 512 | ${h.secure_form(h.route_path('logout'), request=request)} |
|
513 | 513 | ${h.submit('log_out', _(u'Sign Out'),class_="btn btn-primary")} |
|
514 | 514 | ${h.end_form()} |
|
515 | 515 | </li> |
|
516 | 516 | </ol> |
|
517 | 517 | </div> |
|
518 | 518 | %endif |
|
519 | 519 | </div> |
|
520 | 520 | </div> |
|
521 | 521 | ## unread counter |
|
522 | 522 | <div class="pill_container"> |
|
523 | 523 | <a class="menu_link_notifications ${'empty' if c.unread_notifications == 0 else ''}" href="${h.route_path('notifications_show_all')}">${c.unread_notifications}</a> |
|
524 | 524 | </div> |
|
525 | 525 | % endif |
|
526 | 526 | </li> |
|
527 | 527 | </%def> |
|
528 | 528 | |
|
529 | 529 | <%def name="menu_items(active=None)"> |
|
530 | 530 | <% |
|
531 | 531 | def is_active(selected): |
|
532 | 532 | if selected == active: |
|
533 | 533 | return "active" |
|
534 | 534 | return "" |
|
535 | 535 | %> |
|
536 | 536 | |
|
537 | 537 | <ul id="quick" class="main_nav navigation horizontal-list"> |
|
538 | 538 | ## notice box for important system messages |
|
539 | 539 | <li style="display: none"> |
|
540 | 540 | <a class="notice-box" href="#openNotice" onclick="showNoticeBox(); return false"> |
|
541 | 541 | <div class="menulabel-notice" > |
|
542 | 542 | 0 |
|
543 | 543 | </div> |
|
544 | 544 | </a> |
|
545 | 545 | </li> |
|
546 | 546 | |
|
547 | 547 | ## Main filter |
|
548 | 548 | <li> |
|
549 | 549 | <div class="menulabel main_filter_box"> |
|
550 | 550 | <div class="main_filter_input_box"> |
|
551 | 551 | <ul class="searchItems"> |
|
552 | 552 | |
|
553 | 553 | % if c.template_context['search_context']['repo_id']: |
|
554 | 554 | <li class="searchTag searchTagFilter searchTagHidable" > |
|
555 | 555 | ##<a href="${h.route_path('search_repo',repo_name=c.template_context['search_context']['repo_name'])}"> |
|
556 | 556 | <span class="tag"> |
|
557 | 557 | This repo |
|
558 | 558 | <a href="#removeGoToFilter" onclick="removeGoToFilter(); return false"><i class="icon-cancel-circled"></i></a> |
|
559 | 559 | </span> |
|
560 | 560 | ##</a> |
|
561 | 561 | </li> |
|
562 | 562 | % elif c.template_context['search_context']['repo_group_id']: |
|
563 | 563 | <li class="searchTag searchTagFilter searchTagHidable"> |
|
564 | 564 | ##<a href="${h.route_path('search_repo_group',repo_group_name=c.template_context['search_context']['repo_group_name'])}"> |
|
565 | 565 | <span class="tag"> |
|
566 | 566 | This group |
|
567 | 567 | <a href="#removeGoToFilter" onclick="removeGoToFilter(); return false"><i class="icon-cancel-circled"></i></a> |
|
568 | 568 | </span> |
|
569 | 569 | ##</a> |
|
570 | 570 | </li> |
|
571 | 571 | % endif |
|
572 | 572 | |
|
573 | 573 | <li class="searchTagInput"> |
|
574 | 574 | <input class="main_filter_input" id="main_filter" size="15" type="text" name="main_filter" placeholder="${_('search / go to...')}" value="" /> |
|
575 | 575 | </li> |
|
576 | 576 | <li class="searchTag searchTagHelp"> |
|
577 | 577 | <a href="#showFilterHelp" onclick="showMainFilterBox(); return false">?</a> |
|
578 | 578 | </li> |
|
579 | 579 | </ul> |
|
580 | 580 | </div> |
|
581 | 581 | </div> |
|
582 | 582 | |
|
583 | 583 | <div id="main_filter_help" style="display: none"> |
|
584 | 584 | - Use '/' key to quickly access this field. |
|
585 | 585 | |
|
586 | 586 | - Enter a name of repository, or repository group for quick search. |
|
587 | 587 | |
|
588 | 588 | - Prefix query to allow special search: |
|
589 | 589 | |
|
590 | 590 | user:admin, to search for usernames, always global |
|
591 | 591 | |
|
592 | 592 | user_group:devops, to search for user groups, always global |
|
593 | 593 | |
|
594 | 594 | commit:efced4, to search for commits, scoped to repositories or groups |
|
595 | 595 | |
|
596 | 596 | file:models.py, to search for file paths, scoped to repositories or groups |
|
597 | 597 | |
|
598 | 598 | % if c.template_context['search_context']['repo_id']: |
|
599 | 599 | For advanced full text search visit: <a href="${h.route_path('search_repo',repo_name=c.template_context['search_context']['repo_name'])}">repository search</a> |
|
600 | 600 | % elif c.template_context['search_context']['repo_group_id']: |
|
601 | 601 | For advanced full text search visit: <a href="${h.route_path('search_repo_group',repo_group_name=c.template_context['search_context']['repo_group_name'])}">repository group search</a> |
|
602 | 602 | % else: |
|
603 | 603 | For advanced full text search visit: <a href="${h.route_path('search')}">global search</a> |
|
604 | 604 | % endif |
|
605 | 605 | </div> |
|
606 | 606 | </li> |
|
607 | 607 | |
|
608 | 608 | ## ROOT MENU |
|
609 | 609 | <li class="${is_active('home')}"> |
|
610 | 610 | <a class="menulink" title="${_('Home')}" href="${h.route_path('home')}"> |
|
611 | 611 | <div class="menulabel">${_('Home')}</div> |
|
612 | 612 | </a> |
|
613 | 613 | </li> |
|
614 | 614 | |
|
615 | 615 | %if c.rhodecode_user.username != h.DEFAULT_USER: |
|
616 | 616 | <li class="${is_active('journal')}"> |
|
617 | 617 | <a class="menulink" title="${_('Show activity journal')}" href="${h.route_path('journal')}"> |
|
618 | 618 | <div class="menulabel">${_('Journal')}</div> |
|
619 | 619 | </a> |
|
620 | 620 | </li> |
|
621 | 621 | %else: |
|
622 | 622 | <li class="${is_active('journal')}"> |
|
623 | 623 | <a class="menulink" title="${_('Show Public activity journal')}" href="${h.route_path('journal_public')}"> |
|
624 | 624 | <div class="menulabel">${_('Public journal')}</div> |
|
625 | 625 | </a> |
|
626 | 626 | </li> |
|
627 | 627 | %endif |
|
628 | 628 | |
|
629 | 629 | <li class="${is_active('gists')}"> |
|
630 | 630 | <a class="menulink childs" title="${_('Show Gists')}" href="${h.route_path('gists_show')}"> |
|
631 | 631 | <div class="menulabel">${_('Gists')}</div> |
|
632 | 632 | </a> |
|
633 | 633 | </li> |
|
634 | 634 | |
|
635 | 635 | % if c.is_super_admin or c.is_delegated_admin: |
|
636 | 636 | <li class="${is_active('admin')}"> |
|
637 | 637 | <a class="menulink childs" title="${_('Admin settings')}" href="${h.route_path('admin_home')}"> |
|
638 | 638 | <div class="menulabel">${_('Admin')} </div> |
|
639 | 639 | </a> |
|
640 | 640 | </li> |
|
641 | 641 | % endif |
|
642 | 642 | |
|
643 | 643 | ## render extra user menu |
|
644 | 644 | ${usermenu(active=(active=='my_account'))} |
|
645 | 645 | |
|
646 | 646 | % if c.debug_style: |
|
647 | 647 | <li> |
|
648 | 648 | <a class="menulink" title="${_('Style')}" href="${h.route_path('debug_style_home')}"> |
|
649 | 649 | <div class="menulabel">${_('[Style]')}</div> |
|
650 | 650 | </a> |
|
651 | 651 | </li> |
|
652 | 652 | % endif |
|
653 | 653 | </ul> |
|
654 | 654 | |
|
655 | 655 | <script type="text/javascript"> |
|
656 | 656 | var visualShowPublicIcon = "${c.visual.show_public_icon}" == "True"; |
|
657 | 657 | |
|
658 | 658 | var formatRepoResult = function(result, container, query, escapeMarkup) { |
|
659 | 659 | return function(data, escapeMarkup) { |
|
660 | 660 | if (!data.repo_id){ |
|
661 | 661 | return data.text; // optgroup text Repositories |
|
662 | 662 | } |
|
663 | 663 | |
|
664 | 664 | var tmpl = ''; |
|
665 | 665 | var repoType = data['repo_type']; |
|
666 | 666 | var repoName = data['text']; |
|
667 | 667 | |
|
668 | 668 | if(data && data.type == 'repo'){ |
|
669 | 669 | if(repoType === 'hg'){ |
|
670 | 670 | tmpl += '<i class="icon-hg"></i> '; |
|
671 | 671 | } |
|
672 | 672 | else if(repoType === 'git'){ |
|
673 | 673 | tmpl += '<i class="icon-git"></i> '; |
|
674 | 674 | } |
|
675 | 675 | else if(repoType === 'svn'){ |
|
676 | 676 | tmpl += '<i class="icon-svn"></i> '; |
|
677 | 677 | } |
|
678 | 678 | if(data['private']){ |
|
679 | 679 | tmpl += '<i class="icon-lock" ></i> '; |
|
680 | 680 | } |
|
681 | 681 | else if(visualShowPublicIcon){ |
|
682 | 682 | tmpl += '<i class="icon-unlock-alt"></i> '; |
|
683 | 683 | } |
|
684 | 684 | } |
|
685 | 685 | tmpl += escapeMarkup(repoName); |
|
686 | 686 | return tmpl; |
|
687 | 687 | |
|
688 | 688 | }(result, escapeMarkup); |
|
689 | 689 | }; |
|
690 | 690 | |
|
691 | 691 | var formatRepoGroupResult = function(result, container, query, escapeMarkup) { |
|
692 | 692 | return function(data, escapeMarkup) { |
|
693 | 693 | if (!data.repo_group_id){ |
|
694 | 694 | return data.text; // optgroup text Repositories |
|
695 | 695 | } |
|
696 | 696 | |
|
697 | 697 | var tmpl = ''; |
|
698 | 698 | var repoGroupName = data['text']; |
|
699 | 699 | |
|
700 | 700 | if(data){ |
|
701 | 701 | |
|
702 | 702 | tmpl += '<i class="icon-repo-group"></i> '; |
|
703 | 703 | |
|
704 | 704 | } |
|
705 | 705 | tmpl += escapeMarkup(repoGroupName); |
|
706 | 706 | return tmpl; |
|
707 | 707 | |
|
708 | 708 | }(result, escapeMarkup); |
|
709 | 709 | }; |
|
710 | 710 | |
|
711 | 711 | var escapeRegExChars = function (value) { |
|
712 | 712 | return value.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); |
|
713 | 713 | }; |
|
714 | 714 | |
|
715 | 715 | var getRepoIcon = function(repo_type) { |
|
716 | 716 | if (repo_type === 'hg') { |
|
717 | 717 | return '<i class="icon-hg"></i> '; |
|
718 | 718 | } |
|
719 | 719 | else if (repo_type === 'git') { |
|
720 | 720 | return '<i class="icon-git"></i> '; |
|
721 | 721 | } |
|
722 | 722 | else if (repo_type === 'svn') { |
|
723 | 723 | return '<i class="icon-svn"></i> '; |
|
724 | 724 | } |
|
725 | 725 | return '' |
|
726 | 726 | }; |
|
727 | 727 | |
|
728 | 728 | var autocompleteMainFilterFormatResult = function (data, value, org_formatter) { |
|
729 | 729 | |
|
730 | 730 | if (value.split(':').length === 2) { |
|
731 | 731 | value = value.split(':')[1] |
|
732 | 732 | } |
|
733 | 733 | |
|
734 | 734 | var searchType = data['type']; |
|
735 | 735 | var valueDisplay = data['value_display']; |
|
736 | 736 | |
|
737 | 737 | var pattern = '(' + escapeRegExChars(value) + ')'; |
|
738 | 738 | |
|
739 | 739 | valueDisplay = Select2.util.escapeMarkup(valueDisplay); |
|
740 | 740 | |
|
741 | 741 | // highlight match |
|
742 | 742 | if (searchType != 'text') { |
|
743 | 743 | valueDisplay = valueDisplay.replace(new RegExp(pattern, 'gi'), '<strong>$1<\/strong>'); |
|
744 | 744 | } |
|
745 | 745 | |
|
746 | 746 | var icon = ''; |
|
747 | 747 | |
|
748 | 748 | if (searchType === 'hint') { |
|
749 | 749 | icon += '<i class="icon-repo-group"></i> '; |
|
750 | 750 | } |
|
751 | 751 | // full text search |
|
752 | 752 | else if (searchType === 'search') { |
|
753 | 753 | icon += '<i class="icon-more"></i> '; |
|
754 | 754 | } |
|
755 | 755 | // repository |
|
756 | 756 | else if (searchType === 'repo') { |
|
757 | 757 | |
|
758 | 758 | var repoIcon = getRepoIcon(data['repo_type']); |
|
759 | 759 | icon += repoIcon; |
|
760 | 760 | |
|
761 | 761 | if (data['private']) { |
|
762 | 762 | icon += '<i class="icon-lock" ></i> '; |
|
763 | 763 | } |
|
764 | 764 | else if (visualShowPublicIcon) { |
|
765 | 765 | icon += '<i class="icon-unlock-alt"></i> '; |
|
766 | 766 | } |
|
767 | 767 | } |
|
768 | 768 | // repository groups |
|
769 | 769 | else if (searchType === 'repo_group') { |
|
770 | 770 | icon += '<i class="icon-repo-group"></i> '; |
|
771 | 771 | } |
|
772 | 772 | // user group |
|
773 | 773 | else if (searchType === 'user_group') { |
|
774 | 774 | icon += '<i class="icon-group"></i> '; |
|
775 | 775 | } |
|
776 | 776 | // user |
|
777 | 777 | else if (searchType === 'user') { |
|
778 | 778 | icon += '<img class="gravatar" src="{0}"/>'.format(data['icon_link']); |
|
779 | 779 | } |
|
780 | 780 | // commit |
|
781 | 781 | else if (searchType === 'commit') { |
|
782 | 782 | var repo_data = data['repo_data']; |
|
783 | 783 | var repoIcon = getRepoIcon(repo_data['repository_type']); |
|
784 | 784 | if (repoIcon) { |
|
785 | 785 | icon += repoIcon; |
|
786 | 786 | } else { |
|
787 | 787 | icon += '<i class="icon-tag"></i>'; |
|
788 | 788 | } |
|
789 | 789 | } |
|
790 | 790 | // file |
|
791 | 791 | else if (searchType === 'file') { |
|
792 | 792 | var repo_data = data['repo_data']; |
|
793 | 793 | var repoIcon = getRepoIcon(repo_data['repository_type']); |
|
794 | 794 | if (repoIcon) { |
|
795 | 795 | icon += repoIcon; |
|
796 | 796 | } else { |
|
797 | 797 | icon += '<i class="icon-tag"></i>'; |
|
798 | 798 | } |
|
799 | 799 | } |
|
800 | 800 | // generic text |
|
801 | 801 | else if (searchType === 'text') { |
|
802 | 802 | icon = ''; |
|
803 | 803 | } |
|
804 | 804 | |
|
805 | 805 | var tmpl = '<div class="ac-container-wrap">{0}{1}</div>'; |
|
806 | 806 | return tmpl.format(icon, valueDisplay); |
|
807 | 807 | }; |
|
808 | 808 | |
|
809 | 809 | var handleSelect = function(element, suggestion) { |
|
810 | 810 | if (suggestion.type === "hint") { |
|
811 | 811 | // we skip action |
|
812 | 812 | $('#main_filter').focus(); |
|
813 | 813 | } |
|
814 | 814 | else if (suggestion.type === "text") { |
|
815 | 815 | // we skip action |
|
816 | 816 | $('#main_filter').focus(); |
|
817 | 817 | |
|
818 | 818 | } else { |
|
819 | 819 | window.location = suggestion['url']; |
|
820 | 820 | } |
|
821 | 821 | }; |
|
822 | 822 | |
|
823 | 823 | var autocompleteMainFilterResult = function (suggestion, originalQuery, queryLowerCase) { |
|
824 | 824 | if (queryLowerCase.split(':').length === 2) { |
|
825 | 825 | queryLowerCase = queryLowerCase.split(':')[1] |
|
826 | 826 | } |
|
827 | 827 | if (suggestion.type === "text") { |
|
828 | 828 | // special case we don't want to "skip" display for |
|
829 | 829 | return true |
|
830 | 830 | } |
|
831 | 831 | return suggestion.value_display.toLowerCase().indexOf(queryLowerCase) !== -1; |
|
832 | 832 | }; |
|
833 | 833 | |
|
834 | 834 | var cleanContext = { |
|
835 | 835 | repo_view_type: null, |
|
836 | 836 | |
|
837 | 837 | repo_id: null, |
|
838 | 838 | repo_name: "", |
|
839 | 839 | |
|
840 | 840 | repo_group_id: null, |
|
841 | 841 | repo_group_name: null |
|
842 | 842 | }; |
|
843 | 843 | var removeGoToFilter = function () { |
|
844 | 844 | $('.searchTagHidable').hide(); |
|
845 | 845 | $('#main_filter').autocomplete( |
|
846 | 846 | 'setOptions', {params:{search_context: cleanContext}}); |
|
847 | 847 | }; |
|
848 | 848 | |
|
849 | 849 | $('#main_filter').autocomplete({ |
|
850 | 850 | serviceUrl: pyroutes.url('goto_switcher_data'), |
|
851 | 851 | params: { |
|
852 | 852 | "search_context": templateContext.search_context |
|
853 | 853 | }, |
|
854 | 854 | minChars:2, |
|
855 | 855 | maxHeight:400, |
|
856 | 856 | deferRequestBy: 300, //miliseconds |
|
857 | 857 | tabDisabled: true, |
|
858 | 858 | autoSelectFirst: false, |
|
859 | 859 | formatResult: autocompleteMainFilterFormatResult, |
|
860 | 860 | lookupFilter: autocompleteMainFilterResult, |
|
861 | 861 | onSelect: function (element, suggestion) { |
|
862 | 862 | handleSelect(element, suggestion); |
|
863 | 863 | return false; |
|
864 | 864 | }, |
|
865 | 865 | onSearchError: function (element, query, jqXHR, textStatus, errorThrown) { |
|
866 | 866 | if (jqXHR !== 'abort') { |
|
867 | 867 | alert("Error during search.\nError code: {0}".format(textStatus)); |
|
868 | 868 | window.location = ''; |
|
869 | 869 | } |
|
870 | 870 | } |
|
871 | 871 | }); |
|
872 | 872 | |
|
873 | 873 | showMainFilterBox = function () { |
|
874 | 874 | $('#main_filter_help').toggle(); |
|
875 | 875 | }; |
|
876 | 876 | |
|
877 | 877 | $('#main_filter').on('keydown.autocomplete', function (e) { |
|
878 | 878 | |
|
879 | 879 | var BACKSPACE = 8; |
|
880 | 880 | var el = $(e.currentTarget); |
|
881 | 881 | if(e.which === BACKSPACE){ |
|
882 | 882 | var inputVal = el.val(); |
|
883 | 883 | if (inputVal === ""){ |
|
884 | 884 | removeGoToFilter() |
|
885 | 885 | } |
|
886 | 886 | } |
|
887 | 887 | }); |
|
888 | 888 | |
|
889 | 889 | </script> |
|
890 | 890 | <script src="${h.asset('js/rhodecode/base/keyboard-bindings.js', ver=c.rhodecode_version_hash)}"></script> |
|
891 | 891 | </%def> |
|
892 | 892 | |
|
893 | 893 | <div class="modal" id="help_kb" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true"> |
|
894 | 894 | <div class="modal-dialog"> |
|
895 | 895 | <div class="modal-content"> |
|
896 | 896 | <div class="modal-header"> |
|
897 | 897 | <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button> |
|
898 | 898 | <h4 class="modal-title" id="myModalLabel">${_('Keyboard shortcuts')}</h4> |
|
899 | 899 | </div> |
|
900 | 900 | <div class="modal-body"> |
|
901 | 901 | <div class="block-left"> |
|
902 | 902 | <table class="keyboard-mappings"> |
|
903 | 903 | <tbody> |
|
904 | 904 | <tr> |
|
905 | 905 | <th></th> |
|
906 | 906 | <th>${_('Site-wide shortcuts')}</th> |
|
907 | 907 | </tr> |
|
908 | 908 | <% |
|
909 | 909 | elems = [ |
|
910 | 910 | ('/', 'Use quick search box'), |
|
911 | 911 | ('g h', 'Goto home page'), |
|
912 | 912 | ('g g', 'Goto my private gists page'), |
|
913 | 913 | ('g G', 'Goto my public gists page'), |
|
914 | 914 | ('g 0-9', 'Goto bookmarked items from 0-9'), |
|
915 | 915 | ('n r', 'New repository page'), |
|
916 | 916 | ('n g', 'New gist page'), |
|
917 | 917 | ] |
|
918 | 918 | %> |
|
919 | 919 | %for key, desc in elems: |
|
920 | 920 | <tr> |
|
921 | 921 | <td class="keys"> |
|
922 | 922 | <span class="key tag">${key}</span> |
|
923 | 923 | </td> |
|
924 | 924 | <td>${desc}</td> |
|
925 | 925 | </tr> |
|
926 | 926 | %endfor |
|
927 | 927 | </tbody> |
|
928 | 928 | </table> |
|
929 | 929 | </div> |
|
930 | 930 | <div class="block-left"> |
|
931 | 931 | <table class="keyboard-mappings"> |
|
932 | 932 | <tbody> |
|
933 | 933 | <tr> |
|
934 | 934 | <th></th> |
|
935 | 935 | <th>${_('Repositories')}</th> |
|
936 | 936 | </tr> |
|
937 | 937 | <% |
|
938 | 938 | elems = [ |
|
939 | 939 | ('g s', 'Goto summary page'), |
|
940 | 940 | ('g c', 'Goto changelog page'), |
|
941 | 941 | ('g f', 'Goto files page'), |
|
942 | 942 | ('g F', 'Goto files page with file search activated'), |
|
943 | 943 | ('g p', 'Goto pull requests page'), |
|
944 | 944 | ('g o', 'Goto repository settings'), |
|
945 | 945 | ('g O', 'Goto repository permissions settings'), |
|
946 | 946 | ] |
|
947 | 947 | %> |
|
948 | 948 | %for key, desc in elems: |
|
949 | 949 | <tr> |
|
950 | 950 | <td class="keys"> |
|
951 | 951 | <span class="key tag">${key}</span> |
|
952 | 952 | </td> |
|
953 | 953 | <td>${desc}</td> |
|
954 | 954 | </tr> |
|
955 | 955 | %endfor |
|
956 | 956 | </tbody> |
|
957 | 957 | </table> |
|
958 | 958 | </div> |
|
959 | 959 | </div> |
|
960 | 960 | <div class="modal-footer"> |
|
961 | 961 | </div> |
|
962 | 962 | </div><!-- /.modal-content --> |
|
963 | 963 | </div><!-- /.modal-dialog --> |
|
964 | 964 | </div><!-- /.modal --> |
|
965 | 965 |
@@ -1,350 +1,350 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | <%inherit file="/base/base.mako"/> |
|
4 | 4 | <%namespace name="diff_block" file="/changeset/diff_block.mako"/> |
|
5 | 5 | |
|
6 | 6 | <%def name="title()"> |
|
7 | 7 | ${_('%s Commit') % c.repo_name} - ${h.show_id(c.commit)} |
|
8 | 8 | %if c.rhodecode_name: |
|
9 | 9 | · ${h.branding(c.rhodecode_name)} |
|
10 | 10 | %endif |
|
11 | 11 | </%def> |
|
12 | 12 | |
|
13 | 13 | <%def name="menu_bar_nav()"> |
|
14 | 14 | ${self.menu_items(active='repositories')} |
|
15 | 15 | </%def> |
|
16 | 16 | |
|
17 | 17 | <%def name="menu_bar_subnav()"> |
|
18 |
${self.repo_menu(active='c |
|
|
18 | ${self.repo_menu(active='commits')} | |
|
19 | 19 | </%def> |
|
20 | 20 | |
|
21 | 21 | <%def name="main()"> |
|
22 | 22 | <script> |
|
23 | 23 | // TODO: marcink switch this to pyroutes |
|
24 | 24 | AJAX_COMMENT_DELETE_URL = "${h.route_path('repo_commit_comment_delete',repo_name=c.repo_name,commit_id=c.commit.raw_id,comment_id='__COMMENT_ID__')}"; |
|
25 | 25 | templateContext.commit_data.commit_id = "${c.commit.raw_id}"; |
|
26 | 26 | </script> |
|
27 | 27 | <div class="box"> |
|
28 | 28 | |
|
29 | 29 | <div id="changeset_compare_view_content" class="summary changeset"> |
|
30 | 30 | <div class="summary-detail"> |
|
31 | 31 | <div class="fieldset"> |
|
32 | 32 | <div class="left-label-summary"> |
|
33 | 33 | <p>${_('Commit')}</p> |
|
34 | 34 | <div class="right-label-summary"> |
|
35 | 35 | <code> |
|
36 | 36 | ${h.show_id(c.commit)} |
|
37 | 37 | </code> |
|
38 | 38 | <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${c.commit.raw_id}" title="${_('Copy the full commit id')}"></i> |
|
39 | 39 | % if hasattr(c.commit, 'phase'): |
|
40 | 40 | <span class="tag phase-${c.commit.phase} tooltip" title="${_('Commit phase')}">${c.commit.phase}</span> |
|
41 | 41 | % endif |
|
42 | 42 | |
|
43 | 43 | ## obsolete commits |
|
44 | 44 | % if hasattr(c.commit, 'obsolete'): |
|
45 | 45 | % if c.commit.obsolete: |
|
46 | 46 | <span class="tag obsolete-${c.commit.obsolete} tooltip" title="${_('Evolve State')}">${_('obsolete')}</span> |
|
47 | 47 | % endif |
|
48 | 48 | % endif |
|
49 | 49 | |
|
50 | 50 | ## hidden commits |
|
51 | 51 | % if hasattr(c.commit, 'hidden'): |
|
52 | 52 | % if c.commit.hidden: |
|
53 | 53 | <span class="tag hidden-${c.commit.hidden} tooltip" title="${_('Evolve State')}">${_('hidden')}</span> |
|
54 | 54 | % endif |
|
55 | 55 | % endif |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | <div class="pull-right"> |
|
59 | 59 | <span id="parent_link"> |
|
60 | 60 | <a href="#parentCommit" title="${_('Parent Commit')}"><i class="icon-left icon-no-margin"></i>${_('parent')}</a> |
|
61 | 61 | </span> |
|
62 | 62 | | |
|
63 | 63 | <span id="child_link"> |
|
64 | 64 | <a href="#childCommit" title="${_('Child Commit')}">${_('child')}<i class="icon-right icon-no-margin"></i></a> |
|
65 | 65 | </span> |
|
66 | 66 | </div> |
|
67 | 67 | |
|
68 | 68 | </div> |
|
69 | 69 | </div> |
|
70 | 70 | </div> |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | <div class="fieldset"> |
|
75 | 75 | <div class="left-label-summary"> |
|
76 | 76 | <p>${_('Description')}:</p> |
|
77 | 77 | <div class="right-label-summary"> |
|
78 | 78 | <div id="trimmed_message_box" class="commit">${h.urlify_commit_message(c.commit.message,c.repo_name)}</div> |
|
79 | 79 | <div id="message_expand" style="display:none;"> |
|
80 | 80 | ${_('Expand')} |
|
81 | 81 | </div> |
|
82 | 82 | </div> |
|
83 | 83 | </div> |
|
84 | 84 | </div> |
|
85 | 85 | |
|
86 | 86 | %if c.statuses: |
|
87 | 87 | <div class="fieldset"> |
|
88 | 88 | <div class="left-label-summary"> |
|
89 | 89 | <p>${_('Commit status')}:</p> |
|
90 | 90 | <div class="right-label-summary"> |
|
91 | 91 | <div class="changeset-status-ico"> |
|
92 | 92 | <div class="${'flag_status %s' % c.statuses[0]} pull-left"></div> |
|
93 | 93 | </div> |
|
94 | 94 | <div title="${_('Commit status')}" class="changeset-status-lbl">[${h.commit_status_lbl(c.statuses[0])}]</div> |
|
95 | 95 | </div> |
|
96 | 96 | </div> |
|
97 | 97 | </div> |
|
98 | 98 | %endif |
|
99 | 99 | |
|
100 | 100 | <div class="fieldset"> |
|
101 | 101 | <div class="left-label-summary"> |
|
102 | 102 | <p>${_('References')}:</p> |
|
103 | 103 | <div class="right-label-summary"> |
|
104 | 104 | <div class="tags"> |
|
105 | 105 | %if c.commit.merge: |
|
106 | 106 | <span class="mergetag tag"> |
|
107 | 107 | <i class="icon-merge"></i>${_('merge')} |
|
108 | 108 | </span> |
|
109 | 109 | %endif |
|
110 | 110 | |
|
111 | 111 | %if h.is_hg(c.rhodecode_repo): |
|
112 | 112 | %for book in c.commit.bookmarks: |
|
113 | 113 | <span class="booktag tag" title="${h.tooltip(_('Bookmark %s') % book)}"> |
|
114 | 114 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=book))}"><i class="icon-bookmark"></i>${h.shorter(book)}</a> |
|
115 | 115 | </span> |
|
116 | 116 | %endfor |
|
117 | 117 | %endif |
|
118 | 118 | |
|
119 | 119 | %for tag in c.commit.tags: |
|
120 | 120 | <span class="tagtag tag" title="${h.tooltip(_('Tag %s') % tag)}"> |
|
121 | 121 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=tag))}"><i class="icon-tag"></i>${tag}</a> |
|
122 | 122 | </span> |
|
123 | 123 | %endfor |
|
124 | 124 | |
|
125 | 125 | %if c.commit.branch: |
|
126 | 126 | <span class="branchtag tag" title="${h.tooltip(_('Branch %s') % c.commit.branch)}"> |
|
127 | 127 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(at=c.commit.branch))}"><i class="icon-code-fork"></i>${h.shorter(c.commit.branch)}</a> |
|
128 | 128 | </span> |
|
129 | 129 | %endif |
|
130 | 130 | </div> |
|
131 | 131 | </div> |
|
132 | 132 | </div> |
|
133 | 133 | </div> |
|
134 | 134 | |
|
135 | 135 | <div class="fieldset"> |
|
136 | 136 | <div class="left-label-summary"> |
|
137 | 137 | <p>${_('Diff options')}:</p> |
|
138 | 138 | <div class="right-label-summary"> |
|
139 | 139 | <div class="diff-actions"> |
|
140 | 140 | <a href="${h.route_path('repo_commit_raw',repo_name=c.repo_name,commit_id=c.commit.raw_id)}" class="tooltip" title="${h.tooltip(_('Raw diff'))}"> |
|
141 | 141 | ${_('Raw Diff')} |
|
142 | 142 | </a> |
|
143 | 143 | | |
|
144 | 144 | <a href="${h.route_path('repo_commit_patch',repo_name=c.repo_name,commit_id=c.commit.raw_id)}" class="tooltip" title="${h.tooltip(_('Patch diff'))}"> |
|
145 | 145 | ${_('Patch Diff')} |
|
146 | 146 | </a> |
|
147 | 147 | | |
|
148 | 148 | <a href="${h.route_path('repo_commit_download',repo_name=c.repo_name,commit_id=c.commit.raw_id,_query=dict(diff='download'))}" class="tooltip" title="${h.tooltip(_('Download diff'))}"> |
|
149 | 149 | ${_('Download Diff')} |
|
150 | 150 | </a> |
|
151 | 151 | </div> |
|
152 | 152 | </div> |
|
153 | 153 | </div> |
|
154 | 154 | </div> |
|
155 | 155 | |
|
156 | 156 | <div class="fieldset"> |
|
157 | 157 | <div class="left-label-summary"> |
|
158 | 158 | <p>${_('Comments')}:</p> |
|
159 | 159 | <div class="right-label-summary"> |
|
160 | 160 | <div class="comments-number"> |
|
161 | 161 | %if c.comments: |
|
162 | 162 | <a href="#comments">${_ungettext("%d Commit comment", "%d Commit comments", len(c.comments)) % len(c.comments)}</a>, |
|
163 | 163 | %else: |
|
164 | 164 | ${_ungettext("%d Commit comment", "%d Commit comments", len(c.comments)) % len(c.comments)} |
|
165 | 165 | %endif |
|
166 | 166 | %if c.inline_cnt: |
|
167 | 167 | <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_ungettext("%d Inline Comment", "%d Inline Comments", c.inline_cnt) % c.inline_cnt}</a> |
|
168 | 168 | %else: |
|
169 | 169 | ${_ungettext("%d Inline Comment", "%d Inline Comments", c.inline_cnt) % c.inline_cnt} |
|
170 | 170 | %endif |
|
171 | 171 | </div> |
|
172 | 172 | </div> |
|
173 | 173 | </div> |
|
174 | 174 | </div> |
|
175 | 175 | |
|
176 | 176 | <div class="fieldset"> |
|
177 | 177 | <div class="left-label-summary"> |
|
178 | 178 | <p>${_('Unresolved TODOs')}:</p> |
|
179 | 179 | <div class="right-label-summary"> |
|
180 | 180 | <div class="comments-number"> |
|
181 | 181 | % if c.unresolved_comments: |
|
182 | 182 | % for co in c.unresolved_comments: |
|
183 | 183 | <a class="permalink" href="#comment-${co.comment_id}" onclick="Rhodecode.comments.scrollToComment($('#comment-${co.comment_id}'))"> #${co.comment_id}</a>${'' if loop.last else ','} |
|
184 | 184 | % endfor |
|
185 | 185 | % else: |
|
186 | 186 | ${_('There are no unresolved TODOs')} |
|
187 | 187 | % endif |
|
188 | 188 | </div> |
|
189 | 189 | </div> |
|
190 | 190 | </div> |
|
191 | 191 | </div> |
|
192 | 192 | |
|
193 | 193 | <div class="fieldset"> |
|
194 | 194 | <div class="left-label-summary"> |
|
195 | 195 | <p>${_('Author')}</p> |
|
196 | 196 | |
|
197 | 197 | <div class="right-label-summary"> |
|
198 | 198 | ${self.gravatar_with_user(c.commit.author)} |
|
199 | 199 | <div class="user-inline-data">- ${h.age_component(c.commit.date)}</div> |
|
200 | 200 | </div> |
|
201 | 201 | </div> |
|
202 | 202 | |
|
203 | 203 | <div class="clear-fix"></div> |
|
204 | 204 | |
|
205 | 205 | </div> <!-- end summary-detail --> |
|
206 | 206 | </div> <!-- end summary --> |
|
207 | 207 | </div> |
|
208 | 208 | <div class="cs_files"> |
|
209 | 209 | <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/> |
|
210 | 210 | ${cbdiffs.render_diffset_menu(c.changes[c.commit.raw_id])} |
|
211 | 211 | ${cbdiffs.render_diffset( |
|
212 | 212 | c.changes[c.commit.raw_id], commit=c.commit, use_comments=True,inline_comments=c.inline_comments )} |
|
213 | 213 | </div> |
|
214 | 214 | |
|
215 | 215 | ## template for inline comment form |
|
216 | 216 | <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/> |
|
217 | 217 | |
|
218 | 218 | ## render comments |
|
219 | 219 | ${comment.generate_comments(c.comments)} |
|
220 | 220 | |
|
221 | 221 | ## main comment form and it status |
|
222 | 222 | ${comment.comments(h.route_path('repo_commit_comment_create', repo_name=c.repo_name, commit_id=c.commit.raw_id), |
|
223 | 223 | h.commit_status(c.rhodecode_db_repo, c.commit.raw_id))} |
|
224 | 224 | </div> |
|
225 | 225 | |
|
226 | 226 | ## FORM FOR MAKING JS ACTION AS CHANGESET COMMENTS |
|
227 | 227 | <script type="text/javascript"> |
|
228 | 228 | |
|
229 | 229 | $(document).ready(function() { |
|
230 | 230 | |
|
231 | 231 | var boxmax = parseInt($('#trimmed_message_box').css('max-height'), 10); |
|
232 | 232 | if($('#trimmed_message_box').height() === boxmax){ |
|
233 | 233 | $('#message_expand').show(); |
|
234 | 234 | } |
|
235 | 235 | |
|
236 | 236 | $('#message_expand').on('click', function(e){ |
|
237 | 237 | $('#trimmed_message_box').css('max-height', 'none'); |
|
238 | 238 | $(this).hide(); |
|
239 | 239 | }); |
|
240 | 240 | |
|
241 | 241 | $('.show-inline-comments').on('click', function(e){ |
|
242 | 242 | var boxid = $(this).attr('data-comment-id'); |
|
243 | 243 | var button = $(this); |
|
244 | 244 | |
|
245 | 245 | if(button.hasClass("comments-visible")) { |
|
246 | 246 | $('#{0} .inline-comments'.format(boxid)).each(function(index){ |
|
247 | 247 | $(this).hide(); |
|
248 | 248 | }); |
|
249 | 249 | button.removeClass("comments-visible"); |
|
250 | 250 | } else { |
|
251 | 251 | $('#{0} .inline-comments'.format(boxid)).each(function(index){ |
|
252 | 252 | $(this).show(); |
|
253 | 253 | }); |
|
254 | 254 | button.addClass("comments-visible"); |
|
255 | 255 | } |
|
256 | 256 | }); |
|
257 | 257 | |
|
258 | 258 | |
|
259 | 259 | // next links |
|
260 | 260 | $('#child_link').on('click', function(e){ |
|
261 | 261 | // fetch via ajax what is going to be the next link, if we have |
|
262 | 262 | // >1 links show them to user to choose |
|
263 | 263 | if(!$('#child_link').hasClass('disabled')){ |
|
264 | 264 | $.ajax({ |
|
265 | 265 | url: '${h.route_path('repo_commit_children',repo_name=c.repo_name, commit_id=c.commit.raw_id)}', |
|
266 | 266 | success: function(data) { |
|
267 | 267 | if(data.results.length === 0){ |
|
268 | 268 | $('#child_link').html("${_('No Child Commits')}").addClass('disabled'); |
|
269 | 269 | } |
|
270 | 270 | if(data.results.length === 1){ |
|
271 | 271 | var commit = data.results[0]; |
|
272 | 272 | window.location = pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': commit.raw_id}); |
|
273 | 273 | } |
|
274 | 274 | else if(data.results.length === 2){ |
|
275 | 275 | $('#child_link').addClass('disabled'); |
|
276 | 276 | $('#child_link').addClass('double'); |
|
277 | 277 | var _html = ''; |
|
278 | 278 | _html +='<a title="__title__" href="__url__">__rev__</a> ' |
|
279 | 279 | .replace('__rev__','r{0}:{1}'.format(data.results[0].revision, data.results[0].raw_id.substr(0,6))) |
|
280 | 280 | .replace('__title__', data.results[0].message) |
|
281 | 281 | .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[0].raw_id})); |
|
282 | 282 | _html +=' | '; |
|
283 | 283 | _html +='<a title="__title__" href="__url__">__rev__</a> ' |
|
284 | 284 | .replace('__rev__','r{0}:{1}'.format(data.results[1].revision, data.results[1].raw_id.substr(0,6))) |
|
285 | 285 | .replace('__title__', data.results[1].message) |
|
286 | 286 | .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[1].raw_id})); |
|
287 | 287 | $('#child_link').html(_html); |
|
288 | 288 | } |
|
289 | 289 | } |
|
290 | 290 | }); |
|
291 | 291 | e.preventDefault(); |
|
292 | 292 | } |
|
293 | 293 | }); |
|
294 | 294 | |
|
295 | 295 | // prev links |
|
296 | 296 | $('#parent_link').on('click', function(e){ |
|
297 | 297 | // fetch via ajax what is going to be the next link, if we have |
|
298 | 298 | // >1 links show them to user to choose |
|
299 | 299 | if(!$('#parent_link').hasClass('disabled')){ |
|
300 | 300 | $.ajax({ |
|
301 | 301 | url: '${h.route_path("repo_commit_parents",repo_name=c.repo_name, commit_id=c.commit.raw_id)}', |
|
302 | 302 | success: function(data) { |
|
303 | 303 | if(data.results.length === 0){ |
|
304 | 304 | $('#parent_link').html('${_('No Parent Commits')}').addClass('disabled'); |
|
305 | 305 | } |
|
306 | 306 | if(data.results.length === 1){ |
|
307 | 307 | var commit = data.results[0]; |
|
308 | 308 | window.location = pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': commit.raw_id}); |
|
309 | 309 | } |
|
310 | 310 | else if(data.results.length === 2){ |
|
311 | 311 | $('#parent_link').addClass('disabled'); |
|
312 | 312 | $('#parent_link').addClass('double'); |
|
313 | 313 | var _html = ''; |
|
314 | 314 | _html +='<a title="__title__" href="__url__">Parent __rev__</a>' |
|
315 | 315 | .replace('__rev__','r{0}:{1}'.format(data.results[0].revision, data.results[0].raw_id.substr(0,6))) |
|
316 | 316 | .replace('__title__', data.results[0].message) |
|
317 | 317 | .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[0].raw_id})); |
|
318 | 318 | _html +=' | '; |
|
319 | 319 | _html +='<a title="__title__" href="__url__">Parent __rev__</a>' |
|
320 | 320 | .replace('__rev__','r{0}:{1}'.format(data.results[1].revision, data.results[1].raw_id.substr(0,6))) |
|
321 | 321 | .replace('__title__', data.results[1].message) |
|
322 | 322 | .replace('__url__', pyroutes.url('repo_commit', {'repo_name': '${c.repo_name}','commit_id': data.results[1].raw_id})); |
|
323 | 323 | $('#parent_link').html(_html); |
|
324 | 324 | } |
|
325 | 325 | } |
|
326 | 326 | }); |
|
327 | 327 | e.preventDefault(); |
|
328 | 328 | } |
|
329 | 329 | }); |
|
330 | 330 | |
|
331 | 331 | if (location.hash) { |
|
332 | 332 | var result = splitDelimitedHash(location.hash); |
|
333 | 333 | var line = $('html').find(result.loc); |
|
334 | 334 | if (line.length > 0){ |
|
335 | 335 | offsetScroll(line, 70); |
|
336 | 336 | } |
|
337 | 337 | } |
|
338 | 338 | |
|
339 | 339 | // browse tree @ revision |
|
340 | 340 | $('#files_link').on('click', function(e){ |
|
341 | 341 | window.location = '${h.route_path('repo_files:default_path',repo_name=c.repo_name, commit_id=c.commit.raw_id)}'; |
|
342 | 342 | e.preventDefault(); |
|
343 | 343 | }); |
|
344 | 344 | |
|
345 | 345 | // inject comments into their proper positions |
|
346 | 346 | var file_comments = $('.inline-comment-placeholder'); |
|
347 | 347 | }) |
|
348 | 348 | </script> |
|
349 | 349 | |
|
350 | 350 | </%def> |
@@ -1,102 +1,102 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%inherit file="/base/base.mako"/> |
|
3 | 3 | |
|
4 | 4 | <%def name="title()"> |
|
5 | 5 | ${_('%s Commits') % c.repo_name} - |
|
6 | 6 | r${c.commit_ranges[0].idx}:${h.short_id(c.commit_ranges[0].raw_id)} |
|
7 | 7 | ... |
|
8 | 8 | r${c.commit_ranges[-1].idx}:${h.short_id(c.commit_ranges[-1].raw_id)} |
|
9 | 9 | ${_ungettext('(%s commit)','(%s commits)', len(c.commit_ranges)) % len(c.commit_ranges)} |
|
10 | 10 | %if c.rhodecode_name: |
|
11 | 11 | · ${h.branding(c.rhodecode_name)} |
|
12 | 12 | %endif |
|
13 | 13 | </%def> |
|
14 | 14 | |
|
15 | 15 | <%def name="breadcrumbs_links()"> |
|
16 | 16 | ${_('Commits')} - |
|
17 | 17 | r${c.commit_ranges[0].idx}:${h.short_id(c.commit_ranges[0].raw_id)} |
|
18 | 18 | ... |
|
19 | 19 | r${c.commit_ranges[-1].idx}:${h.short_id(c.commit_ranges[-1].raw_id)} |
|
20 | 20 | ${_ungettext('(%s commit)','(%s commits)', len(c.commit_ranges)) % len(c.commit_ranges)} |
|
21 | 21 | </%def> |
|
22 | 22 | |
|
23 | 23 | <%def name="menu_bar_nav()"> |
|
24 | 24 | ${self.menu_items(active='repositories')} |
|
25 | 25 | </%def> |
|
26 | 26 | |
|
27 | 27 | <%def name="menu_bar_subnav()"> |
|
28 |
${self.repo_menu(active='c |
|
|
28 | ${self.repo_menu(active='commits')} | |
|
29 | 29 | </%def> |
|
30 | 30 | |
|
31 | 31 | <%def name="main()"> |
|
32 | 32 | |
|
33 | 33 | <div class="summary changeset"> |
|
34 | 34 | <div class="summary-detail"> |
|
35 | 35 | <div class="summary-detail-header"> |
|
36 | 36 | <span class="breadcrumbs files_location"> |
|
37 | 37 | <h4> |
|
38 | 38 | ${_('Commit Range')} |
|
39 | 39 | <code> |
|
40 | 40 | r${c.commit_ranges[0].idx}:${h.short_id(c.commit_ranges[0].raw_id)}...r${c.commit_ranges[-1].idx}:${h.short_id(c.commit_ranges[-1].raw_id)} |
|
41 | 41 | </code> |
|
42 | 42 | </h4> |
|
43 | 43 | </span> |
|
44 | 44 | </div> |
|
45 | 45 | |
|
46 | 46 | <div class="fieldset"> |
|
47 | 47 | <div class="left-label"> |
|
48 | 48 | ${_('Diff option')}: |
|
49 | 49 | </div> |
|
50 | 50 | <div class="right-content"> |
|
51 | 51 | <div class="btn btn-primary"> |
|
52 | 52 | <a href="${h.route_path('repo_compare', |
|
53 | 53 | repo_name=c.repo_name, |
|
54 | 54 | source_ref_type='rev', |
|
55 | 55 | source_ref=getattr(c.commit_ranges[0].parents[0] if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id'), |
|
56 | 56 | target_ref_type='rev', |
|
57 | 57 | target_ref=c.commit_ranges[-1].raw_id)}" |
|
58 | 58 | > |
|
59 | 59 | ${_('Show combined compare')} |
|
60 | 60 | </a> |
|
61 | 61 | </div> |
|
62 | 62 | </div> |
|
63 | 63 | </div> |
|
64 | 64 | |
|
65 | 65 | </div> <!-- end summary-detail --> |
|
66 | 66 | |
|
67 | 67 | </div> <!-- end summary --> |
|
68 | 68 | |
|
69 | 69 | <div id="changeset_compare_view_content"> |
|
70 | 70 | <div class="pull-left"> |
|
71 | 71 | <div class="btn-group"> |
|
72 | 72 | <a |
|
73 | 73 | class="btn" |
|
74 | 74 | href="#" |
|
75 | 75 | onclick="$('.compare_select').show();$('.compare_select_hidden').hide(); return false"> |
|
76 | 76 | ${_ungettext('Expand %s commit','Expand %s commits', len(c.commit_ranges)) % len(c.commit_ranges)} |
|
77 | 77 | </a> |
|
78 | 78 | <a |
|
79 | 79 | class="btn" |
|
80 | 80 | href="#" |
|
81 | 81 | onclick="$('.compare_select').hide();$('.compare_select_hidden').show(); return false"> |
|
82 | 82 | ${_ungettext('Collapse %s commit','Collapse %s commits', len(c.commit_ranges)) % len(c.commit_ranges)} |
|
83 | 83 | </a> |
|
84 | 84 | </div> |
|
85 | 85 | </div> |
|
86 | 86 | ## Commit range generated below |
|
87 | 87 | <%include file="../compare/compare_commits.mako"/> |
|
88 | 88 | <div class="cs_files"> |
|
89 | 89 | <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/> |
|
90 | 90 | <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/> |
|
91 | 91 | <%namespace name="diff_block" file="/changeset/diff_block.mako"/> |
|
92 | 92 | ${cbdiffs.render_diffset_menu()} |
|
93 | 93 | %for commit in c.commit_ranges: |
|
94 | 94 | ${cbdiffs.render_diffset( |
|
95 | 95 | diffset=c.changes[commit.raw_id], |
|
96 | 96 | collapse_when_files_over=5, |
|
97 | 97 | commit=commit, |
|
98 | 98 | )} |
|
99 | 99 | %endfor |
|
100 | 100 | </div> |
|
101 | 101 | </div> |
|
102 | 102 | </%def> |
@@ -1,314 +1,314 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | <%inherit file="/base/base.mako"/> |
|
4 | 4 | |
|
5 | 5 | <%def name="title()"> |
|
6 | 6 | ${_('%s Changelog') % c.repo_name} |
|
7 | 7 | %if c.changelog_for_path: |
|
8 | 8 | /${c.changelog_for_path} |
|
9 | 9 | %endif |
|
10 | 10 | %if c.rhodecode_name: |
|
11 | 11 | · ${h.branding(c.rhodecode_name)} |
|
12 | 12 | %endif |
|
13 | 13 | </%def> |
|
14 | 14 | |
|
15 | 15 | <%def name="breadcrumbs_links()"> |
|
16 | 16 | %if c.changelog_for_path: |
|
17 | 17 | /${c.changelog_for_path} |
|
18 | 18 | %endif |
|
19 | 19 | </%def> |
|
20 | 20 | |
|
21 | 21 | <%def name="menu_bar_nav()"> |
|
22 | 22 | ${self.menu_items(active='repositories')} |
|
23 | 23 | </%def> |
|
24 | 24 | |
|
25 | 25 | <%def name="menu_bar_subnav()"> |
|
26 |
${self.repo_menu(active='c |
|
|
26 | ${self.repo_menu(active='commits')} | |
|
27 | 27 | </%def> |
|
28 | 28 | |
|
29 | 29 | <%def name="main()"> |
|
30 | 30 | |
|
31 | 31 | <div class="box"> |
|
32 | 32 | <div class="title"> |
|
33 | 33 | <ul class="links"> |
|
34 | 34 | <li> |
|
35 | 35 | <a href="#" class="btn btn-small" id="rev_range_container" style="display:none;"></a> |
|
36 | 36 | %if c.rhodecode_db_repo.fork: |
|
37 | 37 | <span> |
|
38 | 38 | <a id="compare_fork_button" |
|
39 | 39 | title="${h.tooltip(_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name))}" |
|
40 | 40 | class="btn btn-small" |
|
41 | 41 | href="${h.route_path('repo_compare', |
|
42 | 42 | repo_name=c.rhodecode_db_repo.fork.repo_name, |
|
43 | 43 | source_ref_type=c.rhodecode_db_repo.landing_rev[0], |
|
44 | 44 | source_ref=c.rhodecode_db_repo.landing_rev[1], |
|
45 | 45 | target_ref_type='branch' if request.GET.get('branch') else c.rhodecode_db_repo.landing_rev[0], |
|
46 | 46 | target_ref=request.GET.get('branch') or c.rhodecode_db_repo.landing_rev[1], |
|
47 | 47 | _query=dict(merge=1, target_repo=c.repo_name))}" |
|
48 | 48 | > |
|
49 | 49 | ${_('Compare fork with Parent (%s)' % c.rhodecode_db_repo.fork.repo_name)} |
|
50 | 50 | </a> |
|
51 | 51 | </span> |
|
52 | 52 | %endif |
|
53 | 53 | |
|
54 | 54 | ## pr open link |
|
55 | 55 | %if h.is_hg(c.rhodecode_repo) or h.is_git(c.rhodecode_repo): |
|
56 | 56 | <span> |
|
57 | 57 | <a id="open_new_pull_request" class="btn btn-small btn-success" href="${h.route_path('pullrequest_new',repo_name=c.repo_name)}"> |
|
58 | 58 | ${_('Open new pull request')} |
|
59 | 59 | </a> |
|
60 | 60 | </span> |
|
61 | 61 | %endif |
|
62 | 62 | |
|
63 | 63 | ## clear selection |
|
64 | 64 | <div title="${_('Clear selection')}" class="btn" id="rev_range_clear" style="display:none"> |
|
65 | 65 | ${_('Clear selection')} |
|
66 | 66 | </div> |
|
67 | 67 | |
|
68 | 68 | </li> |
|
69 | 69 | </ul> |
|
70 | 70 | </div> |
|
71 | 71 | |
|
72 | 72 | % if c.pagination: |
|
73 | 73 | <script type="text/javascript" src="${h.asset('js/src/plugins/jquery.commits-graph.js')}"></script> |
|
74 | 74 | |
|
75 | 75 | <div class="graph-header"> |
|
76 | 76 | <div id="filter_changelog"> |
|
77 | 77 | ${h.hidden('branch_filter')} |
|
78 | 78 | %if c.selected_name: |
|
79 | 79 | <div class="btn btn-default" id="clear_filter" > |
|
80 | 80 | ${_('Clear filter')} |
|
81 | 81 | </div> |
|
82 | 82 | %endif |
|
83 | 83 | </div> |
|
84 | 84 | ${self.breadcrumbs('breadcrumbs_light')} |
|
85 | 85 | <div class="pull-right"> |
|
86 | 86 | % if h.is_hg(c.rhodecode_repo): |
|
87 | 87 | % if c.show_hidden: |
|
88 | 88 | <a class="action-link" href="${h.current_route_path(request, evolve=0)}">${_('Hide obsolete/hidden')}</a> |
|
89 | 89 | % else: |
|
90 | 90 | <a class="action-link" href="${h.current_route_path(request, evolve=1)}">${_('Show obsolete/hidden')}</a> |
|
91 | 91 | % endif |
|
92 | 92 | % else: |
|
93 | 93 | <span class="action-link disabled">${_('Show hidden')}</span> |
|
94 | 94 | % endif |
|
95 | 95 | </div> |
|
96 | 96 | <div id="commit-counter" data-total=${c.total_cs} class="pull-right"> |
|
97 | 97 | ${_ungettext('showing %d out of %d commit', 'showing %d out of %d commits', c.showing_commits) % (c.showing_commits, c.total_cs)} |
|
98 | 98 | </div> |
|
99 | 99 | </div> |
|
100 | 100 | |
|
101 | 101 | <div id="graph"> |
|
102 | 102 | <div class="graph-col-wrapper"> |
|
103 | 103 | <div id="graph_nodes"> |
|
104 | 104 | <div id="graph_canvas"></div> |
|
105 | 105 | </div> |
|
106 | 106 | <div id="graph_content" class="main-content graph_full_width"> |
|
107 | 107 | |
|
108 | 108 | <div class="table"> |
|
109 | 109 | <table id="changesets" class="rctable"> |
|
110 | 110 | <tr> |
|
111 | 111 | ## checkbox |
|
112 | 112 | <th></th> |
|
113 | 113 | <th></th> |
|
114 | 114 | |
|
115 | 115 | <th>${_('Commit')}</th> |
|
116 | 116 | |
|
117 | 117 | ## commit message expand arrow |
|
118 | 118 | <th></th> |
|
119 | 119 | <th>${_('Commit Message')}</th> |
|
120 | 120 | |
|
121 | 121 | <th>${_('Age')}</th> |
|
122 | 122 | <th>${_('Author')}</th> |
|
123 | 123 | |
|
124 | 124 | <th>${_('Refs')}</th> |
|
125 | 125 | ## comments |
|
126 | 126 | <th></th> |
|
127 | 127 | </tr> |
|
128 | 128 | |
|
129 | 129 | <tbody class="commits-range"> |
|
130 | 130 | <%include file='changelog_elements.mako'/> |
|
131 | 131 | </tbody> |
|
132 | 132 | </table> |
|
133 | 133 | </div> |
|
134 | 134 | </div> |
|
135 | 135 | <div class="pagination-wh pagination-left"> |
|
136 | 136 | ${c.pagination.pager('$link_previous ~2~ $link_next')} |
|
137 | 137 | </div> |
|
138 | 138 | </div> |
|
139 | 139 | |
|
140 | 140 | <script type="text/javascript"> |
|
141 | 141 | var cache = {}; |
|
142 | 142 | $(function(){ |
|
143 | 143 | |
|
144 | 144 | // Create links to commit ranges when range checkboxes are selected |
|
145 | 145 | var $commitCheckboxes = $('.commit-range'); |
|
146 | 146 | // cache elements |
|
147 | 147 | var $commitRangeContainer = $('#rev_range_container'); |
|
148 | 148 | var $commitRangeClear = $('#rev_range_clear'); |
|
149 | 149 | |
|
150 | 150 | var checkboxRangeSelector = function(e){ |
|
151 | 151 | var selectedCheckboxes = []; |
|
152 | 152 | for (pos in $commitCheckboxes){ |
|
153 | 153 | if($commitCheckboxes[pos].checked){ |
|
154 | 154 | selectedCheckboxes.push($commitCheckboxes[pos]); |
|
155 | 155 | } |
|
156 | 156 | } |
|
157 | 157 | var open_new_pull_request = $('#open_new_pull_request'); |
|
158 | 158 | if(open_new_pull_request){ |
|
159 | 159 | var selected_changes = selectedCheckboxes.length; |
|
160 | 160 | if (selected_changes > 1 || selected_changes == 1 && templateContext.repo_type == 'svn') { |
|
161 | 161 | open_new_pull_request.hide(); |
|
162 | 162 | } else { |
|
163 | 163 | if (selected_changes == 1) { |
|
164 | 164 | open_new_pull_request.html(_gettext('Open new pull request for selected commit')); |
|
165 | 165 | } else if (selected_changes == 0) { |
|
166 | 166 | open_new_pull_request.html(_gettext('Open new pull request')); |
|
167 | 167 | } |
|
168 | 168 | open_new_pull_request.show(); |
|
169 | 169 | } |
|
170 | 170 | } |
|
171 | 171 | |
|
172 | 172 | if (selectedCheckboxes.length>0){ |
|
173 | 173 | var revEnd = selectedCheckboxes[0].name; |
|
174 | 174 | var revStart = selectedCheckboxes[selectedCheckboxes.length-1].name; |
|
175 | 175 | var url = pyroutes.url('repo_commit', |
|
176 | 176 | {'repo_name': '${c.repo_name}', |
|
177 | 177 | 'commit_id': revStart+'...'+revEnd}); |
|
178 | 178 | |
|
179 | 179 | var link = (revStart == revEnd) |
|
180 | 180 | ? _gettext('Show selected commit __S') |
|
181 | 181 | : _gettext('Show selected commits __S ... __E'); |
|
182 | 182 | |
|
183 | 183 | link = link.replace('__S', revStart.substr(0,6)); |
|
184 | 184 | link = link.replace('__E', revEnd.substr(0,6)); |
|
185 | 185 | |
|
186 | 186 | $commitRangeContainer |
|
187 | 187 | .attr('href',url) |
|
188 | 188 | .html(link) |
|
189 | 189 | .show(); |
|
190 | 190 | |
|
191 | 191 | $commitRangeClear.show(); |
|
192 | 192 | var _url = pyroutes.url('pullrequest_new', |
|
193 | 193 | {'repo_name': '${c.repo_name}', |
|
194 | 194 | 'commit': revEnd}); |
|
195 | 195 | open_new_pull_request.attr('href', _url); |
|
196 | 196 | $('#compare_fork_button').hide(); |
|
197 | 197 | } else { |
|
198 | 198 | $commitRangeContainer.hide(); |
|
199 | 199 | $commitRangeClear.hide(); |
|
200 | 200 | |
|
201 | 201 | %if c.branch_name: |
|
202 | 202 | var _url = pyroutes.url('pullrequest_new', |
|
203 | 203 | {'repo_name': '${c.repo_name}', |
|
204 | 204 | 'branch':'${c.branch_name}'}); |
|
205 | 205 | open_new_pull_request.attr('href', _url); |
|
206 | 206 | %else: |
|
207 | 207 | var _url = pyroutes.url('pullrequest_new', |
|
208 | 208 | {'repo_name': '${c.repo_name}'}); |
|
209 | 209 | open_new_pull_request.attr('href', _url); |
|
210 | 210 | %endif |
|
211 | 211 | $('#compare_fork_button').show(); |
|
212 | 212 | } |
|
213 | 213 | }; |
|
214 | 214 | |
|
215 | 215 | $commitCheckboxes.on('click', checkboxRangeSelector); |
|
216 | 216 | |
|
217 | 217 | $commitRangeClear.on('click',function(e) { |
|
218 | 218 | $commitCheckboxes.attr('checked', false); |
|
219 | 219 | checkboxRangeSelector(); |
|
220 | 220 | e.preventDefault(); |
|
221 | 221 | }); |
|
222 | 222 | |
|
223 | 223 | // make sure the buttons are consistent when navigate back and forth |
|
224 | 224 | checkboxRangeSelector(); |
|
225 | 225 | |
|
226 | 226 | var msgs = $('.message'); |
|
227 | 227 | // get first element height |
|
228 | 228 | var el = $('#graph_content .container')[0]; |
|
229 | 229 | var row_h = el.clientHeight; |
|
230 | 230 | for (var i=0; i < msgs.length; i++) { |
|
231 | 231 | var m = msgs[i]; |
|
232 | 232 | |
|
233 | 233 | var h = m.clientHeight; |
|
234 | 234 | var pad = $(m).css('padding'); |
|
235 | 235 | if (h > row_h) { |
|
236 | 236 | var offset = row_h - (h+12); |
|
237 | 237 | $(m.nextElementSibling).css('display','block'); |
|
238 | 238 | $(m.nextElementSibling).css('margin-top',offset+'px'); |
|
239 | 239 | } |
|
240 | 240 | } |
|
241 | 241 | |
|
242 | 242 | $("#clear_filter").on("click", function() { |
|
243 | 243 | var filter = {'repo_name': '${c.repo_name}'}; |
|
244 |
window.location = pyroutes.url('repo_c |
|
|
244 | window.location = pyroutes.url('repo_commits', filter); | |
|
245 | 245 | }); |
|
246 | 246 | |
|
247 | 247 | $("#branch_filter").select2({ |
|
248 | 248 | 'dropdownAutoWidth': true, |
|
249 | 249 | 'width': 'resolve', |
|
250 | 250 | 'placeholder': "${c.selected_name or _('Filter changelog')}", |
|
251 | 251 | containerCssClass: "drop-menu", |
|
252 | 252 | dropdownCssClass: "drop-menu-dropdown", |
|
253 | 253 | query: function(query){ |
|
254 | 254 | var key = 'cache'; |
|
255 | 255 | var cached = cache[key] ; |
|
256 | 256 | if(cached) { |
|
257 | 257 | var data = {results: []}; |
|
258 | 258 | //filter results |
|
259 | 259 | $.each(cached.results, function(){ |
|
260 | 260 | var section = this.text; |
|
261 | 261 | var children = []; |
|
262 | 262 | $.each(this.children, function(){ |
|
263 | 263 | if(query.term.length == 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0 ){ |
|
264 | 264 | children.push({'id': this.id, 'text': this.text, 'type': this.type}) |
|
265 | 265 | } |
|
266 | 266 | }); |
|
267 | 267 | data.results.push({'text': section, 'children': children}); |
|
268 | 268 | query.callback({results: data.results}); |
|
269 | 269 | }); |
|
270 | 270 | }else{ |
|
271 | 271 | $.ajax({ |
|
272 | 272 | url: pyroutes.url('repo_refs_changelog_data', {'repo_name': '${c.repo_name}'}), |
|
273 | 273 | data: {}, |
|
274 | 274 | dataType: 'json', |
|
275 | 275 | type: 'GET', |
|
276 | 276 | success: function(data) { |
|
277 | 277 | cache[key] = data; |
|
278 | 278 | query.callback({results: data.results}); |
|
279 | 279 | } |
|
280 | 280 | }) |
|
281 | 281 | } |
|
282 | 282 | } |
|
283 | 283 | }); |
|
284 | 284 | $('#branch_filter').on('change', function(e){ |
|
285 | 285 | var data = $('#branch_filter').select2('data'); |
|
286 | 286 | //type: branch_closed |
|
287 | 287 | var selected = data.text; |
|
288 | 288 | var filter = {'repo_name': '${c.repo_name}'}; |
|
289 | 289 | if(data.type == 'branch' || data.type == 'branch_closed'){ |
|
290 | 290 | filter["branch"] = selected; |
|
291 | 291 | if (data.type == 'branch_closed') { |
|
292 | 292 | filter["evolve"] = '1'; |
|
293 | 293 | } |
|
294 | 294 | } |
|
295 | 295 | else if (data.type == 'book'){ |
|
296 | 296 | filter["bookmark"] = selected; |
|
297 | 297 | } |
|
298 |
window.location = pyroutes.url('repo_c |
|
|
298 | window.location = pyroutes.url('repo_commits', filter); | |
|
299 | 299 | }); |
|
300 | 300 | |
|
301 | 301 | commitsController = new CommitsController(); |
|
302 | 302 | % if not c.changelog_for_path: |
|
303 | 303 | commitsController.reloadGraph(); |
|
304 | 304 | % endif |
|
305 | 305 | |
|
306 | 306 | }); |
|
307 | 307 | |
|
308 | 308 | </script> |
|
309 | 309 | </div> |
|
310 | 310 | % else: |
|
311 | 311 | ${_('There are no changes yet')} |
|
312 | 312 | % endif |
|
313 | 313 | </div> |
|
314 | 314 | </%def> |
@@ -1,155 +1,155 b'' | |||
|
1 | 1 | ## small box that displays changed/added/removed details fetched by AJAX |
|
2 | 2 | <%namespace name="base" file="/base/base.mako"/> |
|
3 | 3 | |
|
4 | 4 | % if c.prev_page: |
|
5 | 5 | <tr> |
|
6 | 6 | <td colspan="9" class="load-more-commits"> |
|
7 | 7 | <a class="prev-commits" href="#loadPrevCommits" onclick="commitsController.loadPrev(this, ${c.prev_page}, '${c.branch_name}', '${c.commit_id}', '${c.f_path}');return false"> |
|
8 | 8 | ${_('load previous')} |
|
9 | 9 | </a> |
|
10 | 10 | </td> |
|
11 | 11 | </tr> |
|
12 | 12 | % endif |
|
13 | 13 | |
|
14 | 14 | ## to speed up lookups cache some functions before the loop |
|
15 | 15 | <% |
|
16 | 16 | active_patterns = h.get_active_pattern_entries(c.repo_name) |
|
17 | 17 | urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns) |
|
18 | 18 | %> |
|
19 | 19 | |
|
20 | 20 | % for cnt,commit in enumerate(c.pagination): |
|
21 | 21 | <tr id="sha_${commit.raw_id}" class="changelogRow container ${'tablerow%s' % (cnt%2)}"> |
|
22 | 22 | |
|
23 | 23 | <td class="td-checkbox"> |
|
24 | 24 | ${h.checkbox(commit.raw_id,class_="commit-range")} |
|
25 | 25 | </td> |
|
26 | 26 | |
|
27 | 27 | <td class="td-status"> |
|
28 | 28 | %if c.statuses.get(commit.raw_id): |
|
29 | 29 | <div class="changeset-status-ico"> |
|
30 | 30 | %if c.statuses.get(commit.raw_id)[2]: |
|
31 | 31 | <a class="tooltip" title="${_('Commit status: %s\nClick to open associated pull request #%s') % (h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]), c.statuses.get(commit.raw_id)[2])}" href="${h.route_path('pullrequest_show',repo_name=c.statuses.get(commit.raw_id)[3],pull_request_id=c.statuses.get(commit.raw_id)[2])}"> |
|
32 | 32 | <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div> |
|
33 | 33 | </a> |
|
34 | 34 | %else: |
|
35 | 35 | <a class="tooltip" title="${_('Commit status: {}').format(h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]))}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}"> |
|
36 | 36 | <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div> |
|
37 | 37 | </a> |
|
38 | 38 | %endif |
|
39 | 39 | </div> |
|
40 | 40 | %else: |
|
41 | 41 | <div class="tooltip flag_status not_reviewed" title="${_('Commit status: Not Reviewed')}"></div> |
|
42 | 42 | %endif |
|
43 | 43 | </td> |
|
44 | 44 | |
|
45 | 45 | <td class="td-hash"> |
|
46 | 46 | <code> |
|
47 | 47 | |
|
48 | 48 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id)}"> |
|
49 | 49 | <span class="${'commit_hash obsolete' if getattr(commit, 'obsolete', None) else 'commit_hash'}">${h.show_id(commit)}</span> |
|
50 | 50 | </a> |
|
51 | 51 | |
|
52 | 52 | <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${commit.raw_id}" title="${_('Copy the full commit id')}"></i> |
|
53 | 53 | |
|
54 | 54 | ## COMMIT PHASES |
|
55 | 55 | |
|
56 | 56 | ## Draft |
|
57 | 57 | % if hasattr(commit, 'phase'): |
|
58 | 58 | % if commit.phase != 'public': |
|
59 | 59 | <span class="tag phase-${commit.phase} tooltip" title="${_('{} commit phase').format(commit.phase)}">${commit.phase[0].upper()}</span> |
|
60 | 60 | % endif |
|
61 | 61 | % endif |
|
62 | 62 | |
|
63 | 63 | ## obsolete commits |
|
64 | 64 | % if hasattr(commit, 'obsolete') and commit.obsolete: |
|
65 | 65 | <span class="tag obsolete-${commit.obsolete} tooltip" title="${_('Obsolete Evolve State')}">O</span> |
|
66 | 66 | % endif |
|
67 | 67 | |
|
68 | 68 | ## hidden commits |
|
69 | 69 | % if hasattr(commit, 'hidden') and commit.hidden: |
|
70 | 70 | <span class="tag obsolete-${commit.hidden} tooltip" title="${_('Hidden Evolve State')}">H</span> |
|
71 | 71 | % endif |
|
72 | 72 | |
|
73 | 73 | </code> |
|
74 | 74 | </td> |
|
75 | 75 | |
|
76 | 76 | <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this, true); return false"> |
|
77 | 77 | <i class="icon-expand-linked"></i> |
|
78 | 78 | </td> |
|
79 | 79 | <td class="td-description mid"> |
|
80 | 80 | <div class="log-container truncate-wrap"> |
|
81 | 81 | <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${urlify_commit_message(commit.message, c.repo_name)}</div> |
|
82 | 82 | </div> |
|
83 | 83 | </td> |
|
84 | 84 | |
|
85 | 85 | <td class="td-time"> |
|
86 | 86 | ${h.age_component(commit.date)} |
|
87 | 87 | </td> |
|
88 | 88 | <td class="td-user"> |
|
89 | 89 | ${base.gravatar_with_user(commit.author)} |
|
90 | 90 | </td> |
|
91 | 91 | |
|
92 | 92 | <td class="td-tags tags-col"> |
|
93 | 93 | <div id="t-${commit.raw_id}"> |
|
94 | 94 | |
|
95 | 95 | ## merge |
|
96 | 96 | %if commit.merge: |
|
97 | 97 | <span class="tag mergetag"> |
|
98 | 98 | <i class="icon-merge"></i>${_('merge')} |
|
99 | 99 | </span> |
|
100 | 100 | %endif |
|
101 | 101 | |
|
102 | 102 | ## branch |
|
103 | 103 | %if commit.branch: |
|
104 | 104 | <span class="tag branchtag" title="${h.tooltip(_('Branch %s') % commit.branch)}"> |
|
105 |
<a href="${h.route_path('repo_c |
|
|
105 | <a href="${h.route_path('repo_commits',repo_name=c.repo_name,_query=dict(branch=commit.branch))}"><i class="icon-code-fork"></i>${h.shorter(commit.branch)}</a> | |
|
106 | 106 | </span> |
|
107 | 107 | %endif |
|
108 | 108 | |
|
109 | 109 | ## bookmarks |
|
110 | 110 | %if h.is_hg(c.rhodecode_repo): |
|
111 | 111 | %for book in commit.bookmarks: |
|
112 | 112 | <span class="tag booktag" title="${h.tooltip(_('Bookmark %s') % book)}"> |
|
113 | 113 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=book))}"><i class="icon-bookmark"></i>${h.shorter(book)}</a> |
|
114 | 114 | </span> |
|
115 | 115 | %endfor |
|
116 | 116 | %endif |
|
117 | 117 | |
|
118 | 118 | ## tags |
|
119 | 119 | %for tag in commit.tags: |
|
120 | 120 | <span class="tag tagtag" title="${h.tooltip(_('Tag %s') % tag)}"> |
|
121 | 121 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=tag))}"><i class="icon-tag"></i>${h.shorter(tag)}</a> |
|
122 | 122 | </span> |
|
123 | 123 | %endfor |
|
124 | 124 | |
|
125 | 125 | </div> |
|
126 | 126 | </td> |
|
127 | 127 | |
|
128 | 128 | <td class="td-comments comments-col"> |
|
129 | 129 | <% cs_comments = c.comments.get(commit.raw_id,[]) %> |
|
130 | 130 | % if cs_comments: |
|
131 | 131 | <a title="${_('Commit has comments')}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % cs_comments[0].comment_id)}"> |
|
132 | 132 | <i class="icon-comment"></i> ${len(cs_comments)} |
|
133 | 133 | </a> |
|
134 | 134 | % else: |
|
135 | 135 | <i class="icon-comment"></i> ${len(cs_comments)} |
|
136 | 136 | % endif |
|
137 | 137 | </td> |
|
138 | 138 | |
|
139 | 139 | </tr> |
|
140 | 140 | % endfor |
|
141 | 141 | |
|
142 | 142 | % if c.next_page: |
|
143 | 143 | <tr> |
|
144 | 144 | <td colspan="10" class="load-more-commits"> |
|
145 | 145 | <a class="next-commits" href="#loadNextCommits" onclick="commitsController.loadNext(this, ${c.next_page}, '${c.branch_name}', '${c.commit_id}', '${c.f_path}');return false"> |
|
146 | 146 | ${_('load next')} |
|
147 | 147 | </a> |
|
148 | 148 | </td> |
|
149 | 149 | </tr> |
|
150 | 150 | % endif |
|
151 | 151 | <tr class="chunk-graph-data" style="display:none" |
|
152 | 152 | data-graph='${c.graph_data|n}' |
|
153 | 153 | data-node='${c.prev_page}:${c.next_page}' |
|
154 | 154 | data-commits='${c.graph_commits|n}'> |
|
155 | 155 | </tr> No newline at end of file |
@@ -1,51 +1,51 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | <div class="table"> |
|
3 | 3 | |
|
4 | 4 | <table class="table rctable file_history"> |
|
5 | 5 | %for cnt,cs in enumerate(c.pagination): |
|
6 | 6 | <tr id="chg_${cnt+1}" class="${('tablerow%s' % (cnt%2))}"> |
|
7 | 7 | <td class="td-user"> |
|
8 | 8 | ${base.gravatar_with_user(cs.author, 16)} |
|
9 | 9 | </td> |
|
10 | 10 | <td class="td-time"> |
|
11 | 11 | <div class="date"> |
|
12 | 12 | ${h.age_component(cs.date)} |
|
13 | 13 | </div> |
|
14 | 14 | </td> |
|
15 | 15 | <td class="td-message"> |
|
16 | 16 | <div class="log-container"> |
|
17 | 17 | <div class="message_history" title="${h.tooltip(cs.message)}"> |
|
18 | 18 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}"> |
|
19 | 19 | ${h.shorter(cs.message, 75)} |
|
20 | 20 | </a> |
|
21 | 21 | </div> |
|
22 | 22 | </div> |
|
23 | 23 | </td> |
|
24 | 24 | <td class="td-hash"> |
|
25 | 25 | <code> |
|
26 | 26 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}"> |
|
27 | 27 | <span>${h.show_id(cs)}</span> |
|
28 | 28 | </a> |
|
29 | 29 | </code> |
|
30 | 30 | </td> |
|
31 | 31 | <td class="td-actions"> |
|
32 | 32 | <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=cs.raw_id,f_path=c.changelog_for_path)}"> |
|
33 | 33 | ${_('Show File')} |
|
34 | 34 | </a> |
|
35 | 35 | </td> |
|
36 | 36 | <td class="td-actions"> |
|
37 | 37 | <a href="${h.route_path('repo_compare',repo_name=c.repo_name, source_ref_type="rev", source_ref=cs.raw_id,target_ref_type="rev", target_ref=c.commit_id,_query=dict(merge='1',f_path=c.changelog_for_path))}"> |
|
38 | 38 | <span title="${'Diff {} vs {}'.format(cs.raw_id[:8],c.commit_id[:8])}">${_('Diff File')}</span> |
|
39 | 39 | </a> |
|
40 | 40 | </td> |
|
41 | 41 | </tr> |
|
42 | 42 | %endfor |
|
43 | 43 | <tr> |
|
44 | 44 | <td colspan="6"> |
|
45 |
<a id="file_history_overview_full" href="${h.route_path('repo_c |
|
|
45 | <a id="file_history_overview_full" href="${h.route_path('repo_commits_file',repo_name=c.repo_name, commit_id=c.commit_id, f_path=c.f_path)}"> | |
|
46 | 46 | ${_('Show Full History')} |
|
47 | 47 | </a> |
|
48 | 48 | </td> |
|
49 | 49 | </tr> |
|
50 | 50 | </table> |
|
51 | 51 | </div> |
@@ -1,450 +1,450 b'' | |||
|
1 | 1 | ## DATA TABLE RE USABLE ELEMENTS |
|
2 | 2 | ## usage: |
|
3 | 3 | ## <%namespace name="dt" file="/data_table/_dt_elements.mako"/> |
|
4 | 4 | <%namespace name="base" file="/base/base.mako"/> |
|
5 | 5 | |
|
6 | 6 | <%def name="metatags_help()"> |
|
7 | 7 | <table> |
|
8 | 8 | <% |
|
9 | 9 | example_tags = [ |
|
10 | 10 | ('state','[stable]'), |
|
11 | 11 | ('state','[stale]'), |
|
12 | 12 | ('state','[featured]'), |
|
13 | 13 | ('state','[dev]'), |
|
14 | 14 | ('state','[dead]'), |
|
15 | 15 | ('state','[deprecated]'), |
|
16 | 16 | |
|
17 | 17 | ('label','[personal]'), |
|
18 | 18 | ('generic','[v2.0.0]'), |
|
19 | 19 | |
|
20 | 20 | ('lang','[lang => JavaScript]'), |
|
21 | 21 | ('license','[license => LicenseName]'), |
|
22 | 22 | |
|
23 | 23 | ('ref','[requires => RepoName]'), |
|
24 | 24 | ('ref','[recommends => GroupName]'), |
|
25 | 25 | ('ref','[conflicts => SomeName]'), |
|
26 | 26 | ('ref','[base => SomeName]'), |
|
27 | 27 | ('url','[url => [linkName](https://rhodecode.com)]'), |
|
28 | 28 | ('see','[see => http://rhodecode.com]'), |
|
29 | 29 | ] |
|
30 | 30 | %> |
|
31 | 31 | % for tag_type, tag in example_tags: |
|
32 | 32 | <tr> |
|
33 | 33 | <td>${tag|n}</td> |
|
34 | 34 | <td>${h.style_metatag(tag_type, tag)|n}</td> |
|
35 | 35 | </tr> |
|
36 | 36 | % endfor |
|
37 | 37 | </table> |
|
38 | 38 | </%def> |
|
39 | 39 | |
|
40 | 40 | ## REPOSITORY RENDERERS |
|
41 | 41 | <%def name="quick_menu(repo_name)"> |
|
42 | 42 | <i class="icon-more"></i> |
|
43 | 43 | <div class="menu_items_container hidden"> |
|
44 | 44 | <ul class="menu_items"> |
|
45 | 45 | <li> |
|
46 | 46 | <a title="${_('Summary')}" href="${h.route_path('repo_summary',repo_name=repo_name)}"> |
|
47 | 47 | <span>${_('Summary')}</span> |
|
48 | 48 | </a> |
|
49 | 49 | </li> |
|
50 | 50 | <li> |
|
51 |
<a title="${_('C |
|
|
52 |
<span>${_('C |
|
|
51 | <a title="${_('Commits')}" href="${h.route_path('repo_commits',repo_name=repo_name)}"> | |
|
52 | <span>${_('Commits')}</span> | |
|
53 | 53 | </a> |
|
54 | 54 | </li> |
|
55 | 55 | <li> |
|
56 | 56 | <a title="${_('Files')}" href="${h.route_path('repo_files:default_commit',repo_name=repo_name)}"> |
|
57 | 57 | <span>${_('Files')}</span> |
|
58 | 58 | </a> |
|
59 | 59 | </li> |
|
60 | 60 | <li> |
|
61 | 61 | <a title="${_('Fork')}" href="${h.route_path('repo_fork_new',repo_name=repo_name)}"> |
|
62 | 62 | <span>${_('Fork')}</span> |
|
63 | 63 | </a> |
|
64 | 64 | </li> |
|
65 | 65 | </ul> |
|
66 | 66 | </div> |
|
67 | 67 | </%def> |
|
68 | 68 | |
|
69 | 69 | <%def name="repo_name(name,rtype,rstate,private,archived,fork_of,short_name=False,admin=False)"> |
|
70 | 70 | <% |
|
71 | 71 | def get_name(name,short_name=short_name): |
|
72 | 72 | if short_name: |
|
73 | 73 | return name.split('/')[-1] |
|
74 | 74 | else: |
|
75 | 75 | return name |
|
76 | 76 | %> |
|
77 | 77 | <div class="${'repo_state_pending' if rstate == 'repo_state_pending' else ''} truncate"> |
|
78 | 78 | ##NAME |
|
79 | 79 | <a href="${h.route_path('edit_repo',repo_name=name) if admin else h.route_path('repo_summary',repo_name=name)}"> |
|
80 | 80 | |
|
81 | 81 | ##TYPE OF REPO |
|
82 | 82 | %if h.is_hg(rtype): |
|
83 | 83 | <span title="${_('Mercurial repository')}"><i class="icon-hg" style="font-size: 14px;"></i></span> |
|
84 | 84 | %elif h.is_git(rtype): |
|
85 | 85 | <span title="${_('Git repository')}"><i class="icon-git" style="font-size: 14px"></i></span> |
|
86 | 86 | %elif h.is_svn(rtype): |
|
87 | 87 | <span title="${_('Subversion repository')}"><i class="icon-svn" style="font-size: 14px"></i></span> |
|
88 | 88 | %endif |
|
89 | 89 | |
|
90 | 90 | ##PRIVATE/PUBLIC |
|
91 | 91 | %if private is True and c.visual.show_private_icon: |
|
92 | 92 | <i class="icon-lock" title="${_('Private repository')}"></i> |
|
93 | 93 | %elif private is False and c.visual.show_public_icon: |
|
94 | 94 | <i class="icon-unlock-alt" title="${_('Public repository')}"></i> |
|
95 | 95 | %else: |
|
96 | 96 | <span></span> |
|
97 | 97 | %endif |
|
98 | 98 | ${get_name(name)} |
|
99 | 99 | </a> |
|
100 | 100 | %if fork_of: |
|
101 | 101 | <a href="${h.route_path('repo_summary',repo_name=fork_of.repo_name)}"><i class="icon-code-fork"></i></a> |
|
102 | 102 | %endif |
|
103 | 103 | %if rstate == 'repo_state_pending': |
|
104 | 104 | <span class="creation_in_progress tooltip" title="${_('This repository is being created in a background task')}"> |
|
105 | 105 | (${_('creating...')}) |
|
106 | 106 | </span> |
|
107 | 107 | %endif |
|
108 | 108 | |
|
109 | 109 | </div> |
|
110 | 110 | </%def> |
|
111 | 111 | |
|
112 | 112 | <%def name="repo_desc(description, stylify_metatags)"> |
|
113 | 113 | <% |
|
114 | 114 | tags, description = h.extract_metatags(description) |
|
115 | 115 | %> |
|
116 | 116 | |
|
117 | 117 | <div class="truncate-wrap"> |
|
118 | 118 | % if stylify_metatags: |
|
119 | 119 | % for tag_type, tag in tags: |
|
120 | 120 | ${h.style_metatag(tag_type, tag)|n} |
|
121 | 121 | % endfor |
|
122 | 122 | % endif |
|
123 | 123 | ${description} |
|
124 | 124 | </div> |
|
125 | 125 | |
|
126 | 126 | </%def> |
|
127 | 127 | |
|
128 | 128 | <%def name="last_change(last_change)"> |
|
129 | 129 | ${h.age_component(last_change, time_is_local=True)} |
|
130 | 130 | </%def> |
|
131 | 131 | |
|
132 | 132 | <%def name="revision(name,rev,tip,author,last_msg, commit_date)"> |
|
133 | 133 | <div> |
|
134 | 134 | %if rev >= 0: |
|
135 | 135 | <code><a title="${h.tooltip('%s\n%s\n\n%s' % (author, commit_date, last_msg))}" class="tooltip" href="${h.route_path('repo_commit',repo_name=name,commit_id=tip)}">${'r%s:%s' % (rev,h.short_id(tip))}</a></code> |
|
136 | 136 | %else: |
|
137 | 137 | ${_('No commits yet')} |
|
138 | 138 | %endif |
|
139 | 139 | </div> |
|
140 | 140 | </%def> |
|
141 | 141 | |
|
142 | 142 | <%def name="rss(name)"> |
|
143 | 143 | %if c.rhodecode_user.username != h.DEFAULT_USER: |
|
144 | 144 | <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a> |
|
145 | 145 | %else: |
|
146 | 146 | <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a> |
|
147 | 147 | %endif |
|
148 | 148 | </%def> |
|
149 | 149 | |
|
150 | 150 | <%def name="atom(name)"> |
|
151 | 151 | %if c.rhodecode_user.username != h.DEFAULT_USER: |
|
152 | 152 | <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a> |
|
153 | 153 | %else: |
|
154 | 154 | <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a> |
|
155 | 155 | %endif |
|
156 | 156 | </%def> |
|
157 | 157 | |
|
158 | 158 | <%def name="user_gravatar(email, size=16)"> |
|
159 | 159 | <div class="rc-user tooltip" title="${h.tooltip(h.author_string(email))}"> |
|
160 | 160 | ${base.gravatar(email, 16)} |
|
161 | 161 | </div> |
|
162 | 162 | </%def> |
|
163 | 163 | |
|
164 | 164 | <%def name="repo_actions(repo_name, super_user=True)"> |
|
165 | 165 | <div> |
|
166 | 166 | <div class="grid_edit"> |
|
167 | 167 | <a href="${h.route_path('edit_repo',repo_name=repo_name)}" title="${_('Edit')}"> |
|
168 | 168 | <i class="icon-pencil"></i>Edit</a> |
|
169 | 169 | </div> |
|
170 | 170 | <div class="grid_delete"> |
|
171 | 171 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=repo_name), request=request)} |
|
172 | 172 | ${h.submit('remove_%s' % repo_name,_('Delete'),class_="btn btn-link btn-danger", |
|
173 | 173 | onclick="return confirm('"+_('Confirm to delete this repository: %s') % repo_name+"');")} |
|
174 | 174 | ${h.end_form()} |
|
175 | 175 | </div> |
|
176 | 176 | </div> |
|
177 | 177 | </%def> |
|
178 | 178 | |
|
179 | 179 | <%def name="repo_state(repo_state)"> |
|
180 | 180 | <div> |
|
181 | 181 | %if repo_state == 'repo_state_pending': |
|
182 | 182 | <div class="tag tag4">${_('Creating')}</div> |
|
183 | 183 | %elif repo_state == 'repo_state_created': |
|
184 | 184 | <div class="tag tag1">${_('Created')}</div> |
|
185 | 185 | %else: |
|
186 | 186 | <div class="tag alert2" title="${h.tooltip(repo_state)}">invalid</div> |
|
187 | 187 | %endif |
|
188 | 188 | </div> |
|
189 | 189 | </%def> |
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | ## REPO GROUP RENDERERS |
|
193 | 193 | <%def name="quick_repo_group_menu(repo_group_name)"> |
|
194 | 194 | <i class="icon-more"></i> |
|
195 | 195 | <div class="menu_items_container hidden"> |
|
196 | 196 | <ul class="menu_items"> |
|
197 | 197 | <li> |
|
198 | 198 | <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}">${_('Summary')}</a> |
|
199 | 199 | </li> |
|
200 | 200 | |
|
201 | 201 | </ul> |
|
202 | 202 | </div> |
|
203 | 203 | </%def> |
|
204 | 204 | |
|
205 | 205 | <%def name="repo_group_name(repo_group_name, children_groups=None)"> |
|
206 | 206 | <div> |
|
207 | 207 | <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}"> |
|
208 | 208 | <i class="icon-repo-group" title="${_('Repository group')}" style="font-size: 14px"></i> |
|
209 | 209 | %if children_groups: |
|
210 | 210 | ${h.literal(' » '.join(children_groups))} |
|
211 | 211 | %else: |
|
212 | 212 | ${repo_group_name} |
|
213 | 213 | %endif |
|
214 | 214 | </a> |
|
215 | 215 | </div> |
|
216 | 216 | </%def> |
|
217 | 217 | |
|
218 | 218 | <%def name="repo_group_desc(description, personal, stylify_metatags)"> |
|
219 | 219 | |
|
220 | 220 | <% |
|
221 | 221 | tags, description = h.extract_metatags(description) |
|
222 | 222 | %> |
|
223 | 223 | |
|
224 | 224 | <div class="truncate-wrap"> |
|
225 | 225 | % if personal: |
|
226 | 226 | <div class="metatag" tag="personal">${_('personal')}</div> |
|
227 | 227 | % endif |
|
228 | 228 | |
|
229 | 229 | % if stylify_metatags: |
|
230 | 230 | % for tag_type, tag in tags: |
|
231 | 231 | ${h.style_metatag(tag_type, tag)|n} |
|
232 | 232 | % endfor |
|
233 | 233 | % endif |
|
234 | 234 | ${description} |
|
235 | 235 | </div> |
|
236 | 236 | |
|
237 | 237 | </%def> |
|
238 | 238 | |
|
239 | 239 | <%def name="repo_group_actions(repo_group_id, repo_group_name, gr_count)"> |
|
240 | 240 | <div class="grid_edit"> |
|
241 | 241 | <a href="${h.route_path('edit_repo_group',repo_group_name=repo_group_name)}" title="${_('Edit')}">Edit</a> |
|
242 | 242 | </div> |
|
243 | 243 | <div class="grid_delete"> |
|
244 | 244 | ${h.secure_form(h.route_path('edit_repo_group_advanced_delete', repo_group_name=repo_group_name), request=request)} |
|
245 | 245 | ${h.submit('remove_%s' % repo_group_name,_('Delete'),class_="btn btn-link btn-danger", |
|
246 | 246 | onclick="return confirm('"+_ungettext('Confirm to delete this group: %s with %s repository','Confirm to delete this group: %s with %s repositories',gr_count) % (repo_group_name, gr_count)+"');")} |
|
247 | 247 | ${h.end_form()} |
|
248 | 248 | </div> |
|
249 | 249 | </%def> |
|
250 | 250 | |
|
251 | 251 | |
|
252 | 252 | <%def name="user_actions(user_id, username)"> |
|
253 | 253 | <div class="grid_edit"> |
|
254 | 254 | <a href="${h.route_path('user_edit',user_id=user_id)}" title="${_('Edit')}"> |
|
255 | 255 | <i class="icon-pencil"></i>${_('Edit')}</a> |
|
256 | 256 | </div> |
|
257 | 257 | <div class="grid_delete"> |
|
258 | 258 | ${h.secure_form(h.route_path('user_delete', user_id=user_id), request=request)} |
|
259 | 259 | ${h.submit('remove_',_('Delete'),id="remove_user_%s" % user_id, class_="btn btn-link btn-danger", |
|
260 | 260 | onclick="return confirm('"+_('Confirm to delete this user: %s') % username+"');")} |
|
261 | 261 | ${h.end_form()} |
|
262 | 262 | </div> |
|
263 | 263 | </%def> |
|
264 | 264 | |
|
265 | 265 | <%def name="user_group_actions(user_group_id, user_group_name)"> |
|
266 | 266 | <div class="grid_edit"> |
|
267 | 267 | <a href="${h.route_path('edit_user_group', user_group_id=user_group_id)}" title="${_('Edit')}">Edit</a> |
|
268 | 268 | </div> |
|
269 | 269 | <div class="grid_delete"> |
|
270 | 270 | ${h.secure_form(h.route_path('user_groups_delete', user_group_id=user_group_id), request=request)} |
|
271 | 271 | ${h.submit('remove_',_('Delete'),id="remove_group_%s" % user_group_id, class_="btn btn-link btn-danger", |
|
272 | 272 | onclick="return confirm('"+_('Confirm to delete this user group: %s') % user_group_name+"');")} |
|
273 | 273 | ${h.end_form()} |
|
274 | 274 | </div> |
|
275 | 275 | </%def> |
|
276 | 276 | |
|
277 | 277 | |
|
278 | 278 | <%def name="user_name(user_id, username)"> |
|
279 | 279 | ${h.link_to(h.person(username, 'username_or_name_or_email'), h.route_path('user_edit', user_id=user_id))} |
|
280 | 280 | </%def> |
|
281 | 281 | |
|
282 | 282 | <%def name="user_profile(username)"> |
|
283 | 283 | ${base.gravatar_with_user(username, 16)} |
|
284 | 284 | </%def> |
|
285 | 285 | |
|
286 | 286 | <%def name="user_group_name(user_group_name)"> |
|
287 | 287 | <div> |
|
288 | 288 | <i class="icon-user-group" title="${_('User group')}"></i> |
|
289 | 289 | ${h.link_to_group(user_group_name)} |
|
290 | 290 | </div> |
|
291 | 291 | </%def> |
|
292 | 292 | |
|
293 | 293 | |
|
294 | 294 | ## GISTS |
|
295 | 295 | |
|
296 | 296 | <%def name="gist_gravatar(full_contact)"> |
|
297 | 297 | <div class="gist_gravatar"> |
|
298 | 298 | ${base.gravatar(full_contact, 30)} |
|
299 | 299 | </div> |
|
300 | 300 | </%def> |
|
301 | 301 | |
|
302 | 302 | <%def name="gist_access_id(gist_access_id, full_contact)"> |
|
303 | 303 | <div> |
|
304 | 304 | <b> |
|
305 | 305 | <a href="${h.route_path('gist_show', gist_id=gist_access_id)}">gist: ${gist_access_id}</a> |
|
306 | 306 | </b> |
|
307 | 307 | </div> |
|
308 | 308 | </%def> |
|
309 | 309 | |
|
310 | 310 | <%def name="gist_author(full_contact, created_on, expires)"> |
|
311 | 311 | ${base.gravatar_with_user(full_contact, 16)} |
|
312 | 312 | </%def> |
|
313 | 313 | |
|
314 | 314 | |
|
315 | 315 | <%def name="gist_created(created_on)"> |
|
316 | 316 | <div class="created"> |
|
317 | 317 | ${h.age_component(created_on, time_is_local=True)} |
|
318 | 318 | </div> |
|
319 | 319 | </%def> |
|
320 | 320 | |
|
321 | 321 | <%def name="gist_expires(expires)"> |
|
322 | 322 | <div class="created"> |
|
323 | 323 | %if expires == -1: |
|
324 | 324 | ${_('never')} |
|
325 | 325 | %else: |
|
326 | 326 | ${h.age_component(h.time_to_utcdatetime(expires))} |
|
327 | 327 | %endif |
|
328 | 328 | </div> |
|
329 | 329 | </%def> |
|
330 | 330 | |
|
331 | 331 | <%def name="gist_type(gist_type)"> |
|
332 | 332 | %if gist_type != 'public': |
|
333 | 333 | <div class="tag">${_('Private')}</div> |
|
334 | 334 | %endif |
|
335 | 335 | </%def> |
|
336 | 336 | |
|
337 | 337 | <%def name="gist_description(gist_description)"> |
|
338 | 338 | ${gist_description} |
|
339 | 339 | </%def> |
|
340 | 340 | |
|
341 | 341 | |
|
342 | 342 | ## PULL REQUESTS GRID RENDERERS |
|
343 | 343 | |
|
344 | 344 | <%def name="pullrequest_target_repo(repo_name)"> |
|
345 | 345 | <div class="truncate"> |
|
346 | 346 | ${h.link_to(repo_name,h.route_path('repo_summary',repo_name=repo_name))} |
|
347 | 347 | </div> |
|
348 | 348 | </%def> |
|
349 | 349 | <%def name="pullrequest_status(status)"> |
|
350 | 350 | <div class="${'flag_status %s' % status} pull-left"></div> |
|
351 | 351 | </%def> |
|
352 | 352 | |
|
353 | 353 | <%def name="pullrequest_title(title, description)"> |
|
354 | 354 | ${title} |
|
355 | 355 | </%def> |
|
356 | 356 | |
|
357 | 357 | <%def name="pullrequest_comments(comments_nr)"> |
|
358 | 358 | <i class="icon-comment"></i> ${comments_nr} |
|
359 | 359 | </%def> |
|
360 | 360 | |
|
361 | 361 | <%def name="pullrequest_name(pull_request_id, target_repo_name, short=False)"> |
|
362 | 362 | <a href="${h.route_path('pullrequest_show',repo_name=target_repo_name,pull_request_id=pull_request_id)}"> |
|
363 | 363 | % if short: |
|
364 | 364 | #${pull_request_id} |
|
365 | 365 | % else: |
|
366 | 366 | ${_('Pull request #%(pr_number)s') % {'pr_number': pull_request_id,}} |
|
367 | 367 | % endif |
|
368 | 368 | </a> |
|
369 | 369 | </%def> |
|
370 | 370 | |
|
371 | 371 | <%def name="pullrequest_updated_on(updated_on)"> |
|
372 | 372 | ${h.age_component(h.time_to_utcdatetime(updated_on))} |
|
373 | 373 | </%def> |
|
374 | 374 | |
|
375 | 375 | <%def name="pullrequest_author(full_contact)"> |
|
376 | 376 | ${base.gravatar_with_user(full_contact, 16)} |
|
377 | 377 | </%def> |
|
378 | 378 | |
|
379 | 379 | |
|
380 | 380 | ## ARTIFACT RENDERERS |
|
381 | 381 | |
|
382 | 382 | <%def name="repo_artifact_uid(repo_name, file_uid)"> |
|
383 | 383 | <code><a href="${h.route_path('repo_artifacts_get', repo_name=repo_name, uid=file_uid)}">${file_uid}</a></code> |
|
384 | 384 | </%def> |
|
385 | 385 | |
|
386 | 386 | <%def name="repo_artifact_uid_action(repo_name, file_uid)"> |
|
387 | 387 | <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${h.route_url('repo_artifacts_get', repo_name=repo_name, uid=file_uid)}" title="${_('Copy the full url')}"></i> |
|
388 | 388 | </%def> |
|
389 | 389 | |
|
390 | 390 | <%def name="repo_artifact_actions(repo_name, file_store_id, file_uid)"> |
|
391 | 391 | ## <div class="grid_edit"> |
|
392 | 392 | ## <a href="#Edit" title="${_('Edit')}">${_('Edit')}</a> |
|
393 | 393 | ## </div> |
|
394 | 394 | % if h.HasRepoPermissionAny('repository.admin')(c.repo_name): |
|
395 | 395 | <div class="grid_delete"> |
|
396 | 396 | ${h.secure_form(h.route_path('repo_artifacts_delete', repo_name=repo_name, uid=file_store_id), request=request)} |
|
397 | 397 | ${h.submit('remove_',_('Delete'),id="remove_artifact_%s" % file_store_id, class_="btn btn-link btn-danger", |
|
398 | 398 | onclick="return confirm('"+_('Confirm to delete this artifact: %s') % file_uid+"');")} |
|
399 | 399 | ${h.end_form()} |
|
400 | 400 | </div> |
|
401 | 401 | % endif |
|
402 | 402 | </%def> |
|
403 | 403 | |
|
404 | 404 | <%def name="markup_form(form_id, form_text='', help_text=None)"> |
|
405 | 405 | |
|
406 | 406 | <div class="markup-form"> |
|
407 | 407 | <div class="markup-form-area"> |
|
408 | 408 | <div class="markup-form-area-header"> |
|
409 | 409 | <ul class="nav-links clearfix"> |
|
410 | 410 | <li class="active"> |
|
411 | 411 | <a href="#edit-text" tabindex="-1" id="edit-btn_${form_id}">${_('Write')}</a> |
|
412 | 412 | </li> |
|
413 | 413 | <li class=""> |
|
414 | 414 | <a href="#preview-text" tabindex="-1" id="preview-btn_${form_id}">${_('Preview')}</a> |
|
415 | 415 | </li> |
|
416 | 416 | </ul> |
|
417 | 417 | </div> |
|
418 | 418 | |
|
419 | 419 | <div class="markup-form-area-write" style="display: block;"> |
|
420 | 420 | <div id="edit-container_${form_id}"> |
|
421 | 421 | <textarea id="${form_id}" name="${form_id}" class="comment-block-ta ac-input">${form_text if form_text else ''}</textarea> |
|
422 | 422 | </div> |
|
423 | 423 | <div id="preview-container_${form_id}" class="clearfix" style="display: none;"> |
|
424 | 424 | <div id="preview-box_${form_id}" class="preview-box"></div> |
|
425 | 425 | </div> |
|
426 | 426 | </div> |
|
427 | 427 | |
|
428 | 428 | <div class="markup-form-area-footer"> |
|
429 | 429 | <div class="toolbar"> |
|
430 | 430 | <div class="toolbar-text"> |
|
431 | 431 | ${(_('Parsed using %s syntax') % ( |
|
432 | 432 | ('<a href="%s">%s</a>' % (h.route_url('%s_help' % c.visual.default_renderer), c.visual.default_renderer.upper())), |
|
433 | 433 | ) |
|
434 | 434 | )|n} |
|
435 | 435 | </div> |
|
436 | 436 | </div> |
|
437 | 437 | </div> |
|
438 | 438 | </div> |
|
439 | 439 | |
|
440 | 440 | <div class="markup-form-footer"> |
|
441 | 441 | % if help_text: |
|
442 | 442 | <span class="help-block">${help_text}</span> |
|
443 | 443 | % endif |
|
444 | 444 | </div> |
|
445 | 445 | </div> |
|
446 | 446 | <script type="text/javascript"> |
|
447 | 447 | new MarkupForm('${form_id}'); |
|
448 | 448 | </script> |
|
449 | 449 | |
|
450 | 450 | </%def> |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now