@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+"""
+    rhodecode.lib.middleware.wrapper
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    request time mesuring app
+
+    :created_on: May 23, 2013
+    :author: marcink
+    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
+    :license: GPLv3, see COPYING for more details.
+"""
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+import time
+import logging
+from rhodecode.lib.base import _get_ip_addr, _get_access_path
+from rhodecode.lib.utils2 import safe_unicode
+
+
+class RequestWrapper(object):
+
+    def __init__(self, app, config):
+        self.application = app
+        self.config = config
+
+    def __call__(self, environ, start_response):
+        start = time.time()
+        try:
+            return self.application(environ, start_response)
+        finally:
+            log = logging.getLogger('rhodecode.' + self.__class__.__name__)
+            log.info('IP: %s Request to %s time: %.3fs' % (
+                _get_ip_addr(environ),
+                safe_unicode(_get_access_path(environ)), time.time() - start)
+            )
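The new file is plain WSGI middleware: it delegates to the wrapped application and, in a finally block, logs the client IP, the request path and the elapsed wall-clock time. A minimal self-contained sketch of the same pattern (TimedApp and hello_app are illustrative stand-ins, not part of this changeset; the real class reuses _get_ip_addr and _get_access_path from rhodecode.lib.base):

    import time
    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('rhodecode.RequestWrapper')

    class TimedApp(object):
        # same shape as RequestWrapper above, without the rhodecode imports
        def __init__(self, app, config=None):
            self.application = app
            self.config = config or {}

        def __call__(self, environ, start_response):
            start = time.time()
            try:
                return self.application(environ, start_response)
            finally:
                log.info('IP: %s Request to %s time: %.3fs',
                         environ.get('REMOTE_ADDR', '0.0.0.0'),
                         environ.get('PATH_INFO', '/'),
                         time.time() - start)

    def hello_app(environ, start_response):
        # stand-in downstream WSGI application
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'hello\n']

    app = TimedApp(hello_app)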
@@ -65,6 +65,8 @@ static_files = true
 lang = en
 cache_dir = %(here)s/data
 index_dir = %(here)s/data/index
+# set this path to use archive download cache
+#archive_cache_dir = /tmp/rhodecode_tarballcache
 app_instance_uuid = rc-develop
 cut_off_limit = 256000
 vcs_full_cache = True
@@ -154,6 +156,11 @@ instance_id =
 ## handling that. Set this variable to 403 to return HTTPForbidden
 auth_ret_code =
 
+## locking return code. When repository is locked return this HTTP code. 2XX
+## codes don't break the transactions while 4XX codes do
+lock_ret_code = 423
+
+
 ####################################
 ### CELERY CONFIG ####
 ####################################
@@ -178,7 +178,8 @@ lock
 ----
 
 Set locking state on given repository by given user. If userid param is skipped
-, then it is set to id of user whos calling this method.
+, then it is set to id of user whos calling this method. If locked param is skipped
+then function shows current lock state of given repo.
 This command can be executed only using api_key belonging to user with admin
 rights or regular user that have admin or write access to repository.
 
@@ -190,7 +191,7 @@ INPUT::
     args : {
         "repoid" : "<reponame or repo_id>"
        "userid" : "<user_id or username = Optional(=apiuser)>",
-        "locked" : "<bool true|false>"
+        "locked" : "<bool true|false = Optional(=None)>"
     }
 
 OUTPUT::
@@ -16,6 +16,27 @@ news
 fixes
 +++++
 
+1.5.4 (**2013-03-13**)
+----------------------
+
+news
+++++
+
+
+fixes
++++++
+
+- fixed webtest dependency issues
+- fixed issues with celery tasks for password reset
+- fixed #763 gravatar helper function should fallback into default image
+  if email is empty
+- fixes #762 user global activation flag is also respected for LDAP created
+  accounts
+- use password obfuscate when clonning a remote repo with credentials inside
+- fixed issue with renaming repos group together with changing parents
+- disallow cloning from file:/// URIs
+- handle all cases with multiple IP addresses in proxy headers
+
 1.5.3 (**2013-02-12**)
 ----------------------
 
@@ -139,8 +160,8 @@ fixes
   When this is used together with mercurial internal translation system
   it can lead to UnicodeDecodeErrors
 - fixes #645 Fix git handler when doing delete remote branch
-- implements #649 added two seperate method for author and commiter to VCS
-  changeset class switch author for git backed to be the real author not commiter
+- implements #649 added two seperate method for author and committer to VCS
+  changeset class switch author for git backed to be the real author not committer
 - fix issue #504 RhodeCode is showing different versions of README on
   different summary page loads
 - implemented #658 Changing username in LDAP-Mode should not be allowed.
@@ -478,7 +478,7 @@ Changing default encoding
 
 By default RhodeCode uses utf8 encoding, starting from 1.3 series this
 can be changed, simply edit default_encoding in .ini file to desired one.
-This affects many parts in rhodecode including commiters names, filenames,
+This affects many parts in rhodecode including committers names, filenames,
 encoding of commit messages. In addition RhodeCode can detect if `chardet`
 library is installed. If `chardet` is detected RhodeCode will fallback to it
 when there are encode/decode errors.
@@ -51,4 +51,4 @@
 .vc { color: #ff99ff } /* Name.Variable.Class */
 .vg { color: #ff99ff } /* Name.Variable.Global */
 .vi { color: #ff99ff } /* Name.Variable.Instance */
-.il { color: #009999 } /* Literal.Number.Integer.Long */
+.il { color: #009999 } /* Literal.Number.Integer.Long */
\ No newline at end of file
@@ -65,6 +65,8 @@ static_files = true
 lang = en
 cache_dir = %(here)s/data
 index_dir = %(here)s/data/index
+# set this path to use archive download cache
+#archive_cache_dir = /tmp/rhodecode_tarballcache
 app_instance_uuid = rc-production
 cut_off_limit = 256000
 vcs_full_cache = True
@@ -154,6 +156,11 @@ instance_id =
 ## handling that. Set this variable to 403 to return HTTPForbidden
 auth_ret_code =
 
+## locking return code. When repository is locked return this HTTP code. 2XX
+## codes don't break the transactions while 4XX codes do
+lock_ret_code = 423
+
+
 ####################################
 ### CELERY CONFIG ####
 ####################################
@@ -65,6 +65,8 @@ static_files = true
 lang = en
 cache_dir = %(here)s/data
 index_dir = %(here)s/data/index
+# set this path to use archive download cache
+#archive_cache_dir = /tmp/rhodecode_tarballcache
 app_instance_uuid = ${app_instance_uuid}
 cut_off_limit = 256000
 vcs_full_cache = True
@@ -154,6 +156,11 @@ instance_id =
 ## handling that. Set this variable to 403 to return HTTPForbidden
 auth_ret_code =
 
+## locking return code. When repository is locked return this HTTP code. 2XX
+## codes don't break the transactions while 4XX codes do
+lock_ret_code = 423
+
+
 ####################################
 ### CELERY CONFIG ####
 ####################################
@@ -15,6 +15,7 @@ from rhodecode.lib.middleware.simplehg i
 from rhodecode.lib.middleware.simplegit import SimpleGit
 from rhodecode.lib.middleware.https_fixup import HttpsFixup
 from rhodecode.config.environment import load_environment
+from rhodecode.lib.middleware.wrapper import RequestWrapper
 
 
 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
@@ -55,7 +56,7 @@ def make_app(global_conf, full_stack=Tru
 
         from rhodecode.lib.middleware.sentry import Sentry
         from rhodecode.lib.middleware.errormator import Errormator
-        if Errormator:
+        if Errormator and asbool(config['app_conf'].get('errormator')):
             app = Errormator(app, config)
         elif Sentry:
             app = Sentry(app, config)
@@ -67,7 +68,7 @@ def make_app(global_conf, full_stack=Tru
     # need any pylons stack middleware in them
     app = SimpleHg(app, config)
     app = SimpleGit(app, config)
-
+    app = RequestWrapper(app, config)
     # Display error documents for 401, 403, 404 status codes (and
     # 500 when debug is disabled)
     if asbool(config['debug']):
@@ -56,6 +56,18 @@ def make_map(config):
         repos_group_name = match_dict.get('group_name')
         return is_valid_repos_group(repos_group_name, config['base_path'])
 
+    def check_group_skip_path(environ, match_dict):
+        """
+        check for valid repository group for proper 404 handling, but skips
+        verification of existing path
+
+        :param environ:
+        :param match_dict:
+        """
+        repos_group_name = match_dict.get('group_name')
+        return is_valid_repos_group(repos_group_name, config['base_path'],
+                                    skip_path_check=True)
+
     def check_int(environ, match_dict):
         return match_dict.get('id').isdigit()
 
@@ -171,9 +183,10 @@ def make_map(config):
                             function=check_group))
         m.connect("delete_repos_group", "/repos_groups/{group_name:.*?}",
                   action="delete", conditions=dict(method=["DELETE"],
-                                                   function=check_group))
+                                                   function=check_group_skip_path))
         m.connect("edit_repos_group", "/repos_groups/{group_name:.*?}/edit",
-                  action="edit", conditions=dict(method=["GET"],
+                  action="edit", conditions=dict(method=["GET"],
+                                                 function=check_group))
         m.connect("formatted_edit_repos_group",
                   "/repos_groups/{group_name:.*?}.{format}/edit",
                   action="edit", conditions=dict(method=["GET"],
@@ -251,31 +251,25 @@ class ReposGroupsController(BaseControll
         repos = gr.repositories.all()
         if repos:
             h.flash(_('This group contains %s repositores and cannot be '
-                      'deleted') % len(repos),
-                    category='error')
+                      'deleted') % len(repos), category='warning')
+            return redirect(url('repos_groups'))
+
+        children = gr.children.all()
+        if children:
+            h.flash(_('This group contains %s subgroups and cannot be deleted'
+                      % (len(children))), category='warning')
             return redirect(url('repos_groups'))
 
         try:
             ReposGroupModel().delete(group_name)
             Session().commit()
-            h.flash(_('removed repos group %s') %
+            h.flash(_('removed repos group %s') % group_name,
                     category='success')
             #TODO: in future action_logger(, '', '', '', self.sa)
-        except IntegrityError, e:
-            if str(e.message).find('groups_group_parent_id_fkey') != -1:
-                log.error(traceback.format_exc())
-                h.flash(_('Cannot delete this group it still contains '
-                          'subgroups'),
-                        category='warning')
-            else:
-                log.error(traceback.format_exc())
-                h.flash(_('error occurred during deletion of repos '
-                          'group %s') % gr.group_name, category='error')
-
         except Exception:
             log.error(traceback.format_exc())
             h.flash(_('error occurred during deletion of repos '
-                      'group %s') %
+                      'group %s') % group_name, category='error')
 
         return redirect(url('repos_groups'))
 
@@ -38,7 +38,7 @@ from pylons.i18n.translation import _
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator, \
     HasPermissionAnyDecorator, NotAnonymous, HasPermissionAny,\
-    HasReposGroupPermissionAll, HasReposGroupPermissionAny
+    HasReposGroupPermissionAll, HasReposGroupPermissionAny, AuthUser
 from rhodecode.lib.base import BaseController, render
 from rhodecode.lib.celerylib import tasks, run_task
 from rhodecode.lib.utils import repo2db_mapper, invalidate_cache, \
@@ -409,6 +409,8 @@ class SettingsController(BaseController)
         # url('admin_settings_my_account')
 
         c.user = User.get(self.rhodecode_user.user_id)
+        c.perm_user = AuthUser(user_id=self.rhodecode_user.user_id,
+                               ip_addr=self.ip_addr)
         c.ldap_dn = c.user.ldap_dn
 
         if c.user.username == 'default':
@@ -440,6 +442,8 @@ class SettingsController(BaseController)
         # url('admin_settings_my_account_update', id=ID)
         uid = self.rhodecode_user.user_id
         c.user = User.get(self.rhodecode_user.user_id)
+        c.perm_user = AuthUser(user_id=self.rhodecode_user.user_id,
+                               ip_addr=self.ip_addr)
         c.ldap_dn = c.user.ldap_dn
         email = self.rhodecode_user.email
         _form = UserForm(edit=True,
@@ -27,14 +27,14 @@
 
 import traceback
 import logging
-from pylons.controllers.util import abort
 
 from rhodecode.controllers.api import JSONRPCController, JSONRPCError
 from rhodecode.lib.auth import PasswordGenerator, AuthUser, \
     HasPermissionAllDecorator, HasPermissionAnyDecorator, \
     HasPermissionAnyApi, HasRepoPermissionAnyApi
 from rhodecode.lib.utils import map_groups, repo2db_mapper
-from rhodecode.lib.utils2 import str2bool
+from rhodecode.lib.utils2 import str2bool, time_to_datetime, safe_int
+from rhodecode.lib import helpers as h
 from rhodecode.model.meta import Session
 from rhodecode.model.scm import ScmModel
 from rhodecode.model.repo import RepoModel
@@ -42,6 +42,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.model.users_group import UserGroupModel
 from rhodecode.model.permission import PermissionModel
 from rhodecode.model.db import Repository, RhodeCodeSetting, UserIpMap
+from rhodecode.lib.compat import json
 
 log = logging.getLogger(__name__)
 
@@ -229,7 +230,8 @@ class ApiController(JSONRPCController):
                 'Error occurred during cache invalidation action'
             )
 
-    def lock(self, apiuser, repoid, locked
+    def lock(self, apiuser, repoid, locked=Optional(None),
+             userid=Optional(OAttr('apiuser'))):
        """
         Set locking state on particular repository by given user, if
         this command is runned by non-admin account userid is set to user
@@ -257,21 +259,77 @@ class ApiController(JSONRPCController):
 
         if isinstance(userid, Optional):
             userid = apiuser.user_id
+
         user = get_user_or_error(userid)
-        locked = str2bool(locked)
-        try:
-            if locked:
-                Repository.lock(repo, user.user_id)
+
+        if isinstance(locked, Optional):
+            lockobj = Repository.getlock(repo)
+
+            if lockobj[0] is None:
+                return ('Repo `%s` not locked. Locked=`False`.'
+                        % (repo.repo_name))
             else:
-                Repository.unlock(repo)
+                userid, time_ = lockobj
+                user = get_user_or_error(userid)
+
+                return ('Repo `%s` locked by `%s`. Locked=`True`. '
+                        'Locked since: `%s`'
+                        % (repo.repo_name, user.username,
+                           json.dumps(time_to_datetime(time_))))
+
+        else:
+            locked = str2bool(locked)
+            try:
+                if locked:
+                    Repository.lock(repo, user.user_id)
+                else:
+                    Repository.unlock(repo)
+
+                return ('User `%s` set lock state for repo `%s` to `%s`'
+                        % (user.username, repo.repo_name, locked))
+            except Exception:
+                log.error(traceback.format_exc())
+                raise JSONRPCError(
+                    'Error occurred locking repository `%s`' % repo.repo_name
+                )
 
-            return ('User `%s` set lock state for repo `%s` to `%s`'
-                    % (user.username, repo.repo_name, locked))
-        except Exception:
-            log.error(traceback.format_exc())
-            raise JSONRPCError(
-                'Error occurred locking repository `%s`' % repo.repo_name
-            )
+    def get_locks(self, apiuser, userid=Optional(OAttr('apiuser'))):
+        """
+        Get all locks for given userid, if
+        this command is runned by non-admin account userid is set to user
+        who is calling this method, thus returning locks for himself
+
+        :param apiuser:
+        :param userid:
+        """
+        if HasPermissionAnyApi('hg.admin')(user=apiuser):
+            pass
+        else:
+            #make sure normal user does not pass someone else userid,
+            #he is not allowed to do that
+            if not isinstance(userid, Optional) and userid != apiuser.user_id:
+                raise JSONRPCError(
+                    'userid is not the same as your user'
+                )
+        ret = []
+        if isinstance(userid, Optional):
+            user = None
+        else:
+            user = get_user_or_error(userid)
+
+        #show all locks
+        for r in Repository.getAll():
+            userid, time_ = r.locked
+            if time_:
+                _api_data = r.get_api_data()
+                # if we use userfilter just show the locks for this user
+                if user:
+                    if safe_int(userid) == user.user_id:
+                        ret.append(_api_data)
+                else:
+                    ret.append(_api_data)
+
+        return ret
 
     @HasPermissionAllDecorator('hg.admin')
     def show_ip(self, apiuser, userid):
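With the reworked signature a client can omit the locked argument to query the current state, pass true/false to change it, or call the new get_locks method to list its own locks. A hedged sketch of driving this over RhodeCode's JSON-RPC endpoint (the /_admin/api URL, the api_key value and the repository name are placeholders, and api_call is a local helper that is not part of this changeset):

    import json
    import urllib2

    def api_call(server_url, api_key, method, **args):
        # one JSON-RPC request against the _admin/api endpoint
        payload = json.dumps({'id': 1, 'api_key': api_key,
                              'method': method, 'args': args})
        req = urllib2.Request(server_url + '/_admin/api', data=payload,
                              headers={'Content-Type': 'application/json'})
        return json.loads(urllib2.urlopen(req).read())

    # locked omitted -> Optional(None) -> just report the current lock state
    print api_call('http://localhost:5000', 'SECRET', 'lock', repoid='myrepo')

    # explicitly lock, then list all repositories locked by the calling user
    print api_call('http://localhost:5000', 'SECRET', 'lock',
                   repoid='myrepo', locked=True)
    print api_call('http://localhost:5000', 'SECRET', 'get_locks')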
@@ -89,11 +89,17 @@ class CompareController(BaseRepoControll
         # other_ref will be evaluated in other_repo
         other_ref = (other_ref_type, other_ref)
         other_repo = request.GET.get('other_repo', org_repo)
+        # If merge is True:
+        #   Show what org would get if merged with other:
+        #   List changesets that are ancestors of other but not of org.
+        #   New changesets in org is thus ignored.
+        #   Diff will be from common ancestor, and merges of org to other will thus be ignored.
+        # If merge is False:
+        #   Make a raw diff from org to other, no matter if related or not.
+        #   Changesets in one and not in the other will be ignored
+        merge = bool(request.GET.get('merge'))
         # fulldiff disables cut_off_limit
         c.fulldiff = request.GET.get('fulldiff')
-        # only consider this range of changesets
-        rev_start = request.GET.get('rev_start')
-        rev_end = request.GET.get('rev_end')
         # partial uses compare_cs.html template directly
         partial = request.environ.get('HTTP_X_PARTIAL_XHR')
         # as_form puts hidden input field with changeset revisions
@@ -103,7 +109,8 @@ class CompareController(BaseRepoControll
                            repo_name=other_repo,
                            org_ref_type=other_ref[0], org_ref=other_ref[1],
                            other_repo=org_repo,
-                           other_ref_type=org_ref[0], other_ref=org_ref[1]
+                           other_ref_type=org_ref[0], other_ref=org_ref[1],
+                           merge=merge or '')
 
         org_repo = Repository.get_by_repo_name(org_repo)
         other_repo = Repository.get_by_repo_name(other_repo)
@@ -133,37 +140,23 @@ class CompareController(BaseRepoControll
         c.org_ref_type = org_ref[0]
         c.other_ref_type = other_ref[0]
 
-        if rev_start and rev_end:
-            # swap revs with cherry picked ones, save them for display
-            #org_ref = ('rev', rev_start)
-            #other_ref = ('rev', rev_end)
-            c.org_ref = rev_start[:12]
-            c.other_ref = rev_end[:12]
-            # get parent of
-            # rev start to include it in the diff
-            _cs = other_repo.scm_instance.get_changeset(rev_start)
-            rev_start = _cs.parents[0].raw_id if _cs.parents else EmptyChangeset().raw_id
-            org_ref = ('rev', rev_start)
-            other_ref = ('rev', rev_end)
-            #if we cherry pick it's not remote, make the other_repo org_repo
-            org_repo = other_repo
-
-        c.cs_ranges, ancestor = PullRequestModel().get_compare_data(
-            org_repo, org_ref, other_repo, other_ref)
+        c.cs_ranges, c.ancestor = PullRequestModel().get_compare_data(
+            org_repo, org_ref, other_repo, other_ref, merge)
 
         c.statuses = c.rhodecode_db_repo.statuses([x.raw_id for x in
                                                    c.cs_ranges])
         if partial:
+            assert c.ancestor
             return render('compare/compare_cs.html')
 
-        if ancestor and org_repo != other_repo:
+        if c.ancestor:
+            assert merge
             # case we want a simple diff without incoming changesets,
             # previewing what will be merged.
-            # Make the diff on the
-            # revision that is common ancestor
+            # Make the diff on the other repo (which is known to have other_ref)
             log.debug('Using ancestor %s as org_ref instead of %s'
-                      % (ancestor, org_ref))
-            org_ref = ('rev', ancestor)
+                      % (c.ancestor, org_ref))
+            org_ref = ('rev', c.ancestor)
             org_repo = other_repo
 
         diff_limit = self.cut_off_limit if not c.fulldiff else None
@@ -27,6 +27,7 @@ import os
 import logging
 import traceback
 import tempfile
+import shutil
 
 from pylons import request, response, tmpl_context as c, url
 from pylons.i18n.translation import _
@@ -315,7 +316,7 @@ class FilesController(BaseRepoController
         try:
             self.scm_model.commit_change(repo=c.rhodecode_repo,
                                          repo_name=repo_name, cs=c.cs,
-                                         user=self.rhodecode_user,
+                                         user=self.rhodecode_user.user_id,
                                          author=author, message=message,
                                          content=content, f_path=f_path)
             h.flash(_('Successfully committed to %s') % f_path,
@@ -378,7 +379,7 @@ class FilesController(BaseRepoController
         try:
             self.scm_model.create_node(repo=c.rhodecode_repo,
                                        repo_name=repo_name, cs=c.cs,
-                                       user=self.rhodecode_user,
+                                       user=self.rhodecode_user.user_id,
                                        author=author, message=message,
                                        content=content, f_path=node_path)
             h.flash(_('Successfully committed to %s') % node_path,
@@ -429,11 +430,40 @@ class FilesController(BaseRepoController
             return _('Empty repository')
         except (ImproperArchiveTypeError, KeyError):
             return _('Unknown archive type')
+        # archive cache
+        from rhodecode import CONFIG
+        rev_name = cs.raw_id[:12]
+        archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
+                                    safe_str(rev_name), ext)
 
-        fd, archive = tempfile.mkstemp()
-        t = open(archive, 'wb')
-        cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
-        t.close()
+        use_cached_archive = False  # defines if we use cached version of archive
+        archive_cache_enabled = CONFIG.get('archive_cache_dir')
+        if not subrepos and archive_cache_enabled:
+            #check if we it's ok to write
+            if not os.path.isdir(CONFIG['archive_cache_dir']):
+                os.makedirs(CONFIG['archive_cache_dir'])
+            cached_archive_path = os.path.join(CONFIG['archive_cache_dir'], archive_name)
+            if os.path.isfile(cached_archive_path):
+                log.debug('Found cached archive in %s' % cached_archive_path)
+                fd, archive = None, cached_archive_path
+                use_cached_archive = True
+            else:
+                log.debug('Archive %s is not yet cached' % (archive_name))
+
+        if not use_cached_archive:
+            #generate new archive
+            try:
+                fd, archive = tempfile.mkstemp()
+                t = open(archive, 'wb')
+                log.debug('Creating new temp archive in %s' % archive)
+                cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
+                if archive_cache_enabled:
+                    #if we generated the archive and use cache rename that
+                    log.debug('Storing new archive in %s' % cached_archive_path)
+                    shutil.move(archive, cached_archive_path)
+                    archive = cached_archive_path
+            finally:
+                t.close()
 
         def get_chunked_archive(archive):
             stream = open(archive, 'rb')
@@ -441,13 +471,15 @@ class FilesController(BaseRepoController
                 data = stream.read(16 * 1024)
                 if not data:
                     stream.close()
-                    os.close(fd)
-                    os.remove(archive)
+                    if fd:  # fd means we used temporary file
+                        os.close(fd)
+                    if not archive_cache_enabled:
+                        log.debug('Destroing temp archive %s' % archive)
+                        os.remove(archive)
                     break
                 yield data
 
-        response.content_disposition = str('attachment; filename=%s
-                                            % (repo_name, revision[:12], ext))
+        response.content_disposition = str('attachment; filename=%s' % (archive_name))
         response.content_type = str(content_type)
         return get_chunked_archive(archive)
 
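The archive code now looks for a pre-built tarball under archive_cache_dir before generating one, and moves freshly generated archives into the cache for later requests. Roughly, the decision boils down to the following sketch (get_archive_path and build_archive are illustrative names only; the real code streams through cs.fill_archive and RhodeCode's CONFIG dict):

    import os
    import shutil
    import tempfile

    def get_archive_path(cache_dir, repo_name, rev, ext, build_archive):
        # cache key mirrors the hunk above: <repo>-<12-char rev><ext>
        archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev[:12], ext)
        cached = os.path.join(cache_dir, archive_name) if cache_dir else None
        if cache_dir:
            if not os.path.isdir(cache_dir):
                os.makedirs(cache_dir)
            if os.path.isfile(cached):
                return cached            # cache hit, nothing to generate
        fd, tmp_path = tempfile.mkstemp()
        os.close(fd)
        build_archive(tmp_path)          # stand-in for cs.fill_archive(...)
        if cache_dir:
            shutil.move(tmp_path, cached)
            return cached
        return tmp_path                  # caller must remove this afterwards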
@@ -52,6 +52,7 @@ from rhodecode.model.repo import RepoMod
 from rhodecode.model.comment import ChangesetCommentsModel
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.forms import PullRequestForm
+from mercurial import scmutil
 
 log = logging.getLogger(__name__)
 
@@ -67,7 +68,7 @@ class PullrequestsController(BaseRepoCon
         c.users_array = repo_model.get_users_js()
         c.users_groups_array = repo_model.get_users_groups_js()
 
-    def _get_repo_refs(self, repo, rev=None):
+    def _get_repo_refs(self, repo, rev=None, branch_rev=None):
        """return a structure with repo's interesting changesets, suitable for
         the selectors in pullrequest.html"""
         branches = [('branch:%s:%s' % (k, v), k)
@@ -83,11 +84,25 @@ class PullrequestsController(BaseRepoCon
         tips = [x[1] for x in branches + bookmarks + tags
                 if x[0].endswith(colontip)]
         selected = 'tag:tip:%s' % tip
-        special = [(selected, 'tip
+        special = [(selected, 'tip: %s' % ', '.join(tips))]
 
         if rev:
             selected = 'rev:%s:%s' % (rev, rev)
-            special.append((selected, rev))
+            special.append((selected, '%s: %s' % (_("Selected"), rev[:12])))
+
+        # list named branches that has been merged to this named branch - it should probably merge back
+        if branch_rev:
+            # not restricting to merge() would also get branch point and be better
+            # (especially because it would get the branch point) ... but is currently too expensive
+            revs = ["sort(parents(branch(id('%s')) and merge()) - branch(id('%s')))" %
+                    (branch_rev, branch_rev)]
+            otherbranches = {}
+            for i in scmutil.revrange(repo._repo, revs):
+                cs = repo.get_changeset(i)
+                otherbranches[cs.branch] = cs.raw_id
+            for branch, node in otherbranches.iteritems():
+                selected = 'branch:%s:%s' % (branch, node)
+                special.append((selected, '%s: %s' % (_('Peer'), branch)))
 
         return [(special, _("Special")),
                 (bookmarks, _("Bookmarks")),
@@ -121,18 +136,23 @@ class PullrequestsController(BaseRepoCon
                     category='warning')
             redirect(url('summary_home', repo_name=org_repo.repo_name))
 
+        org_rev = request.GET.get('rev_end')
+        # rev_start is not directly useful - its parent could however be used
+        # as default for other and thus give a simple compare view
+        #other_rev = request.POST.get('rev_start')
+
         other_repos_info = {}
 
         c.org_repos = []
         c.org_repos.append((org_repo.repo_name, org_repo.repo_name))
         c.default_org_repo = org_repo.repo_name
-        c.org_refs, c.default_org_ref = self._get_repo_refs(org_repo.scm_instance)
+        c.org_refs, c.default_org_ref = self._get_repo_refs(org_repo.scm_instance, org_rev)
 
         c.other_repos = []
         # add org repo to other so we can open pull request against itself
         c.other_repos.extend(c.org_repos)
         c.default_other_repo = org_repo.repo_name
-        c.default_other_refs, c.default_other_ref = self._get_repo_refs(org_repo.scm_instance)
+        c.default_other_refs, c.default_other_ref = self._get_repo_refs(org_repo.scm_instance, branch_rev=org_rev)
         usr_data = lambda usr: dict(user_id=usr.user_id,
                                     username=usr.username,
                                     firstname=usr.firstname,
@@ -191,23 +211,12 @@ class PullrequestsController(BaseRepoCon
             return redirect(url('pullrequest_home', repo_name=repo_name))
 
         org_repo = _form['org_repo']
-        org_ref = _form['
+        org_ref = 'rev:merge:%s' % _form['merge_rev']
         other_repo = _form['other_repo']
-        other_ref = _form['
+        other_ref = 'rev:ancestor:%s' % _form['ancestor_rev']
         revisions = _form['revisions']
         reviewers = _form['review_members']
 
-        # if we have cherry picked pull request we don't care what is in
-        # org_ref/other_ref
-        rev_start = request.POST.get('rev_start')
-        rev_end = request.POST.get('rev_end')
-
-        if rev_start and rev_end:
-            # this is swapped to simulate that rev_end is a revision from
-            # parent of the fork
-            org_ref = 'rev:%s:%s' % (rev_end, rev_end)
-            other_ref = 'rev:%s:%s' % (rev_start, rev_start)
-
         title = _form['pullrequest_title']
         description = _form['pullrequest_desc']
 
@@ -265,9 +274,6 @@ class PullrequestsController(BaseRepoCon
         :param pull_request:
         :type pull_request:
        """
-        rev_start = request.GET.get('rev_start')
-        rev_end = request.GET.get('rev_end')
-
         org_repo = pull_request.org_repo
         (org_ref_type,
          org_ref_name,
@@ -279,7 +285,7 @@ class PullrequestsController(BaseRepoCon
          other_ref_rev) = pull_request.other_ref.split(':')
 
         # despite opening revisions for bookmarks/branches/tags, we always
-        # convert this to rev to prevent changes after book or branch change
+        # convert this to rev to prevent changes after bookmark or branch change
         org_ref = ('rev', org_ref_rev)
         other_ref = ('rev', other_ref_rev)
 
@@ -290,10 +296,6 @@ class PullrequestsController(BaseRepoCon
 
         c.cs_ranges = [org_repo.get_changeset(x) for x in pull_request.revisions]
 
-        other_ref = ('rev', getattr(c.cs_ranges[0].parents[0]
-                                    if c.cs_ranges[0].parents
-                                    else EmptyChangeset(), 'raw_id'))
-
         c.statuses = org_repo.statuses([x.raw_id for x in c.cs_ranges])
 
         c.org_ref = org_ref[1]
@@ -394,6 +396,7 @@ class PullrequestsController(BaseRepoCon
         c.changeset_statuses = ChangesetStatus.STATUSES
 
         c.as_form = False
+        c.ancestor = None  # there is one - but right here we don't know which
         return render('/pullrequests/pullrequest_show.html')
 
     @NotAnonymous()
@@ -3730,7 +3730,7 b' msgid "Last modified"' | |||||
3730 | msgstr "" |
|
3730 | msgstr "" | |
3731 |
|
3731 | |||
3732 | #: rhodecode/templates/files/files_browser.html:52 |
|
3732 | #: rhodecode/templates/files/files_browser.html:52 | |
3733 | msgid "Last commiter" |
|
3733 | msgid "Last committer" | |
3734 | msgstr "" |
|
3734 | msgstr "" | |
3735 |
|
3735 | |||
3736 | #: rhodecode/templates/files/files_edit.html:19 |
|
3736 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3869,7 +3869,7 b' msgid "Last modified"' | |||||
3869 | msgstr "Dernière modification" |
|
3869 | msgstr "Dernière modification" | |
3870 |
|
3870 | |||
3871 | #: rhodecode/templates/files/files_browser.html:52 |
|
3871 | #: rhodecode/templates/files/files_browser.html:52 | |
3872 | msgid "Last commiter" |
|
3872 | msgid "Last committer" | |
3873 | msgstr "Dernier commiteur" |
|
3873 | msgstr "Dernier commiteur" | |
3874 |
|
3874 | |||
3875 | #: rhodecode/templates/files/files_edit.html:19 |
|
3875 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3743,7 +3743,7 b' msgid "Last modified"' | |||||
3743 | msgstr "最終更新日" |
|
3743 | msgstr "最終更新日" | |
3744 |
|
3744 | |||
3745 | #: rhodecode/templates/files/files_browser.html:52 |
|
3745 | #: rhodecode/templates/files/files_browser.html:52 | |
3746 | msgid "Last commiter" |
|
3746 | msgid "Last committer" | |
3747 | msgstr "最後の作成者" |
|
3747 | msgstr "最後の作成者" | |
3748 |
|
3748 | |||
3749 | #: rhodecode/templates/files/files_edit.html:19 |
|
3749 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3836,7 +3836,7 b' msgid "Last modified"' | |||||
3836 | msgstr "Ostatnio modyfikowany" |
|
3836 | msgstr "Ostatnio modyfikowany" | |
3837 |
|
3837 | |||
3838 | #: rhodecode/templates/files/files_browser.html:52 |
|
3838 | #: rhodecode/templates/files/files_browser.html:52 | |
3839 | msgid "Last commiter" |
|
3839 | msgid "Last committer" | |
3840 | msgstr "Autor" |
|
3840 | msgstr "Autor" | |
3841 |
|
3841 | |||
3842 | #: rhodecode/templates/files/files_edit.html:19 |
|
3842 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3910,7 +3910,7 b' msgid "Last modified"' | |||||
3910 | msgstr "Última alteração" |
|
3910 | msgstr "Última alteração" | |
3911 |
|
3911 | |||
3912 | #: rhodecode/templates/files/files_browser.html:52 |
|
3912 | #: rhodecode/templates/files/files_browser.html:52 | |
3913 | msgid "Last commiter" |
|
3913 | msgid "Last committer" | |
3914 | msgstr "Último commiter" |
|
3914 | msgstr "Último commiter" | |
3915 |
|
3915 | |||
3916 | #: rhodecode/templates/files/files_edit.html:19 |
|
3916 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3680,7 +3680,7 b' msgid "Last modified"' | |||||
3680 | msgstr "" |
|
3680 | msgstr "" | |
3681 |
|
3681 | |||
3682 | #: rhodecode/templates/files/files_browser.html:52 |
|
3682 | #: rhodecode/templates/files/files_browser.html:52 | |
3683 | msgid "Last commiter" |
|
3683 | msgid "Last committer" | |
3684 | msgstr "" |
|
3684 | msgstr "" | |
3685 |
|
3685 | |||
3686 | #: rhodecode/templates/files/files_edit.html:19 |
|
3686 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3733,7 +3733,7 b' msgid "Last modified"' | |||||
3733 | msgstr "最后修改于" |
|
3733 | msgstr "最后修改于" | |
3734 |
|
3734 | |||
3735 | #: rhodecode/templates/files/files_browser.html:52 |
|
3735 | #: rhodecode/templates/files/files_browser.html:52 | |
3736 | msgid "Last commiter" |
|
3736 | msgid "Last committer" | |
3737 | msgstr "最后提交者" |
|
3737 | msgstr "最后提交者" | |
3738 |
|
3738 | |||
3739 | #: rhodecode/templates/files/files_edit.html:19 |
|
3739 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -3860,7 +3860,7 b' msgid "Last modified"' | |||||
3860 | msgstr "最後修改" |
|
3860 | msgstr "最後修改" | |
3861 |
|
3861 | |||
3862 | #: rhodecode/templates/files/files_browser.html:52 |
|
3862 | #: rhodecode/templates/files/files_browser.html:52 | |
3863 | msgid "Last commiter" |
|
3863 | msgid "Last committer" | |
3864 | msgstr "最後的遞交者" |
|
3864 | msgstr "最後的遞交者" | |
3865 |
|
3865 | |||
3866 | #: rhodecode/templates/files/files_edit.html:19 |
|
3866 | #: rhodecode/templates/files/files_edit.html:19 |
@@ -43,15 +43,17 @@ def _get_ip_addr(environ):
 
     ip = environ.get(proxy_key2)
     if ip:
-        # HTTP_X_FORWARDED_FOR can have mutliple ips inside
-        # the left-most being the original client, and each successive proxy
-        # that passed the request adding the IP address where it received the
-        # request from.
-        if ',' in ip:
-            ip = ip.split(',')[0].strip()
         return ip
 
     ip = environ.get(def_key, '0.0.0.0')
+
+    # HEADERS can have mutliple ips inside
+    # the left-most being the original client, and each successive proxy
+    # that passed the request adding the IP address where it received the
+    # request from.
+    if ',' in ip:
+        ip = ip.split(',')[0].strip()
+
     return ip
 
 
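After this change a comma-separated list of addresses is collapsed to its left-most entry when the fallback header is consulted, instead of inside the proxy-header branch. A small stand-alone model of the new flow (the header names below are stand-ins; the real key names are assigned earlier in _get_ip_addr, outside this hunk):

    def client_ip(environ,
                  proxy_key2='HTTP_X_FORWARDED_FOR',   # assumed name
                  def_key='REMOTE_ADDR'):              # assumed name
        ip = environ.get(proxy_key2)
        if ip:
            return ip
        ip = environ.get(def_key, '0.0.0.0')
        # keep only the left-most (original client) address
        if ',' in ip:
            ip = ip.split(',')[0].strip()
        return ip

    print client_ip({'REMOTE_ADDR': '203.0.113.7, 10.0.0.1'})  # 203.0.113.7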
@@ -279,7 +281,6 @@ class BaseController(WSGIController):
         # WSGIController.__call__ dispatches to the Controller method
         # the request is routed to. This routing information is
         # available in environ['pylons.routes_dict']
-        start = time.time()
         try:
             self.ip_addr = _get_ip_addr(environ)
             # make sure that we update permissions each time we call controller
@@ -300,10 +301,6 @@ class BaseController(WSGIController):
             )
             return WSGIController.__call__(self, environ, start_response)
         finally:
-            log.info('IP: %s Request to %s time: %.3fs' % (
-                _get_ip_addr(environ),
-                safe_unicode(_get_access_path(environ)), time.time() - start)
-            )
             meta.Session.remove()
 
 
@@ -59,6 +59,7 @@ class ResultWrapper(object):
 
 
 def run_task(task, *args, **kwargs):
+    global CELERY_ON
     if CELERY_ON:
         try:
             t = task.apply_async(args=args, kwargs=kwargs)
@@ -68,7 +69,6 @@ def run_task(task, *args, **kwargs):
     except socket.error, e:
         if isinstance(e, IOError) and e.errno == 111:
             log.debug('Unable to connect to celeryd. Sync execution')
-            global CELERY_ON
             CELERY_ON = False
         else:
             log.error(traceback.format_exc())
@@ -60,12 +60,17 @@ class StatusChangeOnClosedPullRequestErr
 
 class HTTPLockedRC(HTTPClientError):
    """
-    Special Exception For locked Repos in RhodeCode
+    Special Exception For locked Repos in RhodeCode, the return code can
+    be overwritten by _code keyword argument passed into constructors
    """
     code = 423
     title = explanation = 'Repository Locked'
 
     def __init__(self, reponame, username, *args, **kwargs):
+        from rhodecode import CONFIG
+        from rhodecode.lib.utils2 import safe_int
+        _code = CONFIG.get('lock_ret_code')
+        self.code = safe_int(_code, self.code)
         self.title = self.explanation = ('Repository `%s` locked by '
                                          'user `%s`' % (reponame, username))
         super(HTTPLockedRC, self).__init__(*args, **kwargs)
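With this change the HTTP status used for locked repositories is no longer hard-coded to 423: when lock_ret_code is set in the .ini (see the configuration hunks earlier in this changeset) it overrides the default at construction time. A simplified model of that behaviour (LockedResponse and the inlined safe_int are stand-ins for the real classes):

    def safe_int(val, default=None):
        # minimal stand-in for rhodecode.lib.utils2.safe_int
        try:
            return int(val)
        except (TypeError, ValueError):
            return default

    class LockedResponse(object):
        code = 423  # default, as in HTTPLockedRC

        def __init__(self, reponame, username, config=None):
            self.code = safe_int((config or {}).get('lock_ret_code'), self.code)
            self.title = ('Repository `%s` locked by user `%s`'
                          % (reponame, username))

    print LockedResponse('repo', 'joe').code                            # 423
    print LockedResponse('repo', 'joe', {'lock_ret_code': '200'}).code  # 200

The hooks further down in this changeset only raise the exception when the configured code is outside the 2xx range, so a 2xx value turns a locked repository into a warning message instead of a failed push or pull.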
@@ -550,13 +550,18 @@ def action_parser(user_log, feed=False,
             return link_to(lbl, _url, raw_id=rev.raw_id, repo_name=repo_name,
                            class_='lazy-cs' if lazy_cs else '')
 
+    def _get_op(rev_txt):
+        _op = None
+        _name = rev_txt
+        if len(rev_txt.split('=>')) == 2:
+            _op, _name = rev_txt.split('=>')
+        return _op, _name
+
     revs = []
     if len(filter(lambda v: v != '', revs_ids)) > 0:
         repo = None
         for rev in revs_ids[:revs_top_limit]:
-            _op
-            if len(rev.split('=>')) == 2:
-                _op, _name = rev.split('=>')
+            _op, _name = _get_op(rev)
 
             # we want parsed changesets, or new log store format is bad
             if parse_cs:
@@ -583,6 +588,10 @@ def action_parser(user_log, feed=False,
             [lnk(rev, repo_name) for rev in revs[:revs_limit]]
             )
         )
+        _op1, _name1 = _get_op(revs_ids[0])
+        _op2, _name2 = _get_op(revs_ids[-1])
+
+        _rev = '%s...%s' % (_name1, _name2)
 
         compare_view = (
             ' <div class="compare_view tooltip" title="%s">'
@@ -591,7 +600,7 @@ def action_parser(user_log, feed=False,
                 revs_ids[0][:12], revs_ids[-1][:12]
             ),
             url('changeset_home', repo_name=repo_name,
-                revision=
+                revision=_rev
             ),
             _('compare view')
         )
@@ -36,7 +36,7 b' from rhodecode.lib.utils import action_l' | |||||
36 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
36 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
37 | from rhodecode.lib.compat import json |
|
37 | from rhodecode.lib.compat import json | |
38 | from rhodecode.lib.exceptions import HTTPLockedRC |
|
38 | from rhodecode.lib.exceptions import HTTPLockedRC | |
39 | from rhodecode.lib.utils2 import safe_str
|
39 | from rhodecode.lib.utils2 import safe_str | |
40 | from rhodecode.model.db import Repository, User |
|
40 | from rhodecode.model.db import Repository, User | |
41 |
|
41 | |||
42 |
|
42 | |||
@@ -113,7 +113,14 b' def pre_push(ui, repo, **kwargs):' | |||||
113 | usr = User.get_by_username(username) |
|
113 | usr = User.get_by_username(username) | |
114 | if locked_by[0] and usr.user_id != int(locked_by[0]): |
|
114 | if locked_by[0] and usr.user_id != int(locked_by[0]): | |
115 | locked_by = User.get(locked_by[0]).username |
|
115 | locked_by = User.get(locked_by[0]).username | |
116 | raise HTTPLockedRC(repository, locked_by) |
|
116 | # this exception is interpreted in git/hg middlewares and based | |
|
117 | # on that a proper return code is served to the client |||
|
118 | _http_ret = HTTPLockedRC(repository, locked_by) | |||
|
119 | if str(_http_ret.code).startswith('2'): | |||
|
120 | #2xx Codes don't raise exceptions | |||
|
121 | sys.stdout.write(_http_ret.title) | |||
|
122 | else: | |||
|
123 | raise _http_ret | |||
117 |
|
124 | |||
118 |
|
125 | |||
119 | def pre_pull(ui, repo, **kwargs): |
|
126 | def pre_pull(ui, repo, **kwargs): | |
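
Both hooks now route the lock through the configured return code: 2xx codes only write a notice to stdout so the transaction continues, anything else aborts by raising. A condensed sketch of that branch, with HTTPLockedRC replaced by a plain stand-in:

    import sys

    class LockedStandIn(Exception):
        # stand-in for HTTPLockedRC; only code and title matter here
        def __init__(self, code, title):
            self.code = code
            self.title = title

    def handle_locked(repository, locked_by, code):
        # mirrors the hook logic above
        _http_ret = LockedStandIn(code, 'Repository `%s` locked by user `%s`'
                                  % (repository, locked_by))
        if str(_http_ret.code).startswith('2'):
            # 2xx codes don't raise, they just report
            sys.stdout.write(_http_ret.title + '\n')
        else:
            raise _http_ret

    handle_locked('repo1', 'marcink', 200)    # prints, push/pull goes on
    # handle_locked('repo1', 'marcink', 423)  # would raise and abort the request
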
@@ -139,7 +146,14 b' def pre_pull(ui, repo, **kwargs):' | |||||
139 |
|
146 | |||
140 | if locked_by[0]: |
|
147 | if locked_by[0]: | |
141 | locked_by = User.get(locked_by[0]).username |
|
148 | locked_by = User.get(locked_by[0]).username | |
142 | raise HTTPLockedRC(repository, locked_by) |
|
149 | # this exception is interpreted in git/hg middlewares and based | |
|
150 | # on that a proper return code is served to the client |||
|
151 | _http_ret = HTTPLockedRC(repository, locked_by) | |||
|
152 | if str(_http_ret.code).startswith('2'): | |||
|
153 | #2xx Codes don't raise exceptions | |||
|
154 | sys.stdout.write(_http_ret.title) | |||
|
155 | else: | |||
|
156 | raise _http_ret | |||
143 |
|
157 | |||
144 |
|
158 | |||
145 | def log_pull_action(ui, repo, **kwargs): |
|
159 | def log_pull_action(ui, repo, **kwargs): | |
@@ -159,12 +173,14 b' def log_pull_action(ui, repo, **kwargs):' | |||||
159 | repository = extras['repository'] |
|
173 | repository = extras['repository'] | |
160 | scm = extras['scm'] |
|
174 | scm = extras['scm'] | |
161 | make_lock = extras['make_lock'] |
|
175 | make_lock = extras['make_lock'] | |
|
176 | locked_by = extras['locked_by'] | |||
162 | ip = extras['ip'] |
|
177 | ip = extras['ip'] | |
163 | elif 'username' in rc_extras: |
|
178 | elif 'username' in rc_extras: | |
164 | username = rc_extras['username'] |
|
179 | username = rc_extras['username'] | |
165 | repository = rc_extras['repository'] |
|
180 | repository = rc_extras['repository'] | |
166 | scm = rc_extras['scm'] |
|
181 | scm = rc_extras['scm'] | |
167 | make_lock = rc_extras['make_lock'] |
|
182 | make_lock = rc_extras['make_lock'] | |
|
183 | locked_by = rc_extras['locked_by'] | |||
168 | ip = rc_extras['ip'] |
|
184 | ip = rc_extras['ip'] | |
169 | else: |
|
185 | else: | |
170 | raise Exception('Missing data in repo.ui and os.environ') |
|
186 | raise Exception('Missing data in repo.ui and os.environ') | |
@@ -185,6 +201,12 b' def log_pull_action(ui, repo, **kwargs):' | |||||
185 | #msg = 'Made lock on repo `%s`' % repository |
|
201 | #msg = 'Made lock on repo `%s`' % repository | |
186 | #sys.stdout.write(msg) |
|
202 | #sys.stdout.write(msg) | |
187 |
|
203 | |||
|
204 | if locked_by[0]: | |||
|
205 | locked_by = User.get(locked_by[0]).username | |||
|
206 | _http_ret = HTTPLockedRC(repository, locked_by) | |||
|
207 | if str(_http_ret.code).startswith('2'): | |||
|
208 | #2xx Codes don't raise exceptions | |||
|
209 | sys.stdout.write(_http_ret.title) | |||
188 | return 0 |
|
210 | return 0 | |
189 |
|
211 | |||
190 |
|
212 | |||
@@ -207,15 +229,19 b' def log_push_action(ui, repo, **kwargs):' | |||||
207 | repository = extras['repository'] |
|
229 | repository = extras['repository'] | |
208 | scm = extras['scm'] |
|
230 | scm = extras['scm'] | |
209 | make_lock = extras['make_lock'] |
|
231 | make_lock = extras['make_lock'] | |
|
232 | locked_by = extras['locked_by'] | |||
|
233 | action = extras['action'] | |||
210 | elif 'username' in rc_extras: |
|
234 | elif 'username' in rc_extras: | |
211 | username = rc_extras['username'] |
|
235 | username = rc_extras['username'] | |
212 | repository = rc_extras['repository'] |
|
236 | repository = rc_extras['repository'] | |
213 | scm = rc_extras['scm'] |
|
237 | scm = rc_extras['scm'] | |
214 | make_lock = rc_extras['make_lock'] |
|
238 | make_lock = rc_extras['make_lock'] | |
|
239 | locked_by = rc_extras['locked_by'] | |||
|
240 | action = extras['action'] | |||
215 | else: |
|
241 | else: | |
216 | raise Exception('Missing data in repo.ui and os.environ') |
|
242 | raise Exception('Missing data in repo.ui and os.environ') | |
217 |
|
243 | |||
218 | action =
|
244 | action = action + ':%s' | |
219 |
|
245 | |||
220 | if scm == 'hg': |
|
246 | if scm == 'hg': | |
221 | node = kwargs['node'] |
|
247 | node = kwargs['node'] | |
@@ -255,6 +281,13 b' def log_push_action(ui, repo, **kwargs):' | |||||
255 | msg = 'Released lock on repo `%s`\n' % repository |
|
281 | msg = 'Released lock on repo `%s`\n' % repository | |
256 | sys.stdout.write(msg) |
|
282 | sys.stdout.write(msg) | |
257 |
|
283 | |||
|
284 | if locked_by[0]: | |||
|
285 | locked_by = User.get(locked_by[0]).username | |||
|
286 | _http_ret = HTTPLockedRC(repository, locked_by) | |||
|
287 | if str(_http_ret.code).startswith('2'): | |||
|
288 | #2xx Codes don't raise exceptions | |||
|
289 | sys.stdout.write(_http_ret.title) | |||
|
290 | ||||
258 | return 0 |
|
291 | return 0 | |
259 |
|
292 | |||
260 |
|
293 |
@@ -234,7 +234,8 b' class SimpleGit(BaseVCSController):' | |||||
234 | app = self.__make_app(repo_name, repo_path, extras) |
|
234 | app = self.__make_app(repo_name, repo_path, extras) | |
235 | return app(environ, start_response) |
|
235 | return app(environ, start_response) | |
236 | except HTTPLockedRC, e: |
|
236 | except HTTPLockedRC, e: | |
237 | log.debug('Repository LOCKED ret code 423!') |
|
237 | _code = CONFIG.get('lock_ret_code') | |
|
238 | log.debug('Repository LOCKED ret code %s!' % (_code)) | |||
238 | return e(environ, start_response) |
|
239 | return e(environ, start_response) | |
239 | except Exception: |
|
240 | except Exception: | |
240 | log.error(traceback.format_exc()) |
|
241 | log.error(traceback.format_exc()) |
@@ -199,7 +199,8 b' class SimpleHg(BaseVCSController):' | |||||
199 | if str(e).find('not found') != -1: |
|
199 | if str(e).find('not found') != -1: | |
200 | return HTTPNotFound()(environ, start_response) |
|
200 | return HTTPNotFound()(environ, start_response) | |
201 | except HTTPLockedRC, e: |
|
201 | except HTTPLockedRC, e: | |
202 | log.debug('Repository LOCKED ret code 423!') |
|
202 | _code = CONFIG.get('lock_ret_code') | |
|
203 | log.debug('Repository LOCKED ret code %s!' % (_code)) | |||
203 | return e(environ, start_response) |
|
204 | return e(environ, start_response) | |
204 | except Exception: |
|
205 | except Exception: | |
205 | log.error(traceback.format_exc()) |
|
206 | log.error(traceback.format_exc()) |
@@ -240,7 +240,7 b' def is_valid_repo(repo_name, base_path, ' | |||||
240 | return False |
|
240 | return False | |
241 |
|
241 | |||
242 |
|
242 | |||
243 | def is_valid_repos_group(repos_group_name, base_path): |
|
243 | def is_valid_repos_group(repos_group_name, base_path, skip_path_check=False): | |
244 | """ |
|
244 | """ | |
245 | Returns True if given path is a repos group False otherwise |
|
245 | Returns True if given path is a repos group False otherwise | |
246 |
|
246 | |||
@@ -263,7 +263,7 b' def is_valid_repos_group(repos_group_nam' | |||||
263 | pass |
|
263 | pass | |
264 |
|
264 | |||
265 | # check if it's a valid path |
|
265 | # check if it's a valid path | |
266 | if os.path.isdir(full_path): |
|
266 | if skip_path_check or os.path.isdir(full_path): | |
267 | return True |
|
267 | return True | |
268 |
|
268 | |||
269 | return False |
|
269 | return False | |
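
The new `skip_path_check` flag lets callers trust the database answer without hitting the filesystem; the decisive expression boils down to this condensed form (the path here is made up):

    import os

    def looks_like_group(full_path, skip_path_check=False):
        # condensed form of the final check in is_valid_repos_group
        return skip_path_check or os.path.isdir(full_path)

    print(looks_like_group('/tmp/does-not-exist'))                        # False
    print(looks_like_group('/tmp/does-not-exist', skip_path_check=True))  # True
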
@@ -495,7 +495,6 b' def repo2db_mapper(initial_repo_list, re' | |||||
495 | #don't hold further removals on error |
|
495 | #don't hold further removals on error | |
496 | log.error(traceback.format_exc()) |
|
496 | log.error(traceback.format_exc()) | |
497 | sa.rollback() |
|
497 | sa.rollback() | |
498 |
|
||||
499 | return added, removed |
|
498 | return added, removed | |
500 |
|
499 | |||
501 |
|
500 |
@@ -565,11 +565,15 b' def fix_PATH(os_=None):' | |||||
565 |
|
565 | |||
566 |
|
566 | |||
567 | def obfuscate_url_pw(engine): |
|
567 | def obfuscate_url_pw(engine): | |
568 | from sqlalchemy.engine import url |
|
568 | _url = engine or '' | |
569 | url = url.make_url(engine) |
|
569 | from sqlalchemy.engine import url as sa_url | |
570 | if url.password: |
|
570 | try: | |
571 | url.password = 'XXXXX' |
|
571 | _url = sa_url.make_url(engine) | |
572 | return str(url) |
|
572 | if _url.password: | |
|
573 | _url.password = 'XXXXX' | |||
|
574 | except: | |||
|
575 | pass | |||
|
576 | return str(_url) | |||
573 |
|
577 | |||
574 |
|
578 | |||
575 | def get_server_url(environ): |
|
579 | def get_server_url(environ): |
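
The reworked `obfuscate_url_pw` is defensive about anything it cannot parse. The sketch below assumes the mutable URL objects of the SQLAlchemy versions RhodeCode targets (pre-1.4) and uses a broad `except Exception` instead of the bare `except` in the diff:

    from sqlalchemy.engine import url as sa_url

    def obfuscate_url_pw(engine):
        _url = engine or ''
        try:
            _url = sa_url.make_url(engine)
            if _url.password:
                _url.password = 'XXXXX'   # mask, never log real credentials
        except Exception:
            pass                          # unparsable input: return it untouched
        return str(_url)

    print(obfuscate_url_pw('postgresql://rhodecode:secret@localhost/rhodecode'))
    # -> postgresql://rhodecode:XXXXX@localhost/rhodecode
    print(obfuscate_url_pw(None))         # -> '' instead of a traceback
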
@@ -9,7 +9,7 b'' | |||||
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 |
|
12 | import datetime | ||
13 | from itertools import chain |
|
13 | from itertools import chain | |
14 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
14 | from rhodecode.lib.vcs.utils import author_name, author_email | |
15 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
15 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
@@ -311,6 +311,27 b' class BaseRepository(object):' | |||||
311 | """ |
|
311 | """ | |
312 | raise NotImplementedError |
|
312 | raise NotImplementedError | |
313 |
|
313 | |||
|
314 | def inject_ui(self, **extras): | |||
|
315 | """ | |||
|
316 | Injects extra parameters into UI object of this repo | |||
|
317 | """ | |||
|
318 | required_extras = [ | |||
|
319 | 'ip', | |||
|
320 | 'username', | |||
|
321 | 'action', | |||
|
322 | 'repository', | |||
|
323 | 'scm', | |||
|
324 | 'config', | |||
|
325 | 'server_url', | |||
|
326 | 'make_lock', | |||
|
327 | 'locked_by', | |||
|
328 | ] | |||
|
329 | for req in required_extras: | |||
|
330 | if req not in extras: | |||
|
331 | raise AttributeError('Missing attribute %s in extras' % (req)) | |||
|
332 | for k, v in extras.items(): | |||
|
333 | self._repo.ui.setconfig('rhodecode_extras', k, v) | |||
|
334 | ||||
314 |
|
335 | |||
315 | class BaseChangeset(object): |
|
336 | class BaseChangeset(object): | |
316 | """ |
|
337 | """ | |
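
`inject_ui` refuses to run with an incomplete extras dict before writing anything into the repo's ui object. The validation part on its own, with the required keys copied from the hunk:

    REQUIRED_EXTRAS = ['ip', 'username', 'action', 'repository', 'scm',
                       'config', 'server_url', 'make_lock', 'locked_by']

    def check_extras(extras):
        # same guard the new BaseRepository.inject_ui applies before calling
        # ui.setconfig('rhodecode_extras', k, v) for every pair
        for req in REQUIRED_EXTRAS:
            if req not in extras:
                raise AttributeError('Missing attribute %s in extras' % req)

    check_extras(dict.fromkeys(REQUIRED_EXTRAS, ''))    # complete dict: passes
    try:
        check_extras({'username': 'marcink'})
    except AttributeError as e:
        print(e)                                        # Missing attribute ip in extras
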
@@ -433,28 +454,28 b' class BaseChangeset(object):' | |||||
433 | raise NotImplementedError |
|
454 | raise NotImplementedError | |
434 |
|
455 | |||
435 | @LazyProperty |
|
456 | @LazyProperty | |
436 | def commiter(self): |
|
457 | def committer(self): | |
437 | """ |
|
458 | """ | |
438 | Returns Commiter for given commit |
|
459 | Returns Committer for given commit | |
439 | """ |
|
460 | """ | |
440 |
|
461 | |||
441 | raise NotImplementedError |
|
462 | raise NotImplementedError | |
442 |
|
463 | |||
443 | @LazyProperty |
|
464 | @LazyProperty | |
444 | def commiter_name(self): |
|
465 | def committer_name(self): | |
445 | """ |
|
466 | """ | |
446 | Returns Author name for given commit |
|
467 | Returns Author name for given commit | |
447 | """ |
|
468 | """ | |
448 |
|
469 | |||
449 | return author_name(self.commiter) |
|
470 | return author_name(self.committer) | |
450 |
|
471 | |||
451 | @LazyProperty |
|
472 | @LazyProperty | |
452 | def commiter_email(self): |
|
473 | def committer_email(self): | |
453 | """ |
|
474 | """ | |
454 | Returns Author email address for given commit |
|
475 | Returns Author email address for given commit | |
455 | """ |
|
476 | """ | |
456 |
|
477 | |||
457 | return author_email(self.commiter) |
|
478 | return author_email(self.committer) | |
458 |
|
479 | |||
459 | @LazyProperty |
|
480 | @LazyProperty | |
460 | def author(self): |
|
481 | def author(self): | |
@@ -959,12 +980,12 b' class EmptyChangeset(BaseChangeset):' | |||||
959 | """ |
|
980 | """ | |
960 |
|
981 | |||
961 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, |
|
982 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, | |
962 | alias=None, revision=-1, message='', author='', date=
|
983 | alias=None, revision=-1, message='', author='', date=None): | |
963 | self._empty_cs = cs |
|
984 | self._empty_cs = cs | |
964 | self.revision = revision |
|
985 | self.revision = revision | |
965 | self.message = message |
|
986 | self.message = message | |
966 | self.author = author |
|
987 | self.author = author | |
967 | self.date = date |
|
988 | self.date = date or datetime.datetime.fromtimestamp(0) | |
968 | self.repository = repo |
|
989 | self.repository = repo | |
969 | self.requested_revision = requested_revision |
|
990 | self.requested_revision = requested_revision | |
970 | self.alias = alias |
|
991 | self.alias = alias |
@@ -17,6 +17,7 b' from rhodecode.lib.vcs.nodes import File' | |||||
17 | from rhodecode.lib.vcs.utils import safe_unicode |
|
17 | from rhodecode.lib.vcs.utils import safe_unicode | |
18 | from rhodecode.lib.vcs.utils import date_fromtimestamp |
|
18 | from rhodecode.lib.vcs.utils import date_fromtimestamp | |
19 | from rhodecode.lib.vcs.utils.lazy import LazyProperty |
|
19 | from rhodecode.lib.vcs.utils.lazy import LazyProperty | |
|
20 | from rhodecode.lib.utils2 import safe_int | |||
20 |
|
21 | |||
21 |
|
22 | |||
22 | class GitChangeset(BaseChangeset): |
|
23 | class GitChangeset(BaseChangeset): | |
@@ -41,7 +42,7 b' class GitChangeset(BaseChangeset):' | |||||
41 | self._commit = commit |
|
42 | self._commit = commit | |
42 |
|
43 | |||
43 | self._tree_id = commit.tree |
|
44 | self._tree_id = commit.tree | |
44 | self._commiter_property = 'committer' |
|
45 | self._committer_property = 'committer' | |
45 | self._author_property = 'author' |
|
46 | self._author_property = 'author' | |
46 | self._date_property = 'commit_time' |
|
47 | self._date_property = 'commit_time' | |
47 | self._date_tz_property = 'commit_timezone' |
|
48 | self._date_tz_property = 'commit_timezone' | |
@@ -53,8 +54,8 b' class GitChangeset(BaseChangeset):' | |||||
53 | self._paths = {} |
|
54 | self._paths = {} | |
54 |
|
55 | |||
55 | @LazyProperty |
|
56 | @LazyProperty | |
56 | def commiter(self): |
|
57 | def committer(self): | |
57 | return safe_unicode(getattr(self._commit, self._commiter_property)) |
|
58 | return safe_unicode(getattr(self._commit, self._committer_property)) | |
58 |
|
59 | |||
59 | @LazyProperty |
|
60 | @LazyProperty | |
60 | def author(self): |
|
61 | def author(self): | |
@@ -275,10 +276,9 b' class GitChangeset(BaseChangeset):' | |||||
275 | """ |
|
276 | """ | |
276 | Returns last commit of the file at the given ``path``. |
|
277 | Returns last commit of the file at the given ``path``. | |
277 | """ |
|
278 | """ | |
278 | node = self.get_node(path) |
|
279 | return self.get_file_history(path, limit=1)[0] | |
279 | return node.history[0] |
|
|||
280 |
|
280 | |||
281 | def get_file_history(self, path): |
|
281 | def get_file_history(self, path, limit=None): | |
282 | """ |
|
282 | """ | |
283 | Returns history of file as reversed list of ``Changeset`` objects for |
|
283 | Returns history of file as reversed list of ``Changeset`` objects for | |
284 | which file at given ``path`` has been modified. |
|
284 | which file at given ``path`` has been modified. | |
@@ -287,11 +287,16 b' class GitChangeset(BaseChangeset):' | |||||
287 | which is generally not good. Should be replaced with algorithm |
|
287 | which is generally not good. Should be replaced with algorithm | |
288 | iterating commits. |
|
288 | iterating commits. | |
289 | """ |
|
289 | """ | |
|
290 | ||||
290 | self._get_filectx(path) |
|
291 | self._get_filectx(path) | |
291 |
|
292 | if limit: | ||
292 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( |
|
293 | cmd = 'log -n %s --pretty="format: %%H" -s -p %s -- "%s"' % ( | |
293 | self.id, path |
|
294 | safe_int(limit, 0), self.id, path | |
294 | ) |
|
295 | ) | |
|
296 | else: | |||
|
297 | cmd = 'log --pretty="format: %%H" -s -p %s -- "%s"' % ( | |||
|
298 | self.id, path | |||
|
299 | ) | |||
295 | so, se = self.repository.run_git_command(cmd) |
|
300 | so, se = self.repository.run_git_command(cmd) | |
296 | ids = re.findall(r'[0-9a-fA-F]{40}', so) |
|
301 | ids = re.findall(r'[0-9a-fA-F]{40}', so) | |
297 | return [self.repository.get_changeset(id) for id in ids] |
|
302 | return [self.repository.get_changeset(id) for id in ids] |
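
With `limit` the Git backend asks `git log` itself to stop early instead of slicing afterwards. A rough illustration of the two command strings the branch builds; the commit id and path are made up, and the real code routes the value through safe_int rather than int:

    def build_log_cmd(commit_id, path, limit=None):
        # mirrors the branching above: -n <limit> only when a limit is given
        if limit:
            return 'log -n %s --pretty="format: %%H" -s -p %s -- "%s"' % (
                int(limit), commit_id, path)
        return 'log --pretty="format: %%H" -s -p %s -- "%s"' % (commit_id, path)

    print(build_log_cmd('a' * 40, 'docs/index.rst', limit=1))
    print(build_log_cmd('a' * 40, 'docs/index.rst'))
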
@@ -67,14 +67,12 b' class GitRepository(BaseRepository):' | |||||
67 | @ThreadLocalLazyProperty |
|
67 | @ThreadLocalLazyProperty | |
68 | def _repo(self): |
|
68 | def _repo(self): | |
69 | repo = Repo(self.path) |
|
69 | repo = Repo(self.path) | |
70 | #temporary set that to now at later we will move it to constructor |
|
70 | # patch the instance of GitRepo with an "FAKE" ui object to add | |
71 | baseui = None |
|
71 | # compatibility layer with Mercurial | |
72 | if baseui is None: |
|
72 | if not hasattr(repo, 'ui'): | |
73 | from mercurial.ui import ui |
|
73 | from mercurial.ui import ui | |
74 | baseui = ui() |
|
74 | baseui = ui() | |
75 | # patch the instance of GitRepo with an "FAKE" ui object to add |
|
75 | setattr(repo, 'ui', baseui) | |
76 | # compatibility layer with Mercurial |
|
|||
77 | setattr(repo, 'ui', baseui) |
|
|||
78 | return repo |
|
76 | return repo | |
79 |
|
77 | |||
80 | @property |
|
78 | @property | |
@@ -306,6 +304,15 b' class GitRepository(BaseRepository):' | |||||
306 | url = ':///'.join(('file', url)) |
|
304 | url = ':///'.join(('file', url)) | |
307 | return url |
|
305 | return url | |
308 |
|
306 | |||
|
307 | def get_hook_location(self): | |||
|
308 | """ | |||
|
309 | returns absolute path to location where hooks are stored | |||
|
310 | """ | |||
|
311 | loc = os.path.join(self.path, 'hooks') | |||
|
312 | if not self.bare: | |||
|
313 | loc = os.path.join(self.path, '.git', 'hooks') | |||
|
314 | return loc | |||
|
315 | ||||
309 | @LazyProperty |
|
316 | @LazyProperty | |
310 | def name(self): |
|
317 | def name(self): | |
311 | return os.path.basename(self.path) |
|
318 | return os.path.basename(self.path) |
@@ -44,8 +44,8 b' class MercurialChangeset(BaseChangeset):' | |||||
44 | return safe_unicode(self._ctx.description()) |
|
44 | return safe_unicode(self._ctx.description()) | |
45 |
|
45 | |||
46 | @LazyProperty |
|
46 | @LazyProperty | |
47 | def commiter(self): |
|
47 | def committer(self): | |
48 | return safe_unicode(self.author)
|
48 | return safe_unicode(self.author) | |
49 |
|
49 | |||
50 | @LazyProperty |
|
50 | @LazyProperty | |
51 | def author(self): |
|
51 | def author(self): | |
@@ -219,19 +219,23 b' class MercurialChangeset(BaseChangeset):' | |||||
219 | """ |
|
219 | """ | |
220 | Returns last commit of the file at the given ``path``. |
|
220 | Returns last commit of the file at the given ``path``. | |
221 | """ |
|
221 | """ | |
222 | node = self.get_node(path) |
|
222 | return self.get_file_history(path, limit=1)[0] | |
223 | return node.history[0] |
|
|||
224 |
|
223 | |||
225 | def get_file_history(self, path): |
|
224 | def get_file_history(self, path, limit=None): | |
226 | """ |
|
225 | """ | |
227 | Returns history of file as reversed list of ``Changeset`` objects for |
|
226 | Returns history of file as reversed list of ``Changeset`` objects for | |
228 | which file at given ``path`` has been modified. |
|
227 | which file at given ``path`` has been modified. | |
229 | """ |
|
228 | """ | |
230 | fctx = self._get_filectx(path) |
|
229 | fctx = self._get_filectx(path) | |
231 | nodes = [fctx.filectx(x).node() for x in fctx.filelog()] |
|
230 | hist = [] | |
232 | changesets = [self.repository.get_changeset(hex(node)) |
|
231 | cnt = 0 | |
233 | for node in nodes]
|
232 | for cs in reversed([x for x in fctx.filelog()]): | |
234 | return changesets |
|
233 | cnt += 1 | |
|
234 | hist.append(hex(fctx.filectx(cs).node())) | |||
|
235 | if limit and cnt == limit: | |||
|
236 | break | |||
|
237 | ||||
|
238 | return [self.repository.get_changeset(node) for node in hist] | |||
235 |
|
239 | |||
236 | def get_file_annotate(self, path): |
|
240 | def get_file_annotate(self, path): | |
237 | """ |
|
241 | """ |
@@ -422,6 +422,12 b' class MercurialRepository(BaseRepository' | |||||
422 | url = "file:" + urllib.pathname2url(url) |
|
422 | url = "file:" + urllib.pathname2url(url) | |
423 | return url |
|
423 | return url | |
424 |
|
424 | |||
|
425 | def get_hook_location(self): | |||
|
426 | """ | |||
|
427 | returns absolute path to location where hooks are stored | |||
|
428 | """ | |||
|
429 | return os.path.join(self.path, '.hg', '.hgrc') | |||
|
430 | ||||
425 | def get_changeset(self, revision=None): |
|
431 | def get_changeset(self, revision=None): | |
426 | """ |
|
432 | """ | |
427 | Returns ``MercurialChangeset`` object representing repository's |
|
433 | Returns ``MercurialChangeset`` object representing repository's | |
@@ -492,7 +498,7 b' class MercurialRepository(BaseRepository' | |||||
492 | """ |
|
498 | """ | |
493 | return MercurialWorkdir(self) |
|
499 | return MercurialWorkdir(self) | |
494 |
|
500 | |||
495 | def get_config_value(self, section, name, config_file=None): |
|
501 | def get_config_value(self, section, name=None, config_file=None): | |
496 | """ |
|
502 | """ | |
497 | Returns configuration value for a given [``section``] and ``name``. |
|
503 | Returns configuration value for a given [``section``] and ``name``. | |
498 |
|
504 |
@@ -1,3 +1,14 b'' | |||||
|
1 | class _Missing(object): | |||
|
2 | ||||
|
3 | def __repr__(self): | |||
|
4 | return 'no value' | |||
|
5 | ||||
|
6 | def __reduce__(self): | |||
|
7 | return '_missing' | |||
|
8 | ||||
|
9 | _missing = _Missing() | |||
|
10 | ||||
|
11 | ||||
1 | class LazyProperty(object): |
|
12 | class LazyProperty(object): | |
2 | """ |
|
13 | """ | |
3 | Decorator for easier creation of ``property`` from potentially expensive to |
|
14 | Decorator for easier creation of ``property`` from potentially expensive to | |
@@ -24,8 +35,11 b' class LazyProperty(object):' | |||||
24 | def __get__(self, obj, klass=None): |
|
35 | def __get__(self, obj, klass=None): | |
25 | if obj is None: |
|
36 | if obj is None: | |
26 | return self |
|
37 | return self | |
27 | result = obj.__dict__[self.__name__] = self._func(obj)
|
38 | value = obj.__dict__.get(self.__name__, _missing) | |
28 | return result |
|
39 | if value is _missing: | |
|
40 | value = self._func(obj) | |||
|
41 | obj.__dict__[self.__name__] = value | |||
|
42 | return value | |||
29 |
|
43 | |||
30 | import threading |
|
44 | import threading | |
31 |
|
45 | |||
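
The `_missing` sentinel lets the descriptor tell "never computed" apart from a cached value that happens to be falsy (None, 0, ''), which the ThreadLocal variant below relies on as well. A compact, self-contained version of the pattern:

    class _Missing(object):
        def __repr__(self):
            return 'no value'

    _missing = _Missing()

    class LazyProperty(object):
        """Compute once on first access, cache on the instance."""
        def __init__(self, func):
            self._func = func
            self.__name__ = func.__name__

        def __get__(self, obj, klass=None):
            if obj is None:
                return self
            value = obj.__dict__.get(self.__name__, _missing)
            if value is _missing:               # not computed yet
                value = self._func(obj)
                obj.__dict__[self.__name__] = value
            return value

    class Repo(object):
        calls = 0

        @LazyProperty
        def description(self):
            Repo.calls += 1
            return None                         # falsy results are cached too

    r = Repo()
    r.description
    r.description
    print(Repo.calls)                           # 1: computed a single time
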
@@ -41,5 +55,8 b' class ThreadLocalLazyProperty(LazyProper' | |||||
41 | if not hasattr(obj, '__tl_dict__'): |
|
55 | if not hasattr(obj, '__tl_dict__'): | |
42 | obj.__tl_dict__ = threading.local().__dict__ |
|
56 | obj.__tl_dict__ = threading.local().__dict__ | |
43 |
|
57 | |||
44 | result = obj.__tl_dict__[self.__name__] = self._func(obj)
|
58 | value = obj.__tl_dict__.get(self.__name__, _missing) | |
45 | return result |
|
59 | if value is _missing: | |
|
60 | value = self._func(obj) | |||
|
61 | obj.__tl_dict__[self.__name__] = value | |||
|
62 | return value |
@@ -47,7 +47,7 b' from rhodecode.lib.vcs.utils.lazy import' | |||||
47 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
47 | from rhodecode.lib.vcs.backends.base import EmptyChangeset | |
48 |
|
48 | |||
49 | from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ |
|
49 | from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ | |
50 | safe_unicode, remove_suffix, remove_prefix |
|
50 | safe_unicode, remove_suffix, remove_prefix, time_to_datetime | |
51 | from rhodecode.lib.compat import json |
|
51 | from rhodecode.lib.compat import json | |
52 | from rhodecode.lib.caching_query import FromCache |
|
52 | from rhodecode.lib.caching_query import FromCache | |
53 |
|
53 | |||
@@ -938,15 +938,7 b' class Repository(Base, BaseModel):' | |||||
938 |
|
938 | |||
939 | @classmethod |
|
939 | @classmethod | |
940 | def inject_ui(cls, repo, extras={}): |
|
940 | def inject_ui(cls, repo, extras={}): | |
941 | from rhodecode.lib.vcs.backends.hg import MercurialRepository |
|
941 | repo.inject_ui(extras) | |
942 | from rhodecode.lib.vcs.backends.git import GitRepository |
|
|||
943 | required = (MercurialRepository, GitRepository) |
|
|||
944 | if not isinstance(repo, required): |
|
|||
945 | raise Exception('repo must be instance of %s' % required) |
|
|||
946 |
|
||||
947 | # inject ui extra param to log this action via push logger |
|
|||
948 | for k, v in extras.items(): |
|
|||
949 | repo._repo.ui.setconfig('rhodecode_extras', k, v) |
|
|||
950 |
|
942 | |||
951 | @classmethod |
|
943 | @classmethod | |
952 | def is_valid(cls, repo_name): |
|
944 | def is_valid(cls, repo_name): | |
@@ -980,7 +972,11 b' class Repository(Base, BaseModel):' | |||||
980 | enable_statistics=repo.enable_statistics, |
|
972 | enable_statistics=repo.enable_statistics, | |
981 | enable_locking=repo.enable_locking, |
|
973 | enable_locking=repo.enable_locking, | |
982 | enable_downloads=repo.enable_downloads, |
|
974 | enable_downloads=repo.enable_downloads, | |
983 | last_changeset=repo.changeset_cache |
|
975 | last_changeset=repo.changeset_cache, | |
|
976 | locked_by=User.get(self.locked[0]).get_api_data() \ | |||
|
977 | if self.locked[0] else None, | |||
|
978 | locked_date=time_to_datetime(self.locked[1]) \ | |||
|
979 | if self.locked[1] else None | |||
984 | ) |
|
980 | ) | |
985 | rc_config = RhodeCodeSetting.get_app_settings() |
|
981 | rc_config = RhodeCodeSetting.get_app_settings() | |
986 | repository_fields = str2bool(rc_config.get('rhodecode_repository_fields')) |
|
982 | repository_fields = str2bool(rc_config.get('rhodecode_repository_fields')) | |
@@ -1002,6 +998,10 b' class Repository(Base, BaseModel):' | |||||
1002 | Session().add(repo) |
|
998 | Session().add(repo) | |
1003 | Session().commit() |
|
999 | Session().commit() | |
1004 |
|
1000 | |||
|
1001 | @classmethod | |||
|
1002 | def getlock(cls, repo): | |||
|
1003 | return repo.locked | |||
|
1004 | ||||
1005 | @property |
|
1005 | @property | |
1006 | def last_db_change(self): |
|
1006 | def last_db_change(self): | |
1007 | return self.updated_on |
|
1007 | return self.updated_on | |
@@ -1341,15 +1341,13 b' class RepoGroup(Base, BaseModel):' | |||||
1341 |
|
1341 | |||
1342 | return cnt + children_count(self) |
|
1342 | return cnt + children_count(self) | |
1343 |
|
1343 | |||
1344 | def recursive_groups_and_repos(self):
|
1344 | def _recursive_objects(self, include_repos=True): | |
1345 | """ |
|
|||
1346 | Recursive return all groups, with repositories in those groups |
|
|||
1347 | """ |
|
|||
1348 | all_ = [] |
|
1345 | all_ = [] | |
1349 |
|
1346 | |||
1350 | def _get_members(root_gr): |
|
1347 | def _get_members(root_gr): | |
1351 | for r in root_gr.repositories:
|
1348 | if include_repos: | |
1352 | all_.append(r) |
|
1349 | for r in root_gr.repositories: | |
|
1350 | all_.append(r) | |||
1353 | childs = root_gr.children.all() |
|
1351 | childs = root_gr.children.all() | |
1354 | if childs: |
|
1352 | if childs: | |
1355 | for gr in childs: |
|
1353 | for gr in childs: | |
@@ -1359,6 +1357,18 b' class RepoGroup(Base, BaseModel):' | |||||
1359 | _get_members(self) |
|
1357 | _get_members(self) | |
1360 | return [self] + all_ |
|
1358 | return [self] + all_ | |
1361 |
|
1359 | |||
|
1360 | def recursive_groups_and_repos(self): | |||
|
1361 | """ | |||
|
1362 | Recursive return all groups, with repositories in those groups | |||
|
1363 | """ | |||
|
1364 | return self._recursive_objects() | |||
|
1365 | ||||
|
1366 | def recursive_groups(self): | |||
|
1367 | """ | |||
|
1368 | Returns all children groups for this group including children of children | |||
|
1369 | """ | |||
|
1370 | return self._recursive_objects(include_repos=False) | |||
|
1371 | ||||
1362 | def get_new_name(self, group_name): |
|
1372 | def get_new_name(self, group_name): | |
1363 | """ |
|
1373 | """ | |
1364 | returns new full group name based on parent and new name |
|
1374 | returns new full group name based on parent and new name | |
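
`recursive_groups_and_repos` and the new `recursive_groups` now share one traversal. A small in-memory stand-in for the SQLAlchemy-backed RepoGroup (names and repos are invented) to show what the two variants return:

    class Group(object):
        def __init__(self, name, repos=(), children=()):
            self.name = name
            self.repositories = list(repos)
            self.children = list(children)

        def _recursive_objects(self, include_repos=True):
            # same traversal shape as RepoGroup._recursive_objects above
            all_ = []

            def _get_members(root_gr):
                if include_repos:
                    all_.extend(root_gr.repositories)
                for gr in root_gr.children:
                    all_.append(gr)
                    _get_members(gr)

            _get_members(self)
            return [self] + all_

        def recursive_groups_and_repos(self):
            return self._recursive_objects()

        def recursive_groups(self):
            return self._recursive_objects(include_repos=False)

    docs = Group('docs', repos=['docs/manual'])
    root = Group('root', repos=['root/repo1'], children=[docs])
    print(len(root.recursive_groups_and_repos()))  # 4: both groups plus both repos
    print(len(root.recursive_groups()))            # 2: groups only
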
@@ -1728,7 +1738,7 b' class CacheInvalidation(Base, BaseModel)' | |||||
1728 | for inv_obj in inv_objs: |
|
1738 | for inv_obj in inv_objs: | |
1729 | inv_obj.cache_active = False |
|
1739 | inv_obj.cache_active = False | |
1730 | log.debug('marking %s key for invalidation based on key=%s,repo_name=%s' |
|
1740 | log.debug('marking %s key for invalidation based on key=%s,repo_name=%s' | |
1731 | % (inv_obj, key, repo_name)) |
|
1741 | % (inv_obj, key, safe_str(repo_name))) | |
1732 | invalidated_keys.append(inv_obj.cache_key) |
|
1742 | invalidated_keys.append(inv_obj.cache_key) | |
1733 | Session().add(inv_obj) |
|
1743 | Session().add(inv_obj) | |
1734 | Session().commit() |
|
1744 | Session().commit() |
@@ -395,4 +395,7 b' def PullRequestForm(repo_id):' | |||||
395 | pullrequest_title = v.UnicodeString(strip=True, required=True, min=3) |
|
395 | pullrequest_title = v.UnicodeString(strip=True, required=True, min=3) | |
396 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
396 | pullrequest_desc = v.UnicodeString(strip=True, required=False) | |
397 |
|
397 | |||
|
398 | ancestor_rev = v.UnicodeString(strip=True, required=True) | |||
|
399 | merge_rev = v.UnicodeString(strip=True, required=True) | |||
|
400 | ||||
398 | return _PullRequestForm |
|
401 | return _PullRequestForm |
@@ -161,7 +161,7 b' class PullRequestModel(BaseModel):' | |||||
161 | pull_request.updated_on = datetime.datetime.now() |
|
161 | pull_request.updated_on = datetime.datetime.now() | |
162 | Session().add(pull_request) |
|
162 | Session().add(pull_request) | |
163 |
|
163 | |||
164 | def _get_changesets(self, alias, org_repo, org_ref, other_repo, other_ref): |
|
164 | def _get_changesets(self, alias, org_repo, org_ref, other_repo, other_ref, merge): | |
165 | """ |
|
165 | """ | |
166 | Returns a list of changesets that can be merged from org_repo@org_ref |
|
166 | Returns a list of changesets that can be merged from org_repo@org_ref | |
167 | to other_repo@other_ref ... and the ancestor that would be used for merge |
|
167 | to other_repo@other_ref ... and the ancestor that would be used for merge | |
@@ -211,16 +211,21 b' class PullRequestModel(BaseModel):' | |||||
211 | else: |
|
211 | else: | |
212 | hgrepo = other_repo._repo |
|
212 | hgrepo = other_repo._repo | |
213 |
|
213 | |||
214 | revs = ["ancestors(id('%s')) and not ancestors(id('%s'))" % |
|
214 | if merge: | |
215 | (other_rev, org_rev)] |
|
215 | revs = ["ancestors(id('%s')) and not ancestors(id('%s')) and not id('%s')" % | |
216 | changesets = [other_repo.get_changeset(cs) |
|
216 | (other_rev, org_rev, org_rev)] | |
217 | for cs in scmutil.revrange(hgrepo, revs)] |
|
|||
218 |
|
217 | |||
219 | if org_repo != other_repo: |
|
|||
220 | ancestors = scmutil.revrange(hgrepo, |
|
218 | ancestors = scmutil.revrange(hgrepo, | |
221 | ["ancestor(id('%s'), id('%s'))" % (org_rev, other_rev)]) |
|
219 | ["ancestor(id('%s'), id('%s'))" % (org_rev, other_rev)]) | |
222 | if len(ancestors) == 1: |
|
220 | if len(ancestors) == 1: | |
223 | ancestor = hgrepo[ancestors[0]].hex() |
|
221 | ancestor = hgrepo[ancestors[0]].hex() | |
|
222 | else: | |||
|
223 | # TODO: have both + and - changesets | |||
|
224 | revs = ["id('%s') :: id('%s') - id('%s')" % | |||
|
225 | (org_rev, other_rev, org_rev)] | |||
|
226 | ||||
|
227 | changesets = [other_repo.get_changeset(cs) | |||
|
228 | for cs in scmutil.revrange(hgrepo, revs)] | |||
224 |
|
229 | |||
225 | elif alias == 'git': |
|
230 | elif alias == 'git': | |
226 | assert org_repo == other_repo, (org_repo, other_repo) # no git support for different repos |
|
231 | assert org_repo == other_repo, (org_repo, other_repo) # no git support for different repos | |
@@ -233,7 +238,7 b' class PullRequestModel(BaseModel):' | |||||
233 |
|
238 | |||
234 | return changesets, ancestor |
|
239 | return changesets, ancestor | |
235 |
|
240 | |||
236 | def get_compare_data(self, org_repo, org_ref, other_repo, other_ref): |
|
241 | def get_compare_data(self, org_repo, org_ref, other_repo, other_ref, merge): | |
237 | """ |
|
242 | """ | |
238 | Returns incoming changesets for mercurial repositories |
|
243 | Returns incoming changesets for mercurial repositories | |
239 |
|
244 | |||
@@ -251,5 +256,6 b' class PullRequestModel(BaseModel):' | |||||
251 |
|
256 | |||
252 | cs_ranges, ancestor = self._get_changesets(org_repo.scm_instance.alias, |
|
257 | cs_ranges, ancestor = self._get_changesets(org_repo.scm_instance.alias, | |
253 | org_repo.scm_instance, org_ref, |
|
258 | org_repo.scm_instance, org_ref, | |
254 | other_repo.scm_instance, other_ref)
|
259 | other_repo.scm_instance, other_ref, | |
|
260 | merge) | |||
255 | return cs_ranges, ancestor |
|
261 | return cs_ranges, ancestor |
@@ -32,7 +32,7 b' from datetime import datetime' | |||||
32 | from rhodecode.lib.vcs.backends import get_backend |
|
32 | from rhodecode.lib.vcs.backends import get_backend | |
33 | from rhodecode.lib.compat import json |
|
33 | from rhodecode.lib.compat import json | |
34 | from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode,\ |
|
34 | from rhodecode.lib.utils2 import LazyProperty, safe_str, safe_unicode,\ | |
35 | remove_prefix |
|
35 | remove_prefix, obfuscate_url_pw | |
36 | from rhodecode.lib.caching_query import FromCache |
|
36 | from rhodecode.lib.caching_query import FromCache | |
37 | from rhodecode.lib.hooks import log_create_repository, log_delete_repository |
|
37 | from rhodecode.lib.hooks import log_create_repository, log_delete_repository | |
38 |
|
38 | |||
@@ -42,8 +42,6 b' from rhodecode.model.db import Repositor' | |||||
42 | RhodeCodeSetting, RepositoryField |
|
42 | RhodeCodeSetting, RepositoryField | |
43 | from rhodecode.lib import helpers as h |
|
43 | from rhodecode.lib import helpers as h | |
44 | from rhodecode.lib.auth import HasRepoPermissionAny |
|
44 | from rhodecode.lib.auth import HasRepoPermissionAny | |
45 | from rhodecode.lib.vcs.backends.base import EmptyChangeset |
|
|||
46 |
|
||||
47 |
|
45 | |||
48 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
49 |
|
47 | |||
@@ -640,7 +638,8 b' class RepoModel(BaseModel):' | |||||
640 | raise Exception('This path %s is a valid group' % repo_path) |
|
638 | raise Exception('This path %s is a valid group' % repo_path) | |
641 |
|
639 | |||
642 | log.info('creating repo %s in %s @ %s' % ( |
|
640 | log.info('creating repo %s in %s @ %s' % ( | |
643 | repo_name, safe_unicode(repo_path),
|
641 | repo_name, safe_unicode(repo_path), | |
|
642 | obfuscate_url_pw(clone_uri) | |||
644 | ) |
|
643 | ) | |
645 | ) |
|
644 | ) | |
646 | backend = get_backend(alias) |
|
645 | backend = get_backend(alias) |
@@ -249,27 +249,37 b' class ReposGroupModel(BaseModel):' | |||||
249 |
|
249 | |||
250 | # change properties |
|
250 | # change properties | |
251 | repos_group.group_description = form_data['group_description'] |
|
251 | repos_group.group_description = form_data['group_description'] | |
252 | repos_group.parent_group = RepoGroup.get(form_data['group_parent_id']) |
|
|||
253 | repos_group.group_parent_id = form_data['group_parent_id'] |
|
252 | repos_group.group_parent_id = form_data['group_parent_id'] | |
254 | repos_group.enable_locking = form_data['enable_locking'] |
|
253 | repos_group.enable_locking = form_data['enable_locking'] | |
|
254 | ||||
|
255 | repos_group.parent_group = RepoGroup.get(form_data['group_parent_id']) | |||
255 | repos_group.group_name = repos_group.get_new_name(form_data['group_name']) |
|
256 | repos_group.group_name = repos_group.get_new_name(form_data['group_name']) | |
256 | new_path = repos_group.full_path |
|
257 | new_path = repos_group.full_path | |
257 |
|
||||
258 | self.sa.add(repos_group) |
|
258 | self.sa.add(repos_group) | |
259 |
|
259 | |||
260 | # iterate over all members of this groups and
|
260 | # iterate over all members of this groups and do fixes | |
|
261 | # set locking if given | |||
|
262 | # if obj is a repoGroup also fix the name of the group according | |||
|
263 | # to the parent | |||
|
264 | # if obj is a Repo fix it's name | |||
261 | # this can be potentially heavy operation |
|
265 | # this can be potentially heavy operation | |
262 | for obj in repos_group.recursive_groups_and_repos(): |
|
266 | for obj in repos_group.recursive_groups_and_repos(): | |
263 | #set the value from it's parent |
|
267 | #set the value from it's parent | |
264 | obj.enable_locking = repos_group.enable_locking |
|
268 | obj.enable_locking = repos_group.enable_locking | |
|
269 | if isinstance(obj, RepoGroup): | |||
|
270 | new_name = obj.get_new_name(obj.name) | |||
|
271 | log.debug('Fixing group %s to new name %s' \ | |||
|
272 | % (obj.group_name, new_name)) | |||
|
273 | obj.group_name = new_name | |||
|
274 | elif isinstance(obj, Repository): | |||
|
275 | # we need to get all repositories from this new group and | |||
|
276 | # rename them accordingly to new group path | |||
|
277 | new_name = obj.get_new_name(obj.just_name) | |||
|
278 | log.debug('Fixing repo %s to new name %s' \ | |||
|
279 | % (obj.repo_name, new_name)) | |||
|
280 | obj.repo_name = new_name | |||
265 | self.sa.add(obj) |
|
281 | self.sa.add(obj) | |
266 |
|
282 | |||
267 | # we need to get all repositories from this new group and |
|
|||
268 | # rename them accordingly to new group path |
|
|||
269 | for r in repos_group.repositories: |
|
|||
270 | r.repo_name = r.get_new_name(r.just_name) |
|
|||
271 | self.sa.add(r) |
|
|||
272 |
|
||||
273 | self.__rename_group(old_path, new_path) |
|
283 | self.__rename_group(old_path, new_path) | |
274 |
|
284 | |||
275 | return repos_group |
|
285 | return repos_group |
@@ -44,13 +44,14 b' from rhodecode.lib.vcs.backends.base imp' | |||||
44 |
|
44 | |||
45 | from rhodecode import BACKENDS |
|
45 | from rhodecode import BACKENDS | |
46 | from rhodecode.lib import helpers as h |
|
46 | from rhodecode.lib import helpers as h | |
47 | from rhodecode.lib.utils2 import safe_str, safe_unicode |
|
47 | from rhodecode.lib.utils2 import safe_str, safe_unicode, get_server_url | |
48 | from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny |
|
48 | from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny | |
49 | from rhodecode.lib.utils import get_filesystem_repos, make_ui, \ |
|
49 | from rhodecode.lib.utils import get_filesystem_repos, make_ui, \ | |
50 | action_logger, REMOVED_REPO_PAT |
|
50 | action_logger, REMOVED_REPO_PAT | |
51 | from rhodecode.model import BaseModel |
|
51 | from rhodecode.model import BaseModel | |
52 | from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \ |
|
52 | from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \ | |
53 | UserFollowing, UserLog, User, RepoGroup, PullRequest |
|
53 | UserFollowing, UserLog, User, RepoGroup, PullRequest | |
|
54 | from rhodecode.lib.hooks import log_push_action | |||
54 |
|
55 | |||
55 | log = logging.getLogger(__name__) |
|
56 | log = logging.getLogger(__name__) | |
56 |
|
57 | |||
@@ -402,6 +403,60 b' class ScmModel(BaseModel):' | |||||
402 | self.sa.add(repo) |
|
403 | self.sa.add(repo) | |
403 | return repo |
|
404 | return repo | |
404 |
|
405 | |||
|
406 | def _handle_push(self, repo, username, action, repo_name, revisions): | |||
|
407 | """ | |||
|
408 | Triggers push action hooks | |||
|
409 | ||||
|
410 | :param repo: SCM repo | |||
|
411 | :param username: username who pushes | |||
|
412 | :param action: push/push_local/push_remote |||
|
413 | :param repo_name: name of repo | |||
|
414 | :param revisions: list of revisions that we pushed | |||
|
415 | """ | |||
|
416 | from rhodecode import CONFIG | |||
|
417 | from rhodecode.lib.base import _get_ip_addr | |||
|
418 | try: | |||
|
419 | from pylons import request | |||
|
420 | environ = request.environ | |||
|
421 | except TypeError: | |||
|
422 | # we might use this outside of request context, let's fake the | |||
|
423 | # environ data | |||
|
424 | from webob import Request | |||
|
425 | environ = Request.blank('').environ | |||
|
426 | ||||
|
427 | #trigger push hook | |||
|
428 | extras = { | |||
|
429 | 'ip': _get_ip_addr(environ), | |||
|
430 | 'username': username, | |||
|
431 | 'action': 'push_local', | |||
|
432 | 'repository': repo_name, | |||
|
433 | 'scm': repo.alias, | |||
|
434 | 'config': CONFIG['__file__'], | |||
|
435 | 'server_url': get_server_url(environ), | |||
|
436 | 'make_lock': None, | |||
|
437 | 'locked_by': [None, None] | |||
|
438 | } | |||
|
439 | _scm_repo = repo._repo | |||
|
440 | repo.inject_ui(**extras) | |||
|
441 | if repo.alias == 'hg': | |||
|
442 | log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0]) | |||
|
443 | elif repo.alias == 'git': | |||
|
444 | log_push_action(_scm_repo.ui, _scm_repo, _git_revs=revisions) | |||
|
445 | ||||
|
446 | def _get_IMC_module(self, scm_type): | |||
|
447 | """ | |||
|
448 | Returns InMemoryCommit class based on scm_type | |||
|
449 | ||||
|
450 | :param scm_type: | |||
|
451 | """ | |||
|
452 | if scm_type == 'hg': | |||
|
453 | from rhodecode.lib.vcs.backends.hg import \ | |||
|
454 | MercurialInMemoryChangeset as IMC | |||
|
455 | elif scm_type == 'git': | |||
|
456 | from rhodecode.lib.vcs.backends.git import \ | |||
|
457 | GitInMemoryChangeset as IMC | |||
|
458 | return IMC | |||
|
459 | ||||
405 | def pull_changes(self, repo, username): |
|
460 | def pull_changes(self, repo, username): | |
406 | dbrepo = self.__get_repo(repo) |
|
461 | dbrepo = self.__get_repo(repo) | |
407 | clone_uri = dbrepo.clone_uri |
|
462 | clone_uri = dbrepo.clone_uri | |
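
`_get_IMC_module` centralises the hg/git in-memory-changeset import that used to be duplicated in commit_change and create_node. A dispatch sketch with stand-in classes; the real helper imports MercurialInMemoryChangeset / GitInMemoryChangeset from the vcs backends and has no explicit error branch:

    class MercurialInMemoryChangesetStub(object):
        pass

    class GitInMemoryChangesetStub(object):
        pass

    def get_imc_module(scm_type):
        # same idea as ScmModel._get_IMC_module, with an explicit failure case
        backends = {
            'hg': MercurialInMemoryChangesetStub,
            'git': GitInMemoryChangesetStub,
        }
        try:
            return backends[scm_type]
        except KeyError:
            raise ValueError('unsupported scm type: %s' % scm_type)

    print(get_imc_module('hg').__name__)    # MercurialInMemoryChangesetStub
    print(get_imc_module('git').__name__)   # GitInMemoryChangesetStub
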
@@ -409,26 +464,13 b' class ScmModel(BaseModel):' | |||||
409 | raise Exception("This repository doesn't have a clone uri") |
|
464 | raise Exception("This repository doesn't have a clone uri") | |
410 |
|
465 | |||
411 | repo = dbrepo.scm_instance |
|
466 | repo = dbrepo.scm_instance | |
412 | from rhodecode import CONFIG |
|
467 | repo_name = dbrepo.repo_name | |
413 | try: |
|
468 | try: | |
414 | extras = { |
|
|||
415 | 'ip': '', |
|
|||
416 | 'username': username, |
|
|||
417 | 'action': 'push_remote', |
|
|||
418 | 'repository': dbrepo.repo_name, |
|
|||
419 | 'scm': repo.alias, |
|
|||
420 | 'config': CONFIG['__file__'], |
|
|||
421 | 'make_lock': None, |
|
|||
422 | 'locked_by': [None, None] |
|
|||
423 | } |
|
|||
424 |
|
||||
425 | Repository.inject_ui(repo, extras=extras) |
|
|||
426 |
|
||||
427 | if repo.alias == 'git': |
|
469 | if repo.alias == 'git': | |
428 | repo.fetch(clone_uri) |
|
470 | repo.fetch(clone_uri) | |
429 | else: |
|
471 | else: | |
430 | repo.pull(clone_uri) |
|
472 | repo.pull(clone_uri) | |
431 | self.mark_for_invalidation(
|
473 | self.mark_for_invalidation(repo_name) | |
432 | except: |
|
474 | except: | |
433 | log.error(traceback.format_exc()) |
|
475 | log.error(traceback.format_exc()) | |
434 | raise |
|
476 | raise | |
@@ -441,13 +483,8 b' class ScmModel(BaseModel):' | |||||
441 | :param repo: SCM instance |
|
483 | :param repo: SCM instance | |
442 |
|
484 | |||
443 | """ |
|
485 | """ | |
444 |
|
486 | user = self._get_user(user) | ||
445 | if repo.alias == 'hg': |
|
487 | IMC = self._get_IMC_module(repo.alias) | |
446 | from rhodecode.lib.vcs.backends.hg import \ |
|
|||
447 | MercurialInMemoryChangeset as IMC |
|
|||
448 | elif repo.alias == 'git': |
|
|||
449 | from rhodecode.lib.vcs.backends.git import \ |
|
|||
450 | GitInMemoryChangeset as IMC |
|
|||
451 |
|
488 | |||
452 | # decoding here will force that we have proper encoded values |
|
489 | # decoding here will force that we have proper encoded values | |
453 | # in any other case this will throw exceptions and deny commit |
|
490 | # in any other case this will throw exceptions and deny commit | |
@@ -463,20 +500,21 b' class ScmModel(BaseModel):' | |||||
463 | author=author, |
|
500 | author=author, | |
464 | parents=[cs], branch=cs.branch) |
|
501 | parents=[cs], branch=cs.branch) | |
465 |
|
502 | |||
466 | action = 'push_local:%s' % tip.raw_id |
|
|||
467 | action_logger(user, action, repo_name) |
|
|||
468 | self.mark_for_invalidation(repo_name) |
|
503 | self.mark_for_invalidation(repo_name) | |
|
504 | self._handle_push(repo, | |||
|
505 | username=user.username, | |||
|
506 | action='push_local', | |||
|
507 | repo_name=repo_name, | |||
|
508 | revisions=[tip.raw_id]) | |||
469 | return tip |
|
509 | return tip | |
470 |
|
510 | |||
471 | def create_node(self, repo, repo_name, cs, user, author, message, content, |
|
511 | def create_node(self, repo, repo_name, cs, user, author, message, content, | |
472 | f_path): |
|
512 | f_path): | |
473 | if repo.alias == 'hg': |
|
513 | user = self._get_user(user) | |
474 | from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC |
|
514 | IMC = self._get_IMC_module(repo.alias) | |
475 | elif repo.alias == 'git': |
|
515 | ||
476 | from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC |
|
|||
477 | # decoding here will force that we have proper encoded values |
|
516 | # decoding here will force that we have proper encoded values | |
478 | # in any other case this will throw exceptions and deny commit |
|
517 | # in any other case this will throw exceptions and deny commit | |
479 |
|
||||
480 | if isinstance(content, (basestring,)): |
|
518 | if isinstance(content, (basestring,)): | |
481 | content = safe_str(content) |
|
519 | content = safe_str(content) | |
482 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
520 | elif isinstance(content, (file, cStringIO.OutputType,)): | |
@@ -502,9 +540,12 b' class ScmModel(BaseModel):' | |||||
502 | author=author, |
|
540 | author=author, | |
503 | parents=parents, branch=cs.branch) |
|
541 | parents=parents, branch=cs.branch) | |
504 |
|
542 | |||
505 | action = 'push_local:%s' % tip.raw_id |
|
|||
506 | action_logger(user, action, repo_name) |
|
|||
507 | self.mark_for_invalidation(repo_name) |
|
543 | self.mark_for_invalidation(repo_name) | |
|
544 | self._handle_push(repo, | |||
|
545 | username=user.username, | |||
|
546 | action='push_local', | |||
|
547 | repo_name=repo_name, | |||
|
548 | revisions=[tip.raw_id]) | |||
508 | return tip |
|
549 | return tip | |
509 |
|
550 | |||
510 | def get_nodes(self, repo_name, revision, root_path='/', flat=True): |
|
551 | def get_nodes(self, repo_name, revision, root_path='/', flat=True): |
@@ -416,6 +416,8 b' def ValidCloneUri():' | |||||
416 | svnremoterepo(ui, url).capabilities |
|
416 | svnremoterepo(ui, url).capabilities | |
417 | elif url.startswith('git+http'): |
|
417 | elif url.startswith('git+http'): | |
418 | raise NotImplementedError() |
|
418 | raise NotImplementedError() | |
|
419 | else: | |||
|
420 | raise Exception('clone from URI %s not allowed' % (url)) | |||
419 |
|
421 | |||
420 | elif repo_type == 'git': |
|
422 | elif repo_type == 'git': | |
421 | from rhodecode.lib.vcs.backends.git.repository import GitRepository |
|
423 | from rhodecode.lib.vcs.backends.git.repository import GitRepository | |
@@ -427,6 +429,8 b' def ValidCloneUri():' | |||||
427 | raise NotImplementedError() |
|
429 | raise NotImplementedError() | |
428 | elif url.startswith('hg+http'): |
|
430 | elif url.startswith('hg+http'): | |
429 | raise NotImplementedError() |
|
431 | raise NotImplementedError() | |
|
432 | else: | |||
|
433 | raise Exception('clone from URI %s not allowed' % (url)) | |||
430 |
|
434 | |||
431 | class _validator(formencode.validators.FancyValidator): |
|
435 | class _validator(formencode.validators.FancyValidator): | |
432 | messages = { |
|
436 | messages = { |
@@ -171,4 +171,4 b' div.CodeMirror span.CodeMirror-nonmatchi' | |||||
171 | visibility: hidden; |
|
171 | visibility: hidden; | |
172 | } |
|
172 | } | |
173 |
|
173 | |||
174 | }
|
174 | } No newline at end of file |
@@ -14,7 +14,7 b' div.codeblock {' | |||||
14 | div.codeblock .code-header { |
|
14 | div.codeblock .code-header { | |
15 | border-bottom: 1px solid #CCCCCC; |
|
15 | border-bottom: 1px solid #CCCCCC; | |
16 | background: #EEEEEE; |
|
16 | background: #EEEEEE; | |
17 | padding:10px 0 10px 0; |
|
17 | padding: 10px 0 10px 0; | |
18 | } |
|
18 | } | |
19 |
|
19 | |||
20 | div.codeblock .code-header .stats { |
|
20 | div.codeblock .code-header .stats { | |
@@ -26,38 +26,38 b' div.codeblock .code-header .stats {' | |||||
26 | } |
|
26 | } | |
27 |
|
27 | |||
28 | div.codeblock .code-header .stats .left { |
|
28 | div.codeblock .code-header .stats .left { | |
29 | float:left; |
|
29 | float: left; | |
30 | } |
|
30 | } | |
31 | div.codeblock .code-header .stats .left.img { |
|
31 | div.codeblock .code-header .stats .left.img { | |
32 | margin-top:-2px; |
|
32 | margin-top: -2px; | |
33 | } |
|
33 | } | |
34 | div.codeblock .code-header .stats .left.item { |
|
34 | div.codeblock .code-header .stats .left.item { | |
35 | float:left; |
|
35 | float: left; | |
36 | padding: 0 9px 0 9px; |
|
36 | padding: 0 9px 0 9px; | |
37 | border-right:1px solid #ccc; |
|
37 | border-right: 1px solid #ccc; | |
38 | } |
|
38 | } | |
39 | div.codeblock .code-header .stats .left.item pre { |
|
39 | div.codeblock .code-header .stats .left.item pre { | |
40 | } |
|
40 | } | |
41 | div.codeblock .code-header .stats .left.item.last { |
|
41 | div.codeblock .code-header .stats .left.item.last { | |
42 | border-right:none; |
|
42 | border-right: none; | |
43 | } |
|
43 | } | |
44 | div.codeblock .code-header .stats .buttons { |
|
44 | div.codeblock .code-header .stats .buttons { | |
45 | float:right; |
|
45 | float: right; | |
46 | padding-right:4px; |
|
46 | padding-right: 4px; | |
47 | } |
|
47 | } | |
48 |
|
48 | |||
49 | div.codeblock .code-header .author { |
|
49 | div.codeblock .code-header .author { | |
50 | margin-left:25px; |
|
50 | margin-left: 25px; | |
51 | font-weight: bold; |
|
51 | font-weight: bold; | |
52 | height: 25px; |
|
52 | height: 25px; | |
53 | } |
|
53 | } | |
54 | div.codeblock .code-header .author .user { |
|
54 | div.codeblock .code-header .author .user { | |
55 | padding-top:3px; |
|
55 | padding-top: 3px; | |
56 | } |
|
56 | } | |
57 | div.codeblock .code-header .commit { |
|
57 | div.codeblock .code-header .commit { | |
58 | margin-left:25px; |
|
58 | margin-left: 25px; | |
59 | font-weight: normal; |
|
59 | font-weight: normal; | |
60 | white-space:pre; |
|
60 | white-space: pre; | |
61 | } |
|
61 | } | |
62 |
|
62 | |||
63 | div.codeblock .code-body table { |
|
63 | div.codeblock .code-body table { | |
@@ -90,8 +90,8 b' div.search-code-body pre .break {' | |||||
90 | display: block; |
|
90 | display: block; | |
91 | } |
|
91 | } | |
92 | div.annotatediv { |
|
92 | div.annotatediv { | |
93 | margin-left:2px; |
|
93 | margin-left: 2px; | |
94 | margin-right:4px; |
|
94 | margin-right: 4px; | |
95 | } |
|
95 | } | |
96 | .code-highlight { |
|
96 | .code-highlight { | |
97 | padding: 0px; |
|
97 | padding: 0px; | |
@@ -170,4 +170,4 b' div.annotatediv {' | |||||
170 | .code-highlight .vc, .codehilite .vc { color: #19177C } /* Name.Variable.Class */ |
|
170 | .code-highlight .vc, .codehilite .vc { color: #19177C } /* Name.Variable.Class */ | |
171 | .code-highlight .vg, .codehilite .vg { color: #19177C } /* Name.Variable.Global */ |
|
171 | .code-highlight .vg, .codehilite .vg { color: #19177C } /* Name.Variable.Global */ | |
172 | .code-highlight .vi, .codehilite .vi { color: #19177C } /* Name.Variable.Instance */ |
|
172 | .code-highlight .vi, .codehilite .vi { color: #19177C } /* Name.Variable.Instance */ | |
173 | .code-highlight .il, .codehilite .il { color: #666666 } /* Literal.Number.Integer.Long */
|
173 | .code-highlight .il, .codehilite .il { color: #666666 } /* Literal.Number.Integer.Long */ No newline at end of file |
@@ -4143,11 +4143,11 b' div.rst-block h2 {' | |||||
4143 | font-weight: normal; |
|
4143 | font-weight: normal; | |
4144 | } |
|
4144 | } | |
4145 |
|
4145 | |||
4146 | div.rst-block
|
4146 | div.rst-block { | |
4147 | background-color: #fafafa; |
|
4147 | background-color: #fafafa; | |
4148 | } |
|
4148 | } | |
4149 |
|
4149 | |||
4150 | div.rst-block
|
4150 | div.rst-block { | |
4151 | clear: both; |
|
4151 | clear: both; | |
4152 | overflow: hidden; |
|
4152 | overflow: hidden; | |
4153 | margin: 0; |
|
4153 | margin: 0; | |
@@ -4420,7 +4420,7 b' form.comment-inline-form {' | |||||
4420 | padding: 10px 20px; |
|
4420 | padding: 10px 20px; | |
4421 | } |
|
4421 | } | |
4422 |
|
4422 | |||
4423 | .inline-comments div.rst-block
|
4423 | .inline-comments div.rst-block { | |
4424 | clear: both; |
|
4424 | clear: both; | |
4425 | overflow: hidden; |
|
4425 | overflow: hidden; | |
4426 | margin: 0; |
|
4426 | margin: 0; | |
@@ -4811,4 +4811,4 b' div.comment:target>.comment-wrapp {' | |||||
4811 | .lineno:target a { |
|
4811 | .lineno:target a { | |
4812 | border: solid 2px #ee0 !important; |
|
4812 | border: solid 2px #ee0 !important; | |
4813 | margin: -2px; |
|
4813 | margin: -2px; | |
4814 | }
|
4814 | } No newline at end of file |
@@ -26,7 +26,7 b' function BranchRenderer() {' | |||||
26 |
|
26 | |||
27 | this.canvas = document.getElementById("graph_canvas"); |
|
27 | this.canvas = document.getElementById("graph_canvas"); | |
28 |
|
28 | |||
29 | if (!document.createElement("canvas").getContext)
|
29 | if (!document.createElement("canvas").getContext) | |
30 | this.canvas = window.G_vmlCanvasManager.initElement(this.canvas); |
|
30 | this.canvas = window.G_vmlCanvasManager.initElement(this.canvas); | |
31 | this.ctx = this.canvas.getContext('2d'); |
|
31 | this.ctx = this.canvas.getContext('2d'); | |
32 | this.ctx.strokeStyle = 'rgb(0, 0, 0)'; |
|
32 | this.ctx.strokeStyle = 'rgb(0, 0, 0)'; |
@@ -86,19 +86,6 b' var prevElementSibling = function( el ) ' | |||||
86 | } |
|
86 | } | |
87 | } |
|
87 | } | |
88 |
|
88 | |||
89 | var setSelectValue = function(select, val){ |
|
|||
90 | var selection = YUD.get(select); |
|
|||
91 |
|
||||
92 | // select element |
|
|||
93 | for(var i=0;i<selection.options.length;i++){ |
|
|||
94 | if (selection.options[i].innerHTML == val) { |
|
|||
95 | selection.selectedIndex = i; |
|
|||
96 | break; |
|
|||
97 | } |
|
|||
98 | } |
|
|||
99 | } |
|
|||
100 |
|
||||
101 |
|
||||
102 | /** |
|
89 | /** | |
103 | * SmartColorGenerator |
|
90 | * SmartColorGenerator | |
104 | * |
|
91 | * | |
@@ -577,7 +564,7 b' var showRepoSize = function(target, repo' | |||||
577 | var args= 'auth_token='+token; |
|
564 | var args= 'auth_token='+token; | |
578 |
|
565 | |||
579 | if(!YUD.hasClass(target, 'loaded')){ |
|
566 | if(!YUD.hasClass(target, 'loaded')){ | |
580 | YUD.get(target).innerHTML = _TM['
|
567 | YUD.get(target).innerHTML = _TM['Loading ...']; | |
581 | var url = pyroutes.url('repo_size', {"repo_name":repo_name}); |
|
568 | var url = pyroutes.url('repo_size', {"repo_name":repo_name}); | |
582 | YUC.asyncRequest('POST',url,{ |
|
569 | YUC.asyncRequest('POST',url,{ | |
583 | success:function(o){ |
|
570 | success:function(o){ | |
@@ -910,7 +897,7 b' var deleteComment = function(comment_id)' | |||||
910 |
|
897 | |||
911 | var createInlineAddButton = function(tr){ |
|
898 | var createInlineAddButton = function(tr){ | |
912 |
|
899 | |||
913 | var label = TRANSLATION_MAP['
|
900 | var label = TRANSLATION_MAP['Add another comment']; | |
914 |
|
901 | |||
915 | var html_el = document.createElement('div'); |
|
902 | var html_el = document.createElement('div'); | |
916 | YUD.addClass(html_el, 'add-comment'); |
|
903 | YUD.addClass(html_el, 'add-comment'); | |
@@ -1106,7 +1093,7 b' var fileBrowserListeners = function(curr' | |||||
1106 | match.push('<tr><td><a class="browser-{0}" href="{1}">{2}</a></td><td colspan="5"></td></tr>'.format(t,new_url,n_hl)); |
|
1093 | match.push('<tr><td><a class="browser-{0}" href="{1}">{2}</a></td><td colspan="5"></td></tr>'.format(t,new_url,n_hl)); | |
1107 | } |
|
1094 | } | |
1108 | if(match.length >= matches_max){ |
|
1095 | if(match.length >= matches_max){ | |
1109 | match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM['
|
1096 | match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM['Search truncated'])); | |
1110 | } |
|
1097 | } | |
1111 | } |
|
1098 | } | |
1112 | } |
|
1099 | } | |
@@ -1115,7 +1102,7 b' var fileBrowserListeners = function(curr' | |||||
1115 | YUD.setStyle('tbody_filtered','display',''); |
|
1102 | YUD.setStyle('tbody_filtered','display',''); | |
1116 |
|
1103 | |||
1117 | if (match.length==0){ |
|
1104 | if (match.length==0){ | |
1118 | match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM[' |
|
1105 | match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM['No matching files'])); | |
1119 | } |
|
1106 | } | |
1120 |
|
1107 | |||
1121 | YUD.get('tbody_filtered').innerHTML = match.join(""); |
|
1108 | YUD.get('tbody_filtered').innerHTML = match.join(""); | |
@@ -2173,11 +2160,11 b' YUE.onDOMReady(function(){' | |||||
2173 | console.log(t); |
|
2160 | console.log(t); | |
2174 | if(YUD.hasClass(t, 'hidden')){ |
|
2161 | if(YUD.hasClass(t, 'hidden')){ | |
2175 | YUD.removeClass(t, 'hidden'); |
|
2162 | YUD.removeClass(t, 'hidden'); | |
2176 | YUD.get(button).innerHTML = "↑ {0} ↑".format(_TM[' |
|
2163 | YUD.get(button).innerHTML = "↑ {0} ↑".format(_TM['Collapse diff']); | |
2177 | } |
|
2164 | } | |
2178 | else if(!YUD.hasClass(t, 'hidden')){ |
|
2165 | else if(!YUD.hasClass(t, 'hidden')){ | |
2179 | YUD.addClass(t, 'hidden'); |
|
2166 | YUD.addClass(t, 'hidden'); | |
2180 | YUD.get(button).innerHTML = "↓ {0} ↓".format(_TM[' |
|
2167 | YUD.get(button).innerHTML = "↓ {0} ↓".format(_TM['Expand diff']); | |
2181 | } |
|
2168 | } | |
2182 | }); |
|
2169 | }); | |
2183 |
|
2170 |
@@ -43,11 +43,14 b'' | |||||
43 | <label>${_('API key')}:</label> ${c.user.api_key} |
|
43 | <label>${_('API key')}:</label> ${c.user.api_key} | |
44 | </div> |
|
44 | </div> | |
45 | </div> |
|
45 | </div> | |
|
46 | ##show current ip just if we show ourself | |||
|
47 | %if c.rhodecode_user.username == c.user.username: | |||
46 | <div class="field"> |
|
48 | <div class="field"> | |
47 | <div class="label"> |
|
49 | <div class="label"> | |
48 | <label>${_('Current IP')}:</label> ${c.perm_user.ip_addr or "?"} |
|
50 | <label>${_('Current IP')}:</label> ${c.perm_user.ip_addr or "?"} | |
49 | </div> |
|
51 | </div> | |
50 | </div> |
|
52 | </div> | |
|
53 | %endif | |||
51 | <div class="fields"> |
|
54 | <div class="fields"> | |
52 | <div class="field"> |
|
55 | <div class="field"> | |
53 | <div class="label"> |
|
56 | <div class="label"> |
@@ -20,6 +20,11 b'' | |||||
20 | <label>${_('API key')}</label> ${c.user.api_key} |
|
20 | <label>${_('API key')}</label> ${c.user.api_key} | |
21 | </div> |
|
21 | </div> | |
22 | </div> |
|
22 | </div> | |
|
23 | <div class="field"> | |||
|
24 | <div class="label"> | |||
|
25 | <label>${_('Current IP')}:</label> ${c.perm_user.ip_addr or "?"} | |||
|
26 | </div> | |||
|
27 | </div> | |||
23 | <div class="fields"> |
|
28 | <div class="fields"> | |
24 | <div class="field"> |
|
29 | <div class="field"> | |
25 | <div class="label"> |
|
30 | <div class="label"> |
@@ -41,21 +41,21 b'' | |||||
41 | <script type="text/javascript"> |
|
41 | <script type="text/javascript"> | |
42 | //JS translations map |
|
42 | //JS translations map | |
43 | var TRANSLATION_MAP = { |
|
43 | var TRANSLATION_MAP = { | |
44 | ' |
|
44 | 'Add another comment':'${_("Add another comment")}', | |
45 | 'Stop following this repository':"${_('Stop following this repository')}", |
|
45 | 'Stop following this repository':"${_('Stop following this repository')}", | |
46 | 'Start following this repository':"${_('Start following this repository')}", |
|
46 | 'Start following this repository':"${_('Start following this repository')}", | |
47 | 'Group':"${_('Group')}", |
|
47 | 'Group':"${_('Group')}", | |
48 | 'members':"${_('members')}", |
|
48 | 'members':"${_('members')}", | |
49 | ' |
|
49 | 'Loading ...':"${_('Loading ...')}", | |
50 | ' |
|
50 | 'Search truncated': "${_('Search truncated')}", | |
51 | ' |
|
51 | 'No matching files': "${_('No matching files')}", | |
52 | 'Open new pull request': "${_('Open new pull request')}", |
|
52 | 'Open new pull request': "${_('Open new pull request')}", | |
53 | 'Open new pull request for selected changesets': "${_('Open new pull request for selected changesets')}", |
|
53 | 'Open new pull request for selected changesets': "${_('Open new pull request for selected changesets')}", | |
54 | 'Show selected changes __S -> __E': "${_('Show selected changes __S -> __E')}", |
|
54 | 'Show selected changes __S -> __E': "${_('Show selected changes __S -> __E')}", | |
55 | 'Show selected change __S': "${_('Show selected change __S')}", |
|
55 | 'Show selected change __S': "${_('Show selected change __S')}", | |
56 | 'Selection link': "${_('Selection link')}", |
|
56 | 'Selection link': "${_('Selection link')}", | |
57 | ' |
|
57 | 'Collapse diff': "${_('Collapse diff')}", | |
58 | ' |
|
58 | 'Expand diff': "${_('Expand diff')}" | |
59 | }; |
|
59 | }; | |
60 | var _TM = TRANSLATION_MAP; |
|
60 | var _TM = TRANSLATION_MAP; | |
61 |
|
61 |
@@ -25,15 +25,15 b'' | |||||
25 | <div class="table"> |
|
25 | <div class="table"> | |
26 | % if c.pagination: |
|
26 | % if c.pagination: | |
27 | <div id="graph"> |
|
27 | <div id="graph"> | |
28 | <div class="info_box" style="clear: both;padding: 10px 6px;text-align: right;"> |
|
28 | <div class="info_box" style="clear: both;padding: 10px 6px;min-height: 12px;text-align: right;"> | |
29 | <a href="#" class="ui-btn small" id="rev_range_container" style="display:none"></a> |
|
29 | <a href="#" class="ui-btn small" id="rev_range_container" style="display:none"></a> | |
30 | <a href="#" class="ui-btn small" id="rev_range_clear" style="display:none">${_('Clear selection')}</a> |
|
30 | <a href="#" class="ui-btn small" id="rev_range_clear" style="display:none">${_('Clear selection')}</a> | |
31 |
|
31 | |||
32 | %if c.rhodecode_db_repo.fork: |
|
32 | %if c.rhodecode_db_repo.fork: | |
33 | <a title="${_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name)}" href="${h.url('compare_url',repo_name=c.rhodecode_db_repo.fork.repo_name,org_ref_type='branch',org_ref='default',other_repo=c.repo_name,other_ref_type='branch',other_ref=request.GET.get('branch') or 'default')}" class="ui-btn small">${_('Compare fork with parent')}</a> |
|
33 | <a id="compare_fork" title="${_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name)}" href="${h.url('compare_url',repo_name=c.rhodecode_db_repo.fork.repo_name,org_ref_type='branch',org_ref='default',other_repo=c.repo_name,other_ref_type='branch',other_ref=request.GET.get('branch') or 'default',merge=1)}" class="ui-btn small">${_('Compare fork with parent')}</a> | |
34 | %endif |
|
34 | %endif | |
35 | %if h.is_hg(c.rhodecode_repo): |
|
35 | %if h.is_hg(c.rhodecode_repo): | |
36 | <a id="open_new_pr" href="${h.url('pullrequest_ |
|
36 | <a id="open_new_pr" href="${h.url('pullrequest_home',repo_name=c.repo_name)}" class="ui-btn small">${_('Open new pull request')}</a> | |
37 | %endif |
|
37 | %endif | |
38 | </div> |
|
38 | </div> | |
39 | <div class="container_header"> |
|
39 | <div class="container_header"> | |
@@ -125,8 +125,7 b'' | |||||
125 | var url_tmpl = "${h.url('changeset_home',repo_name=c.repo_name,revision='__REVRANGE__')}"; |
|
125 | var url_tmpl = "${h.url('changeset_home',repo_name=c.repo_name,revision='__REVRANGE__')}"; | |
126 | var pr_tmpl = "${h.url('pullrequest_home',repo_name=c.repo_name)}"; |
|
126 | var pr_tmpl = "${h.url('pullrequest_home',repo_name=c.repo_name)}"; | |
127 |
|
127 | |||
128 | var checkbox_checker = function(e){ |
|
128 | var checkbox_checker = function(e){ | |
129 | var clicked_cb = e.currentTarget; |
|
|||
130 | var checked_checkboxes = []; |
|
129 | var checked_checkboxes = []; | |
131 | for (pos in checkboxes){ |
|
130 | for (pos in checkboxes){ | |
132 | if(checkboxes[pos].checked){ |
|
131 | if(checkboxes[pos].checked){ | |
@@ -134,13 +133,17 b'' | |||||
134 | } |
|
133 | } | |
135 | } |
|
134 | } | |
136 | if(YUD.get('open_new_pr')){ |
|
135 | if(YUD.get('open_new_pr')){ | |
137 | if(checked_checkboxes.length> |
|
136 | if(checked_checkboxes.length>1){ | |
138 | // modify open pull request to show we have selected cs |
|
137 | YUD.setStyle('open_new_pr','display','none'); | |
139 | YUD.get('open_new_pr').innerHTML = _TM['Open new pull request for selected changesets']; |
|
138 | } else { | |
140 | }else{ |
|
139 | YUD.setStyle('open_new_pr','display',''); | |
141 | YUD.get('open_new_pr').innerHTML = _TM['Open new pull request']; |
|
140 | if(checked_checkboxes.length>0){ | |
|
141 | YUD.get('open_new_pr').innerHTML = _TM['Open new pull request for selected changesets']; | |||
|
142 | }else{ | |||
|
143 | YUD.get('open_new_pr').innerHTML = _TM['Open new pull request']; | |||
|
144 | } | |||
|
145 | } | |||
142 | } |
|
146 | } | |
143 | } |
|
|||
144 |
|
147 | |||
145 | if(checked_checkboxes.length>0){ |
|
148 | if(checked_checkboxes.length>0){ | |
146 | var rev_end = checked_checkboxes[0].name; |
|
149 | var rev_end = checked_checkboxes[0].name; | |
@@ -160,20 +163,25 b'' | |||||
160 | YUD.setStyle('rev_range_clear','display',''); |
|
163 | YUD.setStyle('rev_range_clear','display',''); | |
161 |
|
164 | |||
162 | YUD.get('open_new_pr').href = pr_tmpl + '?rev_start={0}&rev_end={1}'.format(rev_start,rev_end); |
|
165 | YUD.get('open_new_pr').href = pr_tmpl + '?rev_start={0}&rev_end={1}'.format(rev_start,rev_end); | |
163 |
|
166 | YUD.setStyle('compare_fork','display','none'); | ||
164 | } else{ |
|
167 | } else{ | |
165 | YUD.setStyle('rev_range_container','display','none'); |
|
168 | YUD.setStyle('rev_range_container','display','none'); | |
166 | YUD.setStyle('rev_range_clear','display','none'); |
|
169 | YUD.setStyle('rev_range_clear','display','none'); | |
167 |
|
|
170 | if (checkboxes){ | |
168 | }; |
|
171 | YUD.get('open_new_pr').href = pr_tmpl + '?rev_end={0}'.format(checkboxes[0].name); | |
169 | YUE.onDOMReady(checkbox_checker); |
|
172 | } | |
170 | YUE.on(checkboxes,'click', checkbox_checker); |
|
173 | YUD.setStyle('compare_fork','display',''); | |
|
174 | } | |||
|
175 | }; | |||
|
176 | YUE.onDOMReady(checkbox_checker); | |||
|
177 | YUE.on(checkboxes,'click', checkbox_checker); | |||
171 |
|
178 | |||
172 | YUE.on('rev_range_clear','click',function(e){ |
|
179 | YUE.on('rev_range_clear','click',function(e){ | |
173 | for (var i=0; i<checkboxes.length; i++){ |
|
180 | for (var i=0; i<checkboxes.length; i++){ | |
174 | var cb = checkboxes[i]; |
|
181 | var cb = checkboxes[i]; | |
175 | cb.checked = false; |
|
182 | cb.checked = false; | |
176 | } |
|
183 | } | |
|
184 | checkbox_checker(); | |||
177 | YUE.preventDefault(e); |
|
185 | YUE.preventDefault(e); | |
178 | }); |
|
186 | }); | |
179 |
|
187 |
@@ -52,7 +52,7 b'' | |||||
52 | <div class="changeset_header"> |
|
52 | <div class="changeset_header"> | |
53 | <div class="changeset_file"> |
|
53 | <div class="changeset_file"> | |
54 | ${h.safe_unicode(filenode_path)} | |
|
54 | ${h.safe_unicode(filenode_path)} | | |
55 | <a class="spantag" href="${h.url('files_home', repo_name=c.repo_name, f_path=filenode_path, revision=c.org_ref)}" title="${_('show file at latest version in this repo')}">${c.org_ref_type}@${h.short_id(c.org_ref) if c.org_ref_type=='rev' else c.org_ref}</a> -> |
|
55 | <a class="spantag" href="${h.url('files_home', repo_name=c.other_repo.repo_name, f_path=filenode_path, revision=c.org_ref)}" title="${_('show file at latest version in this repo')}">${c.org_ref_type}@${h.short_id(c.org_ref) if c.org_ref_type=='rev' else c.org_ref}</a> -> | |
56 | <a class="spantag" href="${h.url('files_home', repo_name=c.repo_name, f_path=filenode_path, revision=c.other_ref)}" title="${_('show file at initial version in this repo')}">${c.other_ref_type}@${h.short_id(c.other_ref) if c.other_ref_type=='rev' else c.other_ref}</a> |
|
56 | <a class="spantag" href="${h.url('files_home', repo_name=c.repo_name, f_path=filenode_path, revision=c.other_ref)}" title="${_('show file at initial version in this repo')}">${c.other_ref_type}@${h.short_id(c.other_ref) if c.other_ref_type=='rev' else c.other_ref}</a> | |
57 | </div> |
|
57 | </div> | |
58 | </div> |
|
58 | </div> |
@@ -1,10 +1,10 b'' | |||||
1 | ## Changesets table ! |
|
1 | ## Changesets table ! | |
2 | <div class="container"> |
|
2 | <div class="container"> | |
3 | <table class="compare_view_commits noborder"> |
|
|||
4 | %if not c.cs_ranges: |
|
3 | %if not c.cs_ranges: | |
5 | <span class="empty_data">${_('No changesets')}</span> |
|
4 | <span class="empty_data">${_('No changesets')}</span> | |
6 | %else: |
|
5 | %else: | |
7 | %for cnt, cs in enumerate(c.cs_ranges): |
|
6 | <table class="compare_view_commits noborder"> | |
|
7 | %for cs in reversed(c.cs_ranges): | |||
8 | <tr> |
|
8 | <tr> | |
9 | <td><div class="gravatar"><img alt="gravatar" src="${h.gravatar_url(h.email_or_none(cs.author),14)}"/></div></td> |
|
9 | <td><div class="gravatar"><img alt="gravatar" src="${h.gravatar_url(h.email_or_none(cs.author),14)}"/></div></td> | |
10 | <td> |
|
10 | <td> | |
@@ -22,7 +22,15 b'' | |||||
22 | <td><div class="message tooltip" title="${h.tooltip(cs.message)}" style="white-space:normal">${h.urlify_commit(h.shorter(cs.message, 60),c.repo_name)}</div></td> |
|
22 | <td><div class="message tooltip" title="${h.tooltip(cs.message)}" style="white-space:normal">${h.urlify_commit(h.shorter(cs.message, 60),c.repo_name)}</div></td> | |
23 | </tr> |
|
23 | </tr> | |
24 | %endfor |
|
24 | %endfor | |
25 |
|
25 | </table> | ||
|
26 | %if c.ancestor: | |||
|
27 | <span class="ancestor">${_('Ancestor')}: | |||
|
28 | ${h.link_to(h.short_id(c.ancestor),h.url('changeset_home',repo_name=c.repo_name,revision=c.ancestor))} | |||
|
29 | </span> | |||
|
30 | %endif | |||
|
31 | %if c.as_form: | |||
|
32 | ${h.hidden('ancestor_rev',c.ancestor)} | |||
|
33 | ${h.hidden('merge_rev',c.cs_ranges[-1].raw_id)} | |||
|
34 | %endif | |||
26 | %endif |
|
35 | %endif | |
27 | </table> |
|
|||
28 | </div> |
|
36 | </div> |
@@ -10,10 +10,9 b'' | |||||
10 | </p> |
|
10 | </p> | |
11 |
|
11 | |||
12 | <div>${_('revisions for reviewing')}</div> |
|
12 | <div>${_('revisions for reviewing')}</div> | |
13 | <pre> |
|
13 | <p style="white-space: pre-wrap;"> | |
14 | %for r,r_msg in pr_revisions: |
|
14 | %for r,r_msg in pr_revisions: | |
15 | ${h.short_id(r)}: |
|
15 | <b>${h.short_id(r)}</b>: | |
16 |
|
|
16 | ${h.shorter(r_msg, 256)} | |
17 |
|
||||
18 | %endfor |
|
17 | %endfor | |
19 | </p |
|
18 | </p> |
@@ -49,7 +49,7 b'' | |||||
49 | <th>${_('Mimetype')}</th> |
|
49 | <th>${_('Mimetype')}</th> | |
50 | <th>${_('Last Revision')}</th> |
|
50 | <th>${_('Last Revision')}</th> | |
51 | <th>${_('Last modified')}</th> |
|
51 | <th>${_('Last modified')}</th> | |
52 | <th>${_('Last commiter')}</th> |
|
52 | <th>${_('Last committer')}</th> | |
53 | </tr> |
|
53 | </tr> | |
54 | </thead> |
|
54 | </thead> | |
55 |
|
55 | |||
@@ -89,8 +89,8 b'' | |||||
89 | <td> |
|
89 | <td> | |
90 | %if node.is_file(): |
|
90 | %if node.is_file(): | |
91 | <div class="tooltip" title="${h.tooltip(node.last_changeset.message)}"> |
|
91 | <div class="tooltip" title="${h.tooltip(node.last_changeset.message)}"> | |
92 | <pre>${'r%s:%s' % (node.last_changeset.revision,node.last_changeset.short_id)}</pre> |
|
92 | <pre>${'r%s:%s' % (node.last_changeset.revision,node.last_changeset.short_id)}</pre> | |
93 | </div> |
|
93 | </div> | |
94 | %endif |
|
94 | %endif | |
95 | </td> |
|
95 | </td> | |
96 | <td> |
|
96 | <td> |
@@ -21,9 +21,6 b'' | |||||
21 | </div> |
|
21 | </div> | |
22 | ${h.form(url('pullrequest', repo_name=c.repo_name), method='post', id='pull_request_form')} |
|
22 | ${h.form(url('pullrequest', repo_name=c.repo_name), method='post', id='pull_request_form')} | |
23 | <div style="float:left;padding:0px 30px 30px 30px"> |
|
23 | <div style="float:left;padding:0px 30px 30px 30px"> | |
24 | <input type="hidden" name="rev_start" value="${request.GET.get('rev_start')}" /> |
|
|||
25 | <input type="hidden" name="rev_end" value="${request.GET.get('rev_end')}" /> |
|
|||
26 |
|
||||
27 | ##ORG |
|
24 | ##ORG | |
28 | <div style="float:left"> |
|
25 | <div style="float:left"> | |
29 | <div> |
|
26 | <div> | |
@@ -101,7 +98,7 b'' | |||||
101 |
|
98 | |||
102 | <div class="field"> |
|
99 | <div class="field"> | |
103 | <div class="label label-textarea"> |
|
100 | <div class="label label-textarea"> | |
104 | <label for="pullrequest_desc">${_(' |
|
101 | <label for="pullrequest_desc">${_('Description')}:</label> | |
105 | </div> |
|
102 | </div> | |
106 | <div class="textarea text-area editor"> |
|
103 | <div class="textarea text-area editor"> | |
107 | ${h.textarea('pullrequest_desc',size=30)} |
|
104 | ${h.textarea('pullrequest_desc',size=30)} | |
@@ -125,8 +122,31 b'' | |||||
125 |
|
122 | |||
126 | var other_repos_info = ${c.other_repos_info|n}; |
|
123 | var other_repos_info = ${c.other_repos_info|n}; | |
127 |
|
124 | |||
|
125 | var otherrepoChanged = function(){ | |||
|
126 | var sel_box = YUQ('#pull_request_form #other_repo')[0]; | |||
|
127 | var repo_name = sel_box.options[sel_box.selectedIndex].value; | |||
|
128 | ||||
|
129 | YUD.get('other_repo_desc').innerHTML = other_repos_info[repo_name]['description']; | |||
|
130 | // replace options of other_ref with the ones for the current other_repo | |||
|
131 | var other_ref_selector = YUD.get('other_ref'); | |||
|
132 | var new_select = YUD.createElementFromMarkup(other_repos_info[repo_name]['revs']); | |||
|
133 | var new_selectedIndex = new_select.selectedIndex; | |||
|
134 | other_ref_selector.innerHTML = ""; // clear old options | |||
|
135 | while (new_select.length > 0){ // children will be popped when appened to other_ref_selector | |||
|
136 | other_ref_selector.appendChild(new_select.children[0]); | |||
|
137 | } | |||
|
138 | // browsers lost track of selected when appendChild was used | |||
|
139 | other_ref_selector.selectedIndex = new_selectedIndex; | |||
|
140 | ||||
|
141 | // reset && add the reviewer based on selected repo | |||
|
142 | var _data = other_repos_info[repo_name]; | |||
|
143 | YUD.get('review_members').innerHTML = ''; | |||
|
144 | addReviewMember(_data.user.user_id, _data.user.firstname, | |||
|
145 | _data.user.lastname, _data.user.username, | |||
|
146 | _data.user.gravatar_link); | |||
|
147 | } | |||
|
148 | ||||
128 | var loadPreview = function(){ |
|
149 | var loadPreview = function(){ | |
129 | YUD.setStyle(YUD.get('pull_request_overview_url').parentElement,'display','none'); |
|
|||
130 | //url template |
|
150 | //url template | |
131 | var url = "${h.url('compare_url', |
|
151 | var url = "${h.url('compare_url', | |
132 | repo_name='__other_repo__', |
|
152 | repo_name='__other_repo__', | |
@@ -136,8 +156,8 b'' | |||||
136 | other_ref_type='__org_ref_type__', |
|
156 | other_ref_type='__org_ref_type__', | |
137 | other_ref='__org_ref__', |
|
157 | other_ref='__org_ref__', | |
138 | as_form=True, |
|
158 | as_form=True, | |
139 |
|
|
159 | merge=True, | |
140 |
|
|
160 | )}"; | |
141 | var org_repo = YUQ('#pull_request_form #org_repo')[0].value; |
|
161 | var org_repo = YUQ('#pull_request_form #org_repo')[0].value; | |
142 | var org_ref = YUQ('#pull_request_form #org_ref')[0].value.split(':'); |
|
162 | var org_ref = YUQ('#pull_request_form #org_ref')[0].value.split(':'); | |
143 |
|
163 | |||
@@ -159,22 +179,10 b'' | |||||
159 | } |
|
179 | } | |
160 |
|
180 | |||
161 | YUD.get('pull_request_overview').innerHTML = "${_('Loading ...')}"; |
|
181 | YUD.get('pull_request_overview').innerHTML = "${_('Loading ...')}"; | |
|
182 | ypjax(url,'pull_request_overview'); | |||
|
183 | ||||
162 | YUD.get('pull_request_overview_url').href = url; // shouldn't have as_form ... but ... |
|
184 | YUD.get('pull_request_overview_url').href = url; // shouldn't have as_form ... but ... | |
163 | YUD.setStyle(YUD.get('pull_request_overview_url').parentElement,'display',''); |
|
185 | YUD.setStyle(YUD.get('pull_request_overview_url').parentElement,'display',''); | |
164 | ypjax(url,'pull_request_overview', function(data){ |
|
|||
165 | var sel_box = YUQ('#pull_request_form #other_repo')[0]; |
|
|||
166 | var repo_name = sel_box.options[sel_box.selectedIndex].value; |
|
|||
167 | var _data = other_repos_info[repo_name]; |
|
|||
168 | YUD.get('other_repo_desc').innerHTML = other_repos_info[repo_name]['description']; |
|
|||
169 | YUD.get('other_ref').innerHTML = other_repos_info[repo_name]['revs']; |
|
|||
170 | // select back the revision that was just compared |
|
|||
171 | setSelectValue(YUD.get('other_ref'), rev_data['other_ref']); |
|
|||
172 | // reset && add the reviewer based on selected repo |
|
|||
173 | YUD.get('review_members').innerHTML = ''; |
|
|||
174 | addReviewMember(_data.user.user_id, _data.user.firstname, |
|
|||
175 | _data.user.lastname, _data.user.username, |
|
|||
176 | _data.user.gravatar_link); |
|
|||
177 | }) |
|
|||
178 | } |
|
186 | } | |
179 |
|
187 | |||
180 | ## refresh automatically when something changes (org_repo can't change) |
|
188 | ## refresh automatically when something changes (org_repo can't change) | |
@@ -184,9 +192,7 b'' | |||||
184 | }); |
|
192 | }); | |
185 |
|
193 | |||
186 | YUE.on('other_repo', 'change', function(e){ |
|
194 | YUE.on('other_repo', 'change', function(e){ | |
187 | var repo_name = e.currentTarget.value; |
|
195 | otherrepoChanged(); | |
188 | // replace the <select> of changed repo |
|
|||
189 | YUD.get('other_ref').innerHTML = other_repos_info[repo_name]['revs']; |
|
|||
190 | loadPreview(); |
|
196 | loadPreview(); | |
191 | }); |
|
197 | }); | |
192 |
|
198 | |||
@@ -194,8 +200,9 b'' | |||||
194 | loadPreview(); |
|
200 | loadPreview(); | |
195 | }); |
|
201 | }); | |
196 |
|
202 | |||
|
203 | otherrepoChanged(); | |||
197 | //lazy load overview after 0.5s |
|
204 | //lazy load overview after 0.5s | |
198 | setTimeout(loadPreview, 500) |
|
205 | setTimeout(loadPreview, 500); | |
199 |
|
206 | |||
200 | </script> |
|
207 | </script> | |
201 |
|
208 |
@@ -161,7 +161,7 b'' | |||||
161 | <div id="reviewers_container"></div> |
|
161 | <div id="reviewers_container"></div> | |
162 | </div> |
|
162 | </div> | |
163 | <div style="padding:0px 10px"> |
|
163 | <div style="padding:0px 10px"> | |
164 | <span id="update_pull_request" class="ui-btn xsmall">${_(' |
|
164 | <span id="update_pull_request" class="ui-btn xsmall">${_('Save changes')}</span> | |
165 | </div> |
|
165 | </div> | |
166 | %endif |
|
166 | %endif | |
167 | </div> |
|
167 | </div> |
@@ -47,7 +47,8 b' log = logging.getLogger(__name__)' | |||||
47 | 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO', |
|
47 | 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO', | |
48 | 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO', |
|
48 | 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO', | |
49 | 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', |
|
49 | 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO', | |
50 | 'GIT_REMOTE_REPO', 'SCM_TESTS', '_get_repo_create_params' |
|
50 | 'GIT_REMOTE_REPO', 'SCM_TESTS', '_get_repo_create_params', | |
|
51 | '_get_group_create_params' | |||
51 | ] |
|
52 | ] | |
52 |
|
53 | |||
53 | # Invoke websetup with the current config file |
|
54 | # Invoke websetup with the current config file | |
@@ -183,3 +184,18 b' def _get_repo_create_params(**custom):' | |||||
183 | defs.update({'repo_name_full': defs['repo_name']}) |
|
184 | defs.update({'repo_name_full': defs['repo_name']}) | |
184 |
|
185 | |||
185 | return defs |
|
186 | return defs | |
|
187 | ||||
|
188 | ||||
|
189 | def _get_group_create_params(**custom): | |||
|
190 | defs = dict( | |||
|
191 | group_name=None, | |||
|
192 | group_description='DESC', | |||
|
193 | group_parent_id=None, | |||
|
194 | perms_updates=[], | |||
|
195 | perms_new=[], | |||
|
196 | enable_locking=False, | |||
|
197 | recursive=False | |||
|
198 | ) | |||
|
199 | defs.update(custom) | |||
|
200 | ||||
|
201 | return defs |
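
For orientation, a rough sketch of how a defaults helper like _get_group_create_params is consumed by the repos-group tests later in this changeset; the 'docs' name and some_group_id below are placeholders for illustration rather than values from the diff, and ReposGroupModel/Session are assumed to be the model classes already imported by the test module.

    # illustration only: build a complete form dict, overriding just the
    # fields this particular test cares about
    form_data = _get_group_create_params(group_name='docs', group_parent_id=None)
    # the remaining keys (perms_new, perms_updates, enable_locking, recursive)
    # come from the defaults, so the model update receives a full form
    gr = ReposGroupModel().update(some_group_id, form_data)  # some_group_id is hypothetical
    Session().commit()
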
@@ -370,6 +370,17 b' class BaseTestApi(object):' | |||||
370 | % (TEST_USER_ADMIN_LOGIN, self.REPO, True)) |
|
370 | % (TEST_USER_ADMIN_LOGIN, self.REPO, True)) | |
371 | self._compare_ok(id_, expected, given=response.body) |
|
371 | self._compare_ok(id_, expected, given=response.body) | |
372 |
|
372 | |||
|
373 | def test_api_lock_repo_lock_optional_locked(self): | |||
|
374 | from rhodecode.lib.utils2 import time_to_datetime | |||
|
375 | _locked_since = json.dumps(time_to_datetime(Repository\ | |||
|
376 | .get_by_repo_name(self.REPO).locked[1])) | |||
|
377 | id_, params = _build_data(self.apikey, 'lock', | |||
|
378 | repoid=self.REPO) | |||
|
379 | response = api_call(self, params) | |||
|
380 | expected = ('Repo `%s` locked by `%s`. Locked=`True`. Locked since: `%s`' | |||
|
381 | % (self.REPO, TEST_USER_ADMIN_LOGIN, _locked_since)) | |||
|
382 | self._compare_ok(id_, expected, given=response.body) | |||
|
383 | ||||
373 | @mock.patch.object(Repository, 'lock', crash) |
|
384 | @mock.patch.object(Repository, 'lock', crash) | |
374 | def test_api_lock_error(self): |
|
385 | def test_api_lock_error(self): | |
375 | id_, params = _build_data(self.apikey, 'lock', |
|
386 | id_, params = _build_data(self.apikey, 'lock', | |
@@ -381,6 +392,32 b' class BaseTestApi(object):' | |||||
381 | expected = 'Error occurred locking repository `%s`' % self.REPO |
|
392 | expected = 'Error occurred locking repository `%s`' % self.REPO | |
382 | self._compare_error(id_, expected, given=response.body) |
|
393 | self._compare_error(id_, expected, given=response.body) | |
383 |
|
394 | |||
|
395 | def test_api_get_locks_regular_user(self): | |||
|
396 | id_, params = _build_data(self.apikey_regular, 'get_locks') | |||
|
397 | response = api_call(self, params) | |||
|
398 | expected = [] | |||
|
399 | self._compare_ok(id_, expected, given=response.body) | |||
|
400 | ||||
|
401 | def test_api_get_locks_with_userid_regular_user(self): | |||
|
402 | id_, params = _build_data(self.apikey_regular, 'get_locks', | |||
|
403 | userid=TEST_USER_ADMIN_LOGIN) | |||
|
404 | response = api_call(self, params) | |||
|
405 | expected = 'userid is not the same as your user' | |||
|
406 | self._compare_error(id_, expected, given=response.body) | |||
|
407 | ||||
|
408 | def test_api_get_locks(self): | |||
|
409 | id_, params = _build_data(self.apikey, 'get_locks') | |||
|
410 | response = api_call(self, params) | |||
|
411 | expected = [] | |||
|
412 | self._compare_ok(id_, expected, given=response.body) | |||
|
413 | ||||
|
414 | def test_api_get_locks_with_userid(self): | |||
|
415 | id_, params = _build_data(self.apikey, 'get_locks', | |||
|
416 | userid=TEST_USER_REGULAR_LOGIN) | |||
|
417 | response = api_call(self, params) | |||
|
418 | expected = [] | |||
|
419 | self._compare_ok(id_, expected, given=response.body) | |||
|
420 | ||||
384 | def test_api_create_existing_user(self): |
|
421 | def test_api_create_existing_user(self): | |
385 | id_, params = _build_data(self.apikey, 'create_user', |
|
422 | id_, params = _build_data(self.apikey, 'create_user', | |
386 | username=TEST_USER_ADMIN_LOGIN, |
|
423 | username=TEST_USER_ADMIN_LOGIN, |
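
As a quick reader's aid, the calling pattern the new get_locks tests follow, sketched as it would appear inside one of the BaseTestApi methods; 'some_user' is a placeholder username and the empty-list expectation simply mirrors the assertions above, so treat it as an assumed typical result rather than a guarantee.

    # inside a BaseTestApi test method; 'some_user' is a placeholder username
    id_, params = _build_data(self.apikey, 'get_locks', userid='some_user')
    response = api_call(self, params)
    # with no repositories locked the call is expected to return an empty list,
    # which _compare_ok checks against response.body
    self._compare_ok(id_, [], given=response.body)
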
@@ -106,6 +106,7 b' class TestCompareController(TestControll' | |||||
106 | other_repo=repo2.repo_name, |
|
106 | other_repo=repo2.repo_name, | |
107 | other_ref_type="branch", |
|
107 | other_ref_type="branch", | |
108 | other_ref=rev1, |
|
108 | other_ref=rev1, | |
|
109 | merge='1', | |||
109 | )) |
|
110 | )) | |
110 |
|
111 | |||
111 | response.mustcontain('%s@%s -> %s@%s' % (repo1.repo_name, rev2, repo2.repo_name, rev1)) |
|
112 | response.mustcontain('%s@%s -> %s@%s' % (repo1.repo_name, rev2, repo2.repo_name, rev1)) | |
@@ -118,9 +119,9 b' class TestCompareController(TestControll' | |||||
118 | response.mustcontain("""<a href="/%s/changeset/%s">r1:%s</a>""" % (repo2.repo_name, cs1.raw_id, cs1.short_id)) |
|
119 | response.mustcontain("""<a href="/%s/changeset/%s">r1:%s</a>""" % (repo2.repo_name, cs1.raw_id, cs1.short_id)) | |
119 | response.mustcontain("""<a href="/%s/changeset/%s">r2:%s</a>""" % (repo2.repo_name, cs2.raw_id, cs2.short_id)) |
|
120 | response.mustcontain("""<a href="/%s/changeset/%s">r2:%s</a>""" % (repo2.repo_name, cs2.raw_id, cs2.short_id)) | |
120 | ## files |
|
121 | ## files | |
121 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s#C--826e8142e6ba">file1</a>""" % (repo1.repo_name, rev2, rev1, repo2.repo_name)) |
|
122 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s&merge=1#C--826e8142e6ba">file1</a>""" % (repo1.repo_name, rev2, rev1, repo2.repo_name)) | |
122 | #swap |
|
123 | #swap | |
123 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s">[swap]</a>""" % (repo2.repo_name, rev1, rev2, repo1.repo_name)) |
|
124 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s&merge=True">[swap]</a>""" % (repo2.repo_name, rev1, rev2, repo1.repo_name)) | |
124 |
|
125 | |||
125 | def test_compare_forks_on_branch_extra_commits_origin_has_incomming_hg(self): |
|
126 | def test_compare_forks_on_branch_extra_commits_origin_has_incomming_hg(self): | |
126 | self.log_user() |
|
127 | self.log_user() | |
@@ -160,6 +161,7 b' class TestCompareController(TestControll' | |||||
160 | other_repo=repo2.repo_name, |
|
161 | other_repo=repo2.repo_name, | |
161 | other_ref_type="branch", |
|
162 | other_ref_type="branch", | |
162 | other_ref=rev1, |
|
163 | other_ref=rev1, | |
|
164 | merge='x', | |||
163 | )) |
|
165 | )) | |
164 | response.mustcontain('%s@%s -> %s@%s' % (repo1.repo_name, rev2, repo2.repo_name, rev1)) |
|
166 | response.mustcontain('%s@%s -> %s@%s' % (repo1.repo_name, rev2, repo2.repo_name, rev1)) | |
165 | response.mustcontain("""Showing 2 commits""") |
|
167 | response.mustcontain("""Showing 2 commits""") | |
@@ -171,9 +173,9 b' class TestCompareController(TestControll' | |||||
171 | response.mustcontain("""<a href="/%s/changeset/%s">r1:%s</a>""" % (repo2.repo_name, cs1.raw_id, cs1.short_id)) |
|
173 | response.mustcontain("""<a href="/%s/changeset/%s">r1:%s</a>""" % (repo2.repo_name, cs1.raw_id, cs1.short_id)) | |
172 | response.mustcontain("""<a href="/%s/changeset/%s">r2:%s</a>""" % (repo2.repo_name, cs2.raw_id, cs2.short_id)) |
|
174 | response.mustcontain("""<a href="/%s/changeset/%s">r2:%s</a>""" % (repo2.repo_name, cs2.raw_id, cs2.short_id)) | |
173 | ## files |
|
175 | ## files | |
174 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s#C--826e8142e6ba">file1</a>""" % (repo1.repo_name, rev2, rev1, repo2.repo_name)) |
|
176 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s&merge=x#C--826e8142e6ba">file1</a>""" % (repo1.repo_name, rev2, rev1, repo2.repo_name)) | |
175 | #swap |
|
177 | #swap | |
176 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s">[swap]</a>""" % (repo2.repo_name, rev1, rev2, repo1.repo_name)) |
|
178 | response.mustcontain("""<a href="/%s/compare/branch@%s...branch@%s?other_repo=%s&merge=True">[swap]</a>""" % (repo2.repo_name, rev1, rev2, repo1.repo_name)) | |
177 |
|
179 | |||
178 | def test_compare_cherry_pick_changesets_from_bottom(self): |
|
180 | def test_compare_cherry_pick_changesets_from_bottom(self): | |
179 |
|
181 | |||
@@ -215,20 +217,16 b' class TestCompareController(TestControll' | |||||
215 | cs5 = _commit_change(repo1.repo_name, filename='file1', content='line1\nline2\nline3\nline4\nline5\nline6\n', |
|
217 | cs5 = _commit_change(repo1.repo_name, filename='file1', content='line1\nline2\nline3\nline4\nline5\nline6\n', | |
216 | message='commit6', vcs_type='hg', parent=cs4) |
|
218 | message='commit6', vcs_type='hg', parent=cs4) | |
217 |
|
219 | |||
218 | rev1 = 'tip' |
|
|||
219 | rev2 = 'tip' |
|
|||
220 |
|
||||
221 | response = self.app.get(url(controller='compare', action='index', |
|
220 | response = self.app.get(url(controller='compare', action='index', | |
222 | repo_name=repo2.repo_name, |
|
221 | repo_name=repo2.repo_name, | |
223 | org_ref_type=" |
|
222 | org_ref_type="rev", | |
224 | org_ref= |
|
223 | org_ref=cs1.short_id, # parent of cs2, in repo2 | |
225 | other_repo=repo1.repo_name, |
|
224 | other_repo=repo1.repo_name, | |
226 | other_ref_type=" |
|
225 | other_ref_type="rev", | |
227 | other_ref= |
|
226 | other_ref=cs4.short_id, | |
228 |
|
|
227 | merge='True', | |
229 | rev_end=cs4.raw_id, |
|
|||
230 | )) |
|
228 | )) | |
231 | response.mustcontain('%s@%s -> %s@%s' % (repo2.repo_name, cs |
|
229 | response.mustcontain('%s@%s -> %s@%s' % (repo2.repo_name, cs1.short_id, repo1.repo_name, cs4.short_id)) | |
232 | response.mustcontain("""Showing 3 commits""") |
|
230 | response.mustcontain("""Showing 3 commits""") | |
233 | response.mustcontain("""1 file changed with 3 insertions and 0 deletions""") |
|
231 | response.mustcontain("""1 file changed with 3 insertions and 0 deletions""") | |
234 |
|
232 | |||
@@ -280,21 +278,16 b' class TestCompareController(TestControll' | |||||
280 | message='commit5', vcs_type='hg', parent=cs3) |
|
278 | message='commit5', vcs_type='hg', parent=cs3) | |
281 | cs5 = _commit_change(repo1.repo_name, filename='file1', content='line1\nline2\nline3\nline4\nline5\nline6\n', |
|
279 | cs5 = _commit_change(repo1.repo_name, filename='file1', content='line1\nline2\nline3\nline4\nline5\nline6\n', | |
282 | message='commit6', vcs_type='hg', parent=cs4) |
|
280 | message='commit6', vcs_type='hg', parent=cs4) | |
283 | rev1 = 'tip' |
|
|||
284 | rev2 = 'tip' |
|
|||
285 |
|
||||
286 | response = self.app.get(url(controller='compare', action='index', |
|
281 | response = self.app.get(url(controller='compare', action='index', | |
287 | repo_name=repo |
|
282 | repo_name=repo1.repo_name, | |
288 | org_ref_type=" |
|
283 | org_ref_type="rev", | |
289 | org_ref= |
|
284 | org_ref=cs2.short_id, # parent of cs3, not in repo2 | |
290 | other_re |
|
285 | other_ref_type="rev", | |
291 | other_ref |
|
286 | other_ref=cs5.short_id, | |
292 |
|
|
287 | merge='1', | |
293 | rev_start=cs3.raw_id, |
|
|||
294 | rev_end=cs5.raw_id, |
|
|||
295 | )) |
|
288 | )) | |
296 |
|
289 | |||
297 | response.mustcontain('%s@%s -> %s@%s' % (repo |
|
290 | response.mustcontain('%s@%s -> %s@%s' % (repo1.repo_name, cs2.short_id, repo1.repo_name, cs5.short_id)) | |
298 | response.mustcontain("""Showing 3 commits""") |
|
291 | response.mustcontain("""Showing 3 commits""") | |
299 | response.mustcontain("""1 file changed with 3 insertions and 0 deletions""") |
|
292 | response.mustcontain("""1 file changed with 3 insertions and 0 deletions""") | |
300 |
|
293 | |||
@@ -330,6 +323,7 b' class TestCompareController(TestControll' | |||||
330 | other_ref_type="rev", |
|
323 | other_ref_type="rev", | |
331 | other_ref=rev2, |
|
324 | other_ref=rev2, | |
332 | other_repo=HG_FORK, |
|
325 | other_repo=HG_FORK, | |
|
326 | merge='1', | |||
333 | )) |
|
327 | )) | |
334 | response.mustcontain('%s@%s -> %s@%s' % (HG_REPO, rev1, HG_FORK, rev2)) |
|
328 | response.mustcontain('%s@%s -> %s@%s' % (HG_REPO, rev1, HG_FORK, rev2)) | |
335 | ## outgoing changesets between those revisions |
|
329 | ## outgoing changesets between those revisions | |
@@ -339,9 +333,9 b' class TestCompareController(TestControll' | |||||
339 | response.mustcontain("""<a href="/%s/changeset/7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7">r6:%s</a>""" % (HG_FORK, rev2)) |
|
333 | response.mustcontain("""<a href="/%s/changeset/7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7">r6:%s</a>""" % (HG_FORK, rev2)) | |
340 |
|
334 | |||
341 | ## files |
|
335 | ## files | |
342 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s#C--9c390eb52cd6">vcs/backends/hg.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) |
|
336 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s&merge=1#C--9c390eb52cd6">vcs/backends/hg.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) | |
343 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s#C--41b41c1f2796">vcs/backends/__init__.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) |
|
337 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s&merge=1#C--41b41c1f2796">vcs/backends/__init__.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) | |
344 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s#C--2f574d260608">vcs/backends/base.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) |
|
338 | response.mustcontain("""<a href="/%s/compare/rev@%s...rev@%s?other_repo=%s&merge=1#C--2f574d260608">vcs/backends/base.py</a>""" % (HG_REPO, rev1, rev2, HG_FORK)) | |
345 |
|
339 | |||
346 | def test_org_repo_new_commits_after_forking_simple_diff(self): |
|
340 | def test_org_repo_new_commits_after_forking_simple_diff(self): | |
347 | self.log_user() |
|
341 | self.log_user() | |
@@ -412,6 +406,7 b' class TestCompareController(TestControll' | |||||
412 | other_ref_type="branch", |
|
406 | other_ref_type="branch", | |
413 | other_ref=rev2, |
|
407 | other_ref=rev2, | |
414 | other_repo=r1_name, |
|
408 | other_repo=r1_name, | |
|
409 | merge='1', | |||
415 | )) |
|
410 | )) | |
416 | response.mustcontain('%s@%s -> %s@%s' % (r2_name, rev1, r1_name, rev2)) |
|
411 | response.mustcontain('%s@%s -> %s@%s' % (r2_name, rev1, r1_name, rev2)) | |
417 | response.mustcontain('No files') |
|
412 | response.mustcontain('No files') | |
@@ -436,6 +431,7 b' class TestCompareController(TestControll' | |||||
436 | other_ref_type="branch", |
|
431 | other_ref_type="branch", | |
437 | other_ref=rev2, |
|
432 | other_ref=rev2, | |
438 | other_repo=r1_name, |
|
433 | other_repo=r1_name, | |
|
434 | merge='1', | |||
439 | )) |
|
435 | )) | |
440 |
|
436 | |||
441 | response.mustcontain('%s@%s -> %s@%s' % (r2_name, rev1, r1_name, rev2)) |
|
437 | response.mustcontain('%s@%s -> %s@%s' % (r2_name, rev1, r1_name, rev2)) |
@@ -21,6 +21,33 b" def _make_group(path, desc='desc', paren" | |||||
21 | return gr |
|
21 | return gr | |
22 |
|
22 | |||
23 |
|
23 | |||
|
24 | def _update_group(id_, group_name, desc='desc', parent_id=None): | |||
|
25 | form_data = _get_group_create_params(group_name=group_name, | |||
|
26 | group_desc=desc, | |||
|
27 | group_parent_id=parent_id) | |||
|
28 | gr = ReposGroupModel().update(id_, form_data) | |||
|
29 | return gr | |||
|
30 | ||||
|
31 | ||||
|
32 | def _make_repo(name, **kwargs): | |||
|
33 | form_data = _get_repo_create_params(repo_name=name, **kwargs) | |||
|
34 | cur_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) | |||
|
35 | r = RepoModel().create(form_data, cur_user) | |||
|
36 | return r | |||
|
37 | ||||
|
38 | ||||
|
39 | def _update_repo(name, **kwargs): | |||
|
40 | form_data = _get_repo_create_params(**kwargs) | |||
|
41 | if not 'repo_name' in kwargs: | |||
|
42 | form_data['repo_name'] = name | |||
|
43 | if not 'perms_new' in kwargs: | |||
|
44 | form_data['perms_new'] = [] | |||
|
45 | if not 'perms_updates' in kwargs: | |||
|
46 | form_data['perms_updates'] = [] | |||
|
47 | r = RepoModel().update(name, **form_data) | |||
|
48 | return r | |||
|
49 | ||||
|
50 | ||||
24 | class TestReposGroups(unittest.TestCase): |
|
51 | class TestReposGroups(unittest.TestCase): | |
25 |
|
52 | |||
26 | def setUp(self): |
|
53 | def setUp(self): | |
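
For context, a minimal sketch of how the new _make_repo/_update_repo helpers are meant to be combined, mirroring the 'john'/'g1' test rewritten further down in this changeset; 'projects' and 'docs' are placeholder names, not part of the diff.

    # illustration only: create a group and a repo, then move the repo into the group
    g = _make_group('projects')
    Session().commit()
    r = _make_repo('docs')
    Session().commit()
    r = _update_repo('docs', repo_group=g.group_id)
    Session().commit()
    # after the update the repo name is expected to carry the group prefix
    assert r.repo_name == 'projects/docs'
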
@@ -32,7 +59,7 b' class TestReposGroups(unittest.TestCase)' | |||||
32 | Session().commit() |
|
59 | Session().commit() | |
33 |
|
60 | |||
34 | def tearDown(self): |
|
61 | def tearDown(self): | |
35 | print 'out' |
|
62 | Session.remove() | |
36 |
|
63 | |||
37 | def __check_path(self, *path): |
|
64 | def __check_path(self, *path): | |
38 | """ |
|
65 | """ | |
@@ -48,21 +75,9 b' class TestReposGroups(unittest.TestCase)' | |||||
48 | def __delete_group(self, id_): |
|
75 | def __delete_group(self, id_): | |
49 | ReposGroupModel().delete(id_) |
|
76 | ReposGroupModel().delete(id_) | |
50 |
|
77 | |||
51 | def __update_group(self, id_, path, desc='desc', parent_id=None): |
|
|||
52 | form_data = dict( |
|
|||
53 | group_name=path, |
|
|||
54 | group_description=desc, |
|
|||
55 | group_parent_id=parent_id, |
|
|||
56 | perms_updates=[], |
|
|||
57 | perms_new=[], |
|
|||
58 | enable_locking=False, |
|
|||
59 | recursive=False |
|
|||
60 | ) |
|
|||
61 | gr = ReposGroupModel().update(id_, form_data) |
|
|||
62 | return gr |
|
|||
63 |
|
||||
64 | def test_create_group(self): |
|
78 | def test_create_group(self): | |
65 | g = _make_group('newGroup') |
|
79 | g = _make_group('newGroup') | |
|
80 | Session().commit() | |||
66 | self.assertEqual(g.full_path, 'newGroup') |
|
81 | self.assertEqual(g.full_path, 'newGroup') | |
67 |
|
82 | |||
68 | self.assertTrue(self.__check_path('newGroup')) |
|
83 | self.assertTrue(self.__check_path('newGroup')) | |
@@ -73,23 +88,27 b' class TestReposGroups(unittest.TestCase)' | |||||
73 |
|
88 | |||
74 | def test_same_subgroup(self): |
|
89 | def test_same_subgroup(self): | |
75 | sg1 = _make_group('sub1', parent_id=self.g1.group_id) |
|
90 | sg1 = _make_group('sub1', parent_id=self.g1.group_id) | |
|
91 | Session().commit() | |||
76 | self.assertEqual(sg1.parent_group, self.g1) |
|
92 | self.assertEqual(sg1.parent_group, self.g1) | |
77 | self.assertEqual(sg1.full_path, 'test1/sub1') |
|
93 | self.assertEqual(sg1.full_path, 'test1/sub1') | |
78 | self.assertTrue(self.__check_path('test1', 'sub1')) |
|
94 | self.assertTrue(self.__check_path('test1', 'sub1')) | |
79 |
|
95 | |||
80 | ssg1 = _make_group('subsub1', parent_id=sg1.group_id) |
|
96 | ssg1 = _make_group('subsub1', parent_id=sg1.group_id) | |
|
97 | Session().commit() | |||
81 | self.assertEqual(ssg1.parent_group, sg1) |
|
98 | self.assertEqual(ssg1.parent_group, sg1) | |
82 | self.assertEqual(ssg1.full_path, 'test1/sub1/subsub1') |
|
99 | self.assertEqual(ssg1.full_path, 'test1/sub1/subsub1') | |
83 | self.assertTrue(self.__check_path('test1', 'sub1', 'subsub1')) |
|
100 | self.assertTrue(self.__check_path('test1', 'sub1', 'subsub1')) | |
84 |
|
101 | |||
85 | def test_remove_group(self): |
|
102 | def test_remove_group(self): | |
86 | sg1 = _make_group('deleteme') |
|
103 | sg1 = _make_group('deleteme') | |
|
104 | Session().commit() | |||
87 | self.__delete_group(sg1.group_id) |
|
105 | self.__delete_group(sg1.group_id) | |
88 |
|
106 | |||
89 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
107 | self.assertEqual(RepoGroup.get(sg1.group_id), None) | |
90 | self.assertFalse(self.__check_path('deteteme')) |
|
108 | self.assertFalse(self.__check_path('deteteme')) | |
91 |
|
109 | |||
92 | sg1 = _make_group('deleteme', parent_id=self.g1.group_id) |
|
110 | sg1 = _make_group('deleteme', parent_id=self.g1.group_id) | |
|
111 | Session().commit() | |||
93 | self.__delete_group(sg1.group_id) |
|
112 | self.__delete_group(sg1.group_id) | |
94 |
|
113 | |||
95 | self.assertEqual(RepoGroup.get(sg1.group_id), None) |
|
114 | self.assertEqual(RepoGroup.get(sg1.group_id), None) | |
@@ -97,24 +116,26 b' class TestReposGroups(unittest.TestCase)' | |||||
97 |
|
116 | |||
98 | def test_rename_single_group(self): |
|
117 | def test_rename_single_group(self): | |
99 | sg1 = _make_group('initial') |
|
118 | sg1 = _make_group('initial') | |
|
119 | Session().commit() | |||
100 |
|
120 | |||
101 | new_sg1 = |
|
121 | new_sg1 = _update_group(sg1.group_id, 'after') | |
102 | self.assertTrue(self.__check_path('after')) |
|
122 | self.assertTrue(self.__check_path('after')) | |
103 | self.assertEqual(RepoGroup.get_by_group_name('initial'), None) |
|
123 | self.assertEqual(RepoGroup.get_by_group_name('initial'), None) | |
104 |
|
124 | |||
105 | def test_update_group_parent(self): |
|
125 | def test_update_group_parent(self): | |
106 |
|
126 | |||
107 | sg1 = _make_group('initial', parent_id=self.g1.group_id) |
|
127 | sg1 = _make_group('initial', parent_id=self.g1.group_id) | |
|
128 | Session().commit() | |||
108 |
|
129 | |||
109 | new_sg1 = |
|
130 | new_sg1 = _update_group(sg1.group_id, 'after', parent_id=self.g1.group_id) | |
110 | self.assertTrue(self.__check_path('test1', 'after')) |
|
131 | self.assertTrue(self.__check_path('test1', 'after')) | |
111 | self.assertEqual(RepoGroup.get_by_group_name('test1/initial'), None) |
|
132 | self.assertEqual(RepoGroup.get_by_group_name('test1/initial'), None) | |
112 |
|
133 | |||
113 | new_sg1 = |
|
134 | new_sg1 = _update_group(sg1.group_id, 'after', parent_id=self.g3.group_id) | |
114 | self.assertTrue(self.__check_path('test3', 'after')) |
|
135 | self.assertTrue(self.__check_path('test3', 'after')) | |
115 | self.assertEqual(RepoGroup.get_by_group_name('test3/initial'), None) |
|
136 | self.assertEqual(RepoGroup.get_by_group_name('test3/initial'), None) | |
116 |
|
137 | |||
117 | new_sg1 = |
|
138 | new_sg1 = _update_group(sg1.group_id, 'hello') | |
118 | self.assertTrue(self.__check_path('hello')) |
|
139 | self.assertTrue(self.__check_path('hello')) | |
119 |
|
140 | |||
120 | self.assertEqual(RepoGroup.get_by_group_name('hello'), new_sg1) |
|
141 | self.assertEqual(RepoGroup.get_by_group_name('hello'), new_sg1) | |
@@ -123,23 +144,17 b' class TestReposGroups(unittest.TestCase)' | |||||
123 |
|
144 | |||
124 | g1 = _make_group('g1') |
|
145 | g1 = _make_group('g1') | |
125 | g2 = _make_group('g2') |
|
146 | g2 = _make_group('g2') | |
126 |
|
147 | Session().commit() | ||
127 | # create new repo |
|
148 | # create new repo | |
128 | form_data = _get_repo_create_params(repo_name='john') |
|
149 | r = _make_repo('john') | |
129 | cur_user = User.get_by_username(TEST_USER_ADMIN_LOGIN) |
|
150 | Session().commit() | |
130 | r = RepoModel().create(form_data, cur_user) |
|
|||
131 |
|
||||
132 | self.assertEqual(r.repo_name, 'john') |
|
151 | self.assertEqual(r.repo_name, 'john') | |
133 |
|
||||
134 | # put repo into group |
|
152 | # put repo into group | |
135 | form_data = form_data |
|
153 | r = _update_repo('john', repo_group=g1.group_id) | |
136 | form_data['repo_group'] = g1.group_id |
|
154 | Session().commit() | |
137 | form_data['perms_new'] = [] |
|
|||
138 | form_data['perms_updates'] = [] |
|
|||
139 | RepoModel().update(r.repo_name, **form_data) |
|
|||
140 | self.assertEqual(r.repo_name, 'g1/john') |
|
155 | self.assertEqual(r.repo_name, 'g1/john') | |
141 |
|
156 | |||
142 |
|
|
157 | _update_group(g1.group_id, 'g1', parent_id=g2.group_id) | |
143 | self.assertTrue(self.__check_path('g2', 'g1')) |
|
158 | self.assertTrue(self.__check_path('g2', 'g1')) | |
144 |
|
159 | |||
145 | # test repo |
|
160 | # test repo | |
@@ -155,7 +170,7 b' class TestReposGroups(unittest.TestCase)' | |||||
155 | self.assertEqual(g2.full_path, 't11/t22') |
|
170 | self.assertEqual(g2.full_path, 't11/t22') | |
156 | self.assertTrue(self.__check_path('t11', 't22')) |
|
171 | self.assertTrue(self.__check_path('t11', 't22')) | |
157 |
|
172 | |||
158 | g2 = |
|
173 | g2 = _update_group(g2.group_id, 'g22', parent_id=None) | |
159 | Session().commit() |
|
174 | Session().commit() | |
160 |
|
175 | |||
161 | self.assertEqual(g2.group_name, 'g22') |
|
176 | self.assertEqual(g2.group_name, 'g22') | |
@@ -163,3 +178,65 b' class TestReposGroups(unittest.TestCase)' | |||||
163 | self.assertEqual(g2.full_path, 'g22') |
|
178 | self.assertEqual(g2.full_path, 'g22') | |
164 | self.assertFalse(self.__check_path('t11', 't22')) |
|
179 | self.assertFalse(self.__check_path('t11', 't22')) | |
165 | self.assertTrue(self.__check_path('g22')) |
|
180 | self.assertTrue(self.__check_path('g22')) | |
|
181 | ||||
|
182 | def test_rename_top_level_group_in_nested_setup(self): | |||
|
183 | g1 = _make_group('L1') | |||
|
184 | Session().commit() | |||
|
185 | g2 = _make_group('L2', parent_id=g1.group_id) | |||
|
186 | Session().commit() | |||
|
187 | g3 = _make_group('L3', parent_id=g2.group_id) | |||
|
188 | Session().commit() | |||
|
189 | ||||
|
190 | r = _make_repo('L1/L2/L3/L3_REPO', repo_group=g3.group_id) | |||
|
191 | Session().commit() | |||
|
192 | ||||
|
193 | ##rename L1 all groups should be now changed | |||
|
194 | _update_group(g1.group_id, 'L1_NEW') | |||
|
195 | Session().commit() | |||
|
196 | self.assertEqual(g1.full_path, 'L1_NEW') | |||
|
197 | self.assertEqual(g2.full_path, 'L1_NEW/L2') | |||
|
198 | self.assertEqual(g3.full_path, 'L1_NEW/L2/L3') | |||
|
199 | self.assertEqual(r.repo_name, 'L1_NEW/L2/L3/L3_REPO') | |||
|
200 | ||||
|
201 | def test_change_parent_of_top_level_group_in_nested_setup(self): | |||
|
202 | g1 = _make_group('R1') | |||
|
203 | Session().commit() | |||
|
204 | g2 = _make_group('R2', parent_id=g1.group_id) | |||
|
205 | Session().commit() | |||
|
206 | g3 = _make_group('R3', parent_id=g2.group_id) | |||
|
207 | Session().commit() | |||
|
208 | ||||
|
209 | g4 = _make_group('R1_NEW') | |||
|
210 | Session().commit() | |||
|
211 | ||||
|
212 | r = _make_repo('R1/R2/R3/R3_REPO', repo_group=g3.group_id) | |||
|
213 | Session().commit() | |||
|
214 | ##rename L1 all groups should be now changed | |||
|
215 | _update_group(g1.group_id, 'R1', parent_id=g4.group_id) | |||
|
216 | Session().commit() | |||
|
217 | self.assertEqual(g1.full_path, 'R1_NEW/R1') | |||
|
218 | self.assertEqual(g2.full_path, 'R1_NEW/R1/R2') | |||
|
219 | self.assertEqual(g3.full_path, 'R1_NEW/R1/R2/R3') | |||
|
220 | self.assertEqual(r.repo_name, 'R1_NEW/R1/R2/R3/R3_REPO') | |||
|
221 | ||||
|
222 | def test_change_parent_of_top_level_group_in_nested_setup_with_rename(self): | |||
|
223 | g1 = _make_group('X1') | |||
|
224 | Session().commit() | |||
|
225 | g2 = _make_group('X2', parent_id=g1.group_id) | |||
|
226 | Session().commit() | |||
|
227 | g3 = _make_group('X3', parent_id=g2.group_id) | |||
|
228 | Session().commit() | |||
|
229 | ||||
|
230 | g4 = _make_group('X1_NEW') | |||
|
231 | Session().commit() | |||
|
232 | ||||
|
233 | r = _make_repo('X1/X2/X3/X3_REPO', repo_group=g3.group_id) | |||
|
234 | Session().commit() | |||
|
235 | ||||
|
236 | ##rename L1 all groups should be now changed | |||
|
237 | _update_group(g1.group_id, 'X1_PRIM', parent_id=g4.group_id) | |||
|
238 | Session().commit() | |||
|
239 | self.assertEqual(g1.full_path, 'X1_NEW/X1_PRIM') | |||
|
240 | self.assertEqual(g2.full_path, 'X1_NEW/X1_PRIM/X2') | |||
|
241 | self.assertEqual(g3.full_path, 'X1_NEW/X1_PRIM/X2/X3') | |||
|
242 | self.assertEqual(r.repo_name, 'X1_NEW/X1_PRIM/X2/X3/X3_REPO') |
@@ -4,9 +4,10 b'' | |||||
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Test suite for making push/pull operations. |
|
6 | Test suite for making push/pull operations. | |
7 | Run using:: |
|
7 | Run using after doing paster serve test.ini:: | |
|
8 | RC_WHOOSH_TEST_DISABLE=1 RC_NO_TMP_PATH=1 nosetests rhodecode/tests/scripts/test_vcs_operations.py | |||
8 |
|
9 | |||
9 | RC_WHOOSH_TEST_DISABLE=1 RC_NO_TMP_PATH=1 nosetests rhodecode/tests/scripts/test_vcs_operations.py |
|
10 | You must have git > 1.8.1 for tests to work fine | |
10 |
|
11 | |||
11 | :created_on: Dec 30, 2010 |
|
12 | :created_on: Dec 30, 2010 | |
12 | :author: marcink |
|
13 | :author: marcink | |
@@ -107,13 +108,14 b' def _add_files_and_push(vcs, DEST, **kwa' | |||||
107 | for i in xrange(3): |
|
108 | for i in xrange(3): | |
108 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) |
|
109 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) | |
109 | Command(cwd).execute(cmd) |
|
110 | Command(cwd).execute(cmd) | |
|
111 | author_str = 'Marcin Kuźminski <me@email.com>' | |||
110 | if vcs == 'hg': |
|
112 | if vcs == 'hg': | |
111 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( |
|
113 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( | |
112 | i, |
|
114 | i, author_str, added_file | |
113 | ) |
|
115 | ) | |
114 | elif vcs == 'git': |
|
116 | elif vcs == 'git': | |
115 | cmd = """git commit -m 'commited new %s' --author '%s' %s """ % ( |
|
117 | cmd = """git commit -m 'commited new %s' --author '%s' %s """ % ( | |
116 | i, |
|
118 | i, author_str, added_file | |
117 | ) |
|
119 | ) | |
118 | Command(cwd).execute(cmd) |
|
120 | Command(cwd).execute(cmd) | |
119 | # PUSH it back |
|
121 | # PUSH it back | |
@@ -129,7 +131,7 b' def _add_files_and_push(vcs, DEST, **kwa' | |||||
129 | if vcs == 'hg': |
|
131 | if vcs == 'hg': | |
130 | stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url) |
|
132 | stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url) | |
131 | elif vcs == 'git': |
|
133 | elif vcs == 'git': | |
132 | stdout, stderr = Command(cwd).execute('git push', clone_url + " master") |
|
134 | stdout, stderr = Command(cwd).execute('git push --verbose', clone_url + " master") | |
133 |
|
135 | |||
134 | return stdout, stderr |
|
136 | return stdout, stderr | |
135 |
|
137 | |||
@@ -324,8 +326,7 b' class TestVCSOperations(unittest.TestCas' | |||||
324 | #pull fails since repo is locked |
|
326 | #pull fails since repo is locked | |
325 | clone_url = _construct_url(GIT_REPO) |
|
327 | clone_url = _construct_url(GIT_REPO) | |
326 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
328 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) | |
327 | msg = ("""423 Repository `%s` locked by user `%s`""" |
|
329 | msg = ("""The requested URL returned error: 423""") | |
328 | % (GIT_REPO, TEST_USER_ADMIN_LOGIN)) |
|
|||
329 | assert msg in stderr |
|
330 | assert msg in stderr | |
330 |
|
331 | |||
331 | def test_push_on_locked_repo_by_other_user_hg(self): |
|
332 | def test_push_on_locked_repo_by_other_user_hg(self): | |
@@ -455,7 +456,8 b' class TestVCSOperations(unittest.TestCas' | |||||
455 | Session().commit() |
|
456 | Session().commit() | |
456 | clone_url = _construct_url(GIT_REPO) |
|
457 | clone_url = _construct_url(GIT_REPO) | |
457 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) |
|
458 | stdout, stderr = Command('/tmp').execute('git clone', clone_url) | |
458 |
|
|
459 | msg = ("""The requested URL returned error: 403""") | |
|
460 | assert msg in stderr | |||
459 | finally: |
|
461 | finally: | |
460 | #release IP restrictions |
|
462 | #release IP restrictions | |
461 | for ip in UserIpMap.getAll(): |
|
463 | for ip in UserIpMap.getAll(): |
@@ -7,6 +7,7 b' import time' | |||||
7 | import hashlib |
|
7 | import hashlib | |
8 | import tempfile |
|
8 | import tempfile | |
9 | import datetime |
|
9 | import datetime | |
|
10 | import shutil | |||
10 | from rhodecode.tests import * |
|
11 | from rhodecode.tests import * | |
11 | from utils import get_normalized_path |
|
12 | from utils import get_normalized_path | |
12 | from os.path import join as jn |
|
13 | from os.path import join as jn | |
@@ -58,5 +59,6 b' THIS = os.path.abspath(os.path.dirname(_' | |||||
58 |
|
59 | |||
59 | PACKAGE_DIR = os.path.abspath(os.path.join( |
|
60 | PACKAGE_DIR = os.path.abspath(os.path.join( | |
60 | os.path.dirname(__file__), '..')) |
|
61 | os.path.dirname(__file__), '..')) | |
61 |
|
62 | _dest = jn(TESTS_TMP_PATH,'aconfig') | ||
62 | TEST_USER_CONFIG_FILE = jn(THIS, 'aconfig') |
|
63 | shutil.copy(jn(THIS, 'aconfig'), _dest) | |
|
64 | TEST_USER_CONFIG_FILE = _dest |
@@ -62,10 +62,10 b' if sys.version_info < (2, 7):' | |||||
62 | requirements.append("argparse") |
|
62 | requirements.append("argparse") | |
63 |
|
63 | |||
64 | if is_windows: |
|
64 | if is_windows: | |
65 | requirements.append("mercurial==2.5. |
|
65 | requirements.append("mercurial==2.5.2") | |
66 | else: |
|
66 | else: | |
67 | requirements.append("py-bcrypt") |
|
67 | requirements.append("py-bcrypt") | |
68 | requirements.append("mercurial==2.5. |
|
68 | requirements.append("mercurial==2.5.2") | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | dependency_links = [ |
|
71 | dependency_links = [ |
@@ -31,22 +31,24 b' pdebug = false' | |||||
31 | [server:main] |
|
31 | [server:main] | |
32 | ## PASTE |
|
32 | ## PASTE | |
33 | ##nr of threads to spawn |
|
33 | ##nr of threads to spawn | |
34 | threadpool_workers = 5 |
|
34 | #threadpool_workers = 5 | |
35 |
|
35 | |||
36 | ##max request before thread respawn |
|
36 | ##max request before thread respawn | |
37 | threadpool_max_requests = 10 |
|
37 | #threadpool_max_requests = 10 | |
38 |
|
38 | |||
39 | ##option to use threads of process |
|
39 | ##option to use threads of process | |
40 | use_threadpool = true |
|
40 | #use_threadpool = true | |
41 |
|
41 | |||
42 | use = egg:Paste#http |
|
42 | #use = egg:Paste#http | |
43 |
|
43 | |||
44 | #WAITRESS |
|
44 | #WAITRESS | |
45 | threads = 5 |
|
45 | threads = 5 | |
|
46 | #100GB | |||
|
47 | max_request_body_size = 107374182400 | |||
46 | use = egg:waitress#main |
|
48 | use = egg:waitress#main | |
47 |
|
49 | |||
48 | host = 127.0.0.1 |
|
50 | host = 127.0.0.1 | |
49 | port = |
|
51 | port = 5000 | |
50 |
|
52 | |||
51 | [filter:proxy-prefix] |
|
53 | [filter:proxy-prefix] | |
52 | # prefix middleware for rc |
|
54 | # prefix middleware for rc | |
@@ -63,6 +65,8 b' static_files = true'
 lang = en
 cache_dir = /tmp/rc/data
 index_dir = /tmp/rc/index
+# set this path to use archive download cache
+#archive_cache_dir = /tmp/rhodecode_tarballcache
 app_instance_uuid = develop-test
 cut_off_limit = 256000
 vcs_full_cache = False
@@ -250,6 +254,87 b' beaker.session.auto = False'
 #beaker.session.cookie_expires = 3600
 
 
+############################
+## ERROR HANDLING SYSTEMS ##
+############################
+
+####################
+### [errormator] ###
+####################
+
+# Errormator is tailored to work with RhodeCode, see
+# http://errormator.com for details how to obtain an account
+# you must install python package `errormator_client` to make it work
+
+# errormator enabled
+errormator = true
+
+errormator.server_url = https://api.errormator.com
+errormator.api_key = YOUR_API_KEY
+
+# TWEAK AMOUNT OF INFO SENT HERE
+
+# enables 404 error logging (default False)
+errormator.report_404 = false
+
+# time in seconds after request is considered being slow (default 1)
+errormator.slow_request_time = 1
+
+# record slow requests in application
+# (needs to be enabled for slow datastore recording and time tracking)
+errormator.slow_requests = true
+
+# enable hooking to application loggers
+# errormator.logging = true
+
+# minimum log level for log capture
+# errormator.logging.level = WARNING
+
+# send logs only from erroneous/slow requests
+# (saves API quota for intensive logging)
+errormator.logging_on_error = false
+
+# list of additonal keywords that should be grabbed from environ object
+# can be string with comma separated list of words in lowercase
+# (by default client will always send following info:
+# 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
+# start with HTTP* this list be extended with additional keywords here
+errormator.environ_keys_whitelist =
+
+
+# list of keywords that should be blanked from request object
+# can be string with comma separated list of words in lowercase
+# (by default client will always blank keys that contain following words
+# 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
+# this list be extended with additional keywords set here
+errormator.request_keys_blacklist =
+
+
+# list of namespaces that should be ignores when gathering log entries
+# can be string with comma separated list of namespaces
+# (by default the client ignores own entries: errormator_client.client)
+errormator.log_namespace_blacklist =
+
+
+################
+### [sentry] ###
+################
+
+# sentry is a alternative open source error aggregator
+# you must install python packages `sentry` and `raven` to enable
+
+sentry.dsn = YOUR_DNS
+sentry.servers =
+sentry.name =
+sentry.key =
+sentry.public_key =
+sentry.secret_key =
+sentry.project =
+sentry.site =
+sentry.include_paths =
+sentry.exclude_paths =
+
+
 ################################################################################
 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
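A note on the sentry block: raven-style clients accept either a single DSN or the individual servers/public_key/secret_key/project values, since a DSN of the form {scheme}://{public_key}:{secret_key}@{host}/{project_id} packs all of them into one URL (the `YOUR_DNS` placeholder above is presumably meant to read DSN). A small illustrative parser, with a made-up DSN value:

from urlparse import urlsplit  # Python 2, matching the codebase

def split_dsn(dsn):
    # break a raven/sentry DSN into the separate sentry.* settings
    parts = urlsplit(dsn)
    return {
        'servers': '%s://%s' % (parts.scheme, parts.hostname),
        'public_key': parts.username,
        'secret_key': parts.password,
        'project': parts.path.lstrip('/'),
    }

print split_dsn('https://pubkey:secretkey@errors.example.com/2')
# -> servers https://errors.example.com, public_key pubkey,
#    secret_key secretkey, project 2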
@@ -270,7 +355,6 b' logview.pylons.util = #eee'
 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode_test.sqlite
 #sqlalchemy.db1.url = postgresql://postgres:qwe@localhost/rhodecode_test
 #sqlalchemy.db1.url = mysql://root:qwe@localhost/rhodecode_test
-
 sqlalchemy.db1.echo = false
 sqlalchemy.db1.pool_recycle = 3600
 sqlalchemy.db1.convert_unicode = true
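The sqlalchemy.db1.* group is a standard SQLAlchemy engine prefix; the hunk above only drops a stray blank line. For reference, a sketch of how such prefixed keys are typically turned into an engine, using SQLAlchemy's public engine_from_config helper (not necessarily the exact bootstrap code RhodeCode runs):

from sqlalchemy import engine_from_config

settings = {
    'sqlalchemy.db1.url': 'sqlite:///rhodecode_test.sqlite',
    'sqlalchemy.db1.echo': 'false',
    'sqlalchemy.db1.pool_recycle': '3600',
    'sqlalchemy.db1.convert_unicode': 'true',
}
# the helper coerces string values (echo -> bool, pool_recycle -> int)
engine = engine_from_config(settings, prefix='sqlalchemy.db1.')
print engine.url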