##// END OF EJS Templates
validators: fix url_validator tests and make it flag controllable.
marcink -
r3072:fe39713b default
parent child Browse files
Show More
@@ -1,53 +1,55 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.tests import TESTS_TMP_PATH
25 from rhodecode.tests import TESTS_TMP_PATH
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_ok, assert_error)
27 build_data, api_call, assert_ok, assert_error)
28
28
29
29
30 @pytest.mark.usefixtures("testuser_api", "app")
30 @pytest.mark.usefixtures("testuser_api", "app")
31 class TestPull(object):
31 class TestPull(object):
32
32 @pytest.mark.backends("git", "hg")
33 @pytest.mark.backends("git", "hg")
33 def test_api_pull(self, backend):
34 def test_api_pull(self, backend):
34 r = backend.create_repo()
35 r = backend.create_repo()
35 repo_name = r.repo_name
36 repo_name = r.repo_name
36 clone_uri = os.path.join(TESTS_TMP_PATH, backend.repo_name)
37 clone_uri = os.path.join(TESTS_TMP_PATH, backend.repo_name)
37 r.clone_uri = clone_uri
38 r.clone_uri = clone_uri
38
39
39 id_, params = build_data(self.apikey, 'pull', repoid=repo_name,)
40 id_, params = build_data(self.apikey, 'pull', repoid=repo_name,)
40 response = api_call(self.app, params)
41 with mock.patch('rhodecode.model.scm.url_validator'):
41 msg = 'Pulled from url `%s` on repo `%s`' % (
42 response = api_call(self.app, params)
42 clone_uri, repo_name)
43 msg = 'Pulled from url `%s` on repo `%s`' % (
43 expected = {'msg': msg,
44 clone_uri, repo_name)
44 'repository': repo_name}
45 expected = {'msg': msg,
45 assert_ok(id_, expected, given=response.body)
46 'repository': repo_name}
47 assert_ok(id_, expected, given=response.body)
46
48
47 def test_api_pull_error(self, backend):
49 def test_api_pull_error(self, backend):
48 id_, params = build_data(
50 id_, params = build_data(
49 self.apikey, 'pull', repoid=backend.repo_name)
51 self.apikey, 'pull', repoid=backend.repo_name)
50 response = api_call(self.app, params)
52 response = api_call(self.app, params)
51
53
52 expected = 'Unable to pull changes from `None`'
54 expected = 'Unable to pull changes from `None`'
53 assert_error(id_, expected, given=response.body)
55 assert_error(id_, expected, given=response.body)
@@ -1,197 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.plugin import http_host_stub, http_host_only_stub
29 from rhodecode.tests.plugin import http_host_stub, http_host_only_stub
30
30
31 fixture = Fixture()
31 fixture = Fixture()
32
32
33 UPDATE_REPO_NAME = 'api_update_me'
33 UPDATE_REPO_NAME = 'api_update_me'
34
34
35
35
36 class SAME_AS_UPDATES(object):
36 class SAME_AS_UPDATES(object):
37 """ Constant used for tests below """
37 """ Constant used for tests below """
38
38
39
39
40 @pytest.mark.usefixtures("testuser_api", "app")
40 @pytest.mark.usefixtures("testuser_api", "app")
41 class TestApiUpdateRepo(object):
41 class TestApiUpdateRepo(object):
42
42
43 @pytest.mark.parametrize("updates, expected", [
43 @pytest.mark.parametrize("updates, expected", [
44 ({'owner': TEST_USER_REGULAR_LOGIN},
44 ({'owner': TEST_USER_REGULAR_LOGIN},
45 SAME_AS_UPDATES),
45 SAME_AS_UPDATES),
46
46
47 ({'description': 'new description'},
47 ({'description': 'new description'},
48 SAME_AS_UPDATES),
48 SAME_AS_UPDATES),
49
49
50 ({'clone_uri': 'http://foo.com/repo'},
50 ({'clone_uri': 'http://foo.com/repo'},
51 SAME_AS_UPDATES),
51 SAME_AS_UPDATES),
52
52
53 ({'clone_uri': None},
53 ({'clone_uri': None},
54 {'clone_uri': ''}),
54 {'clone_uri': ''}),
55
55
56 ({'clone_uri': ''},
56 ({'clone_uri': ''},
57 {'clone_uri': ''}),
57 {'clone_uri': ''}),
58
58
59 ({'clone_uri': 'http://example.com/repo_pull'},
60 {'clone_uri': 'http://example.com/repo_pull'}),
61
59 ({'push_uri': ''},
62 ({'push_uri': ''},
60 {'push_uri': ''}),
63 {'push_uri': ''}),
61
64
65 ({'push_uri': 'http://example.com/repo_push'},
66 {'push_uri': 'http://example.com/repo_push'}),
67
62 ({'landing_rev': 'rev:tip'},
68 ({'landing_rev': 'rev:tip'},
63 {'landing_rev': ['rev', 'tip']}),
69 {'landing_rev': ['rev', 'tip']}),
64
70
65 ({'enable_statistics': True},
71 ({'enable_statistics': True},
66 SAME_AS_UPDATES),
72 SAME_AS_UPDATES),
67
73
68 ({'enable_locking': True},
74 ({'enable_locking': True},
69 SAME_AS_UPDATES),
75 SAME_AS_UPDATES),
70
76
71 ({'enable_downloads': True},
77 ({'enable_downloads': True},
72 SAME_AS_UPDATES),
78 SAME_AS_UPDATES),
73
79
74 ({'repo_name': 'new_repo_name'},
80 ({'repo_name': 'new_repo_name'},
75 {
81 {
76 'repo_name': 'new_repo_name',
82 'repo_name': 'new_repo_name',
77 'url': 'http://{}/new_repo_name'.format(http_host_only_stub())
83 'url': 'http://{}/new_repo_name'.format(http_host_only_stub())
78 }),
84 }),
79
85
80 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
86 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
81 '_group': 'test_group_for_update'},
87 '_group': 'test_group_for_update'},
82 {
88 {
83 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
89 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
84 'url': 'http://{}/test_group_for_update/{}'.format(
90 'url': 'http://{}/test_group_for_update/{}'.format(
85 http_host_only_stub(), UPDATE_REPO_NAME)
91 http_host_only_stub(), UPDATE_REPO_NAME)
86 }),
92 }),
87 ])
93 ])
88 def test_api_update_repo(self, updates, expected, backend):
94 def test_api_update_repo(self, updates, expected, backend):
89 repo_name = UPDATE_REPO_NAME
95 repo_name = UPDATE_REPO_NAME
90 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
96 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
91 if updates.get('_group'):
97 if updates.get('_group'):
92 fixture.create_repo_group(updates['_group'])
98 fixture.create_repo_group(updates['_group'])
93
99
94 expected_api_data = repo.get_api_data(include_secrets=True)
100 expected_api_data = repo.get_api_data(include_secrets=True)
95 if expected is SAME_AS_UPDATES:
101 if expected is SAME_AS_UPDATES:
96 expected_api_data.update(updates)
102 expected_api_data.update(updates)
97 else:
103 else:
98 expected_api_data.update(expected)
104 expected_api_data.update(expected)
99
105
100 id_, params = build_data(
106 id_, params = build_data(
101 self.apikey, 'update_repo', repoid=repo_name, **updates)
107 self.apikey, 'update_repo', repoid=repo_name, **updates)
102
108
103 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
109 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
104 response = api_call(self.app, params)
110 response = api_call(self.app, params)
105
111
106 if updates.get('repo_name'):
112 if updates.get('repo_name'):
107 repo_name = updates['repo_name']
113 repo_name = updates['repo_name']
108
114
109 try:
115 try:
110 expected = {
116 expected = {
111 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
117 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
112 'repository': jsonify(expected_api_data)
118 'repository': jsonify(expected_api_data)
113 }
119 }
114 assert_ok(id_, expected, given=response.body)
120 assert_ok(id_, expected, given=response.body)
115 finally:
121 finally:
116 fixture.destroy_repo(repo_name)
122 fixture.destroy_repo(repo_name)
117 if updates.get('_group'):
123 if updates.get('_group'):
118 fixture.destroy_repo_group(updates['_group'])
124 fixture.destroy_repo_group(updates['_group'])
119
125
120 def test_api_update_repo_fork_of_field(self, backend):
126 def test_api_update_repo_fork_of_field(self, backend):
121 master_repo = backend.create_repo()
127 master_repo = backend.create_repo()
122 repo = backend.create_repo()
128 repo = backend.create_repo()
123 updates = {
129 updates = {
124 'fork_of': master_repo.repo_name,
130 'fork_of': master_repo.repo_name,
125 'fork_of_id': master_repo.repo_id
131 'fork_of_id': master_repo.repo_id
126 }
132 }
127 expected_api_data = repo.get_api_data(include_secrets=True)
133 expected_api_data = repo.get_api_data(include_secrets=True)
128 expected_api_data.update(updates)
134 expected_api_data.update(updates)
129
135
130 id_, params = build_data(
136 id_, params = build_data(
131 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
137 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
132 response = api_call(self.app, params)
138 response = api_call(self.app, params)
133 expected = {
139 expected = {
134 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
140 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
135 'repository': jsonify(expected_api_data)
141 'repository': jsonify(expected_api_data)
136 }
142 }
137 assert_ok(id_, expected, given=response.body)
143 assert_ok(id_, expected, given=response.body)
138 result = response.json['result']['repository']
144 result = response.json['result']['repository']
139 assert result['fork_of'] == master_repo.repo_name
145 assert result['fork_of'] == master_repo.repo_name
140 assert result['fork_of_id'] == master_repo.repo_id
146 assert result['fork_of_id'] == master_repo.repo_id
141
147
142 def test_api_update_repo_fork_of_not_found(self, backend):
148 def test_api_update_repo_fork_of_not_found(self, backend):
143 master_repo_name = 'fake-parent-repo'
149 master_repo_name = 'fake-parent-repo'
144 repo = backend.create_repo()
150 repo = backend.create_repo()
145 updates = {
151 updates = {
146 'fork_of': master_repo_name
152 'fork_of': master_repo_name
147 }
153 }
148 id_, params = build_data(
154 id_, params = build_data(
149 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
155 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
150 response = api_call(self.app, params)
156 response = api_call(self.app, params)
151 expected = {
157 expected = {
152 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
158 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
153 master_repo_name)}
159 master_repo_name)}
154 assert_error(id_, expected, given=response.body)
160 assert_error(id_, expected, given=response.body)
155
161
156 def test_api_update_repo_with_repo_group_not_existing(self):
162 def test_api_update_repo_with_repo_group_not_existing(self):
157 repo_name = 'admin_owned'
163 repo_name = 'admin_owned'
158 fake_repo_group = 'test_group_for_update'
164 fake_repo_group = 'test_group_for_update'
159 fixture.create_repo(repo_name)
165 fixture.create_repo(repo_name)
160 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
166 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
161 id_, params = build_data(
167 id_, params = build_data(
162 self.apikey, 'update_repo', repoid=repo_name, **updates)
168 self.apikey, 'update_repo', repoid=repo_name, **updates)
163 response = api_call(self.app, params)
169 response = api_call(self.app, params)
164 try:
170 try:
165 expected = {
171 expected = {
166 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
172 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
167 }
173 }
168 assert_error(id_, expected, given=response.body)
174 assert_error(id_, expected, given=response.body)
169 finally:
175 finally:
170 fixture.destroy_repo(repo_name)
176 fixture.destroy_repo(repo_name)
171
177
172 def test_api_update_repo_regular_user_not_allowed(self):
178 def test_api_update_repo_regular_user_not_allowed(self):
173 repo_name = 'admin_owned'
179 repo_name = 'admin_owned'
174 fixture.create_repo(repo_name)
180 fixture.create_repo(repo_name)
175 updates = {'active': False}
181 updates = {'active': False}
176 id_, params = build_data(
182 id_, params = build_data(
177 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
183 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
178 response = api_call(self.app, params)
184 response = api_call(self.app, params)
179 try:
185 try:
180 expected = 'repository `%s` does not exist' % (repo_name,)
186 expected = 'repository `%s` does not exist' % (repo_name,)
181 assert_error(id_, expected, given=response.body)
187 assert_error(id_, expected, given=response.body)
182 finally:
188 finally:
183 fixture.destroy_repo(repo_name)
189 fixture.destroy_repo(repo_name)
184
190
185 @mock.patch.object(RepoModel, 'update', crash)
191 @mock.patch.object(RepoModel, 'update', crash)
186 def test_api_update_repo_exception_occurred(self, backend):
192 def test_api_update_repo_exception_occurred(self, backend):
187 repo_name = UPDATE_REPO_NAME
193 repo_name = UPDATE_REPO_NAME
188 fixture.create_repo(repo_name, repo_type=backend.alias)
194 fixture.create_repo(repo_name, repo_type=backend.alias)
189 id_, params = build_data(
195 id_, params = build_data(
190 self.apikey, 'update_repo', repoid=repo_name,
196 self.apikey, 'update_repo', repoid=repo_name,
191 owner=TEST_USER_ADMIN_LOGIN,)
197 owner=TEST_USER_ADMIN_LOGIN,)
192 response = api_call(self.app, params)
198 response = api_call(self.app, params)
193 try:
199 try:
194 expected = 'failed to update repo `%s`' % (repo_name,)
200 expected = 'failed to update repo `%s`' % (repo_name,)
195 assert_error(id_, expected, given=response.body)
201 assert_error(id_, expected, given=response.body)
196 finally:
202 finally:
197 fixture.destroy_repo(repo_name)
203 fixture.destroy_repo(repo_name)
@@ -1,831 +1,833 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest)
51 PullRequest)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of of iteration of repos without the scm initialisation,
76 Lighter version of of iteration of repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker
128 self.perm_checker = perm_checker
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 checker = self.perm_checker(*self.perm_set)
138 checker = self.perm_checker(*self.perm_set)
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 name = getattr(db_obj, self.obj_attr, None)
141 name = getattr(db_obj, self.obj_attr, None)
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = [
152 perm_set = [
153 'repository.read', 'repository.write', 'repository.admin']
153 'repository.read', 'repository.write', 'repository.admin']
154
154
155 super(RepoList, self).__init__(
155 super(RepoList, self).__init__(
156 obj_list=db_repo_list,
156 obj_list=db_repo_list,
157 obj_attr='repo_name', perm_set=perm_set,
157 obj_attr='repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
158 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
159 extra_kwargs=extra_kwargs)
160
160
161
161
162 class RepoGroupList(_PermCheckIterator):
162 class RepoGroupList(_PermCheckIterator):
163
163
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
165 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
166 perm_set = ['group.read', 'group.write', 'group.admin']
167
167
168 super(RepoGroupList, self).__init__(
168 super(RepoGroupList, self).__init__(
169 obj_list=db_repo_group_list,
169 obj_list=db_repo_group_list,
170 obj_attr='group_name', perm_set=perm_set,
170 obj_attr='group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
171 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
172 extra_kwargs=extra_kwargs)
173
173
174
174
175 class UserGroupList(_PermCheckIterator):
175 class UserGroupList(_PermCheckIterator):
176
176
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
178 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
180
181 super(UserGroupList, self).__init__(
181 super(UserGroupList, self).__init__(
182 obj_list=db_user_group_list,
182 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
183 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
184 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
185 extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
202 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
203 """
203 """
204 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
206
206
207 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
208 """
208 """
209
209
210 if repos_path is None:
210 if repos_path is None:
211 repos_path = self.repos_path
211 repos_path = self.repos_path
212
212
213 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
214
214
215 config = make_db_config()
215 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
217 repos = {}
217 repos = {}
218
218
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name need to be decomposed and put back together using the /
220 # name need to be decomposed and put back together using the /
221 # since this is internal storage separator for rhodecode
221 # since this is internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
223
223
224 try:
224 try:
225 if name in repos:
225 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
227 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
229 klass = get_backend(path[0])
229 klass = get_backend(path[0])
230 repos[name] = klass(path[1], config=config)
230 repos[name] = klass(path[1], config=config)
231 except OSError:
231 except OSError:
232 continue
232 continue
233 log.debug('found %s paths with repositories', len(repos))
233 log.debug('found %s paths with repositories', len(repos))
234 return repos
234 return repos
235
235
236 def get_repos(self, all_repos=None, sort_key=None):
236 def get_repos(self, all_repos=None, sort_key=None):
237 """
237 """
238 Get all repositories from db and for each repo create it's
238 Get all repositories from db and for each repo create it's
239 backend instance and fill that backed with information from database
239 backend instance and fill that backed with information from database
240
240
241 :param all_repos: list of repository names as strings
241 :param all_repos: list of repository names as strings
242 give specific repositories list, good for filtering
242 give specific repositories list, good for filtering
243
243
244 :param sort_key: initial sorting of repositories
244 :param sort_key: initial sorting of repositories
245 """
245 """
246 if all_repos is None:
246 if all_repos is None:
247 all_repos = self.sa.query(Repository)\
247 all_repos = self.sa.query(Repository)\
248 .filter(Repository.group_id == None)\
248 .filter(Repository.group_id == None)\
249 .order_by(func.lower(Repository.repo_name)).all()
249 .order_by(func.lower(Repository.repo_name)).all()
250 repo_iter = SimpleCachedRepoList(
250 repo_iter = SimpleCachedRepoList(
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 return repo_iter
252 return repo_iter
253
253
254 def get_repo_groups(self, all_groups=None):
254 def get_repo_groups(self, all_groups=None):
255 if all_groups is None:
255 if all_groups is None:
256 all_groups = RepoGroup.query()\
256 all_groups = RepoGroup.query()\
257 .filter(RepoGroup.group_parent_id == None).all()
257 .filter(RepoGroup.group_parent_id == None).all()
258 return [x for x in RepoGroupList(all_groups)]
258 return [x for x in RepoGroupList(all_groups)]
259
259
260 def mark_for_invalidation(self, repo_name, delete=False):
260 def mark_for_invalidation(self, repo_name, delete=False):
261 """
261 """
262 Mark caches of this repo invalid in the database. `delete` flag
262 Mark caches of this repo invalid in the database. `delete` flag
263 removes the cache entries
263 removes the cache entries
264
264
265 :param repo_name: the repo_name for which caches should be marked
265 :param repo_name: the repo_name for which caches should be marked
266 invalid, or deleted
266 invalid, or deleted
267 :param delete: delete the entry keys instead of setting bool
267 :param delete: delete the entry keys instead of setting bool
268 flag on them, and also purge caches used by the dogpile
268 flag on them, and also purge caches used by the dogpile
269 """
269 """
270 repo = Repository.get_by_repo_name(repo_name)
270 repo = Repository.get_by_repo_name(repo_name)
271
271
272 if repo:
272 if repo:
273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 repo_id=repo.repo_id)
274 repo_id=repo.repo_id)
275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276
276
277 repo_id = repo.repo_id
277 repo_id = repo.repo_id
278 config = repo._config
278 config = repo._config
279 config.set('extensions', 'largefiles', '')
279 config.set('extensions', 'largefiles', '')
280 repo.update_commit_cache(config=config, cs_cache=None)
280 repo.update_commit_cache(config=config, cs_cache=None)
281 if delete:
281 if delete:
282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284
284
285 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
286
286
287 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
290
290
291 if f is not None:
291 if f is not None:
292 try:
292 try:
293 self.sa.delete(f)
293 self.sa.delete(f)
294 return
294 return
295 except Exception:
295 except Exception:
296 log.error(traceback.format_exc())
296 log.error(traceback.format_exc())
297 raise
297 raise
298
298
299 try:
299 try:
300 f = UserFollowing()
300 f = UserFollowing()
301 f.user_id = user_id
301 f.user_id = user_id
302 f.follows_repo_id = follow_repo_id
302 f.follows_repo_id = follow_repo_id
303 self.sa.add(f)
303 self.sa.add(f)
304 except Exception:
304 except Exception:
305 log.error(traceback.format_exc())
305 log.error(traceback.format_exc())
306 raise
306 raise
307
307
308 def toggle_following_user(self, follow_user_id, user_id):
308 def toggle_following_user(self, follow_user_id, user_id):
309 f = self.sa.query(UserFollowing)\
309 f = self.sa.query(UserFollowing)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 .filter(UserFollowing.user_id == user_id).scalar()
311 .filter(UserFollowing.user_id == user_id).scalar()
312
312
313 if f is not None:
313 if f is not None:
314 try:
314 try:
315 self.sa.delete(f)
315 self.sa.delete(f)
316 return
316 return
317 except Exception:
317 except Exception:
318 log.error(traceback.format_exc())
318 log.error(traceback.format_exc())
319 raise
319 raise
320
320
321 try:
321 try:
322 f = UserFollowing()
322 f = UserFollowing()
323 f.user_id = user_id
323 f.user_id = user_id
324 f.follows_user_id = follow_user_id
324 f.follows_user_id = follow_user_id
325 self.sa.add(f)
325 self.sa.add(f)
326 except Exception:
326 except Exception:
327 log.error(traceback.format_exc())
327 log.error(traceback.format_exc())
328 raise
328 raise
329
329
330 def is_following_repo(self, repo_name, user_id, cache=False):
330 def is_following_repo(self, repo_name, user_id, cache=False):
331 r = self.sa.query(Repository)\
331 r = self.sa.query(Repository)\
332 .filter(Repository.repo_name == repo_name).scalar()
332 .filter(Repository.repo_name == repo_name).scalar()
333
333
334 f = self.sa.query(UserFollowing)\
334 f = self.sa.query(UserFollowing)\
335 .filter(UserFollowing.follows_repository == r)\
335 .filter(UserFollowing.follows_repository == r)\
336 .filter(UserFollowing.user_id == user_id).scalar()
336 .filter(UserFollowing.user_id == user_id).scalar()
337
337
338 return f is not None
338 return f is not None
339
339
340 def is_following_user(self, username, user_id, cache=False):
340 def is_following_user(self, username, user_id, cache=False):
341 u = User.get_by_username(username)
341 u = User.get_by_username(username)
342
342
343 f = self.sa.query(UserFollowing)\
343 f = self.sa.query(UserFollowing)\
344 .filter(UserFollowing.follows_user == u)\
344 .filter(UserFollowing.follows_user == u)\
345 .filter(UserFollowing.user_id == user_id).scalar()
345 .filter(UserFollowing.user_id == user_id).scalar()
346
346
347 return f is not None
347 return f is not None
348
348
349 def get_followers(self, repo):
349 def get_followers(self, repo):
350 repo = self._get_repo(repo)
350 repo = self._get_repo(repo)
351
351
352 return self.sa.query(UserFollowing)\
352 return self.sa.query(UserFollowing)\
353 .filter(UserFollowing.follows_repository == repo).count()
353 .filter(UserFollowing.follows_repository == repo).count()
354
354
355 def get_forks(self, repo):
355 def get_forks(self, repo):
356 repo = self._get_repo(repo)
356 repo = self._get_repo(repo)
357 return self.sa.query(Repository)\
357 return self.sa.query(Repository)\
358 .filter(Repository.fork == repo).count()
358 .filter(Repository.fork == repo).count()
359
359
360 def get_pull_requests(self, repo):
360 def get_pull_requests(self, repo):
361 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
362 return self.sa.query(PullRequest)\
362 return self.sa.query(PullRequest)\
363 .filter(PullRequest.target_repo == repo)\
363 .filter(PullRequest.target_repo == repo)\
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365
365
366 def mark_as_fork(self, repo, fork, user):
366 def mark_as_fork(self, repo, fork, user):
367 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
368 fork = self._get_repo(fork)
368 fork = self._get_repo(fork)
369 if fork and repo.repo_id == fork.repo_id:
369 if fork and repo.repo_id == fork.repo_id:
370 raise Exception("Cannot set repository as fork of itself")
370 raise Exception("Cannot set repository as fork of itself")
371
371
372 if fork and repo.repo_type != fork.repo_type:
372 if fork and repo.repo_type != fork.repo_type:
373 raise RepositoryError(
373 raise RepositoryError(
374 "Cannot set repository as fork of repository with other type")
374 "Cannot set repository as fork of repository with other type")
375
375
376 repo.fork = fork
376 repo.fork = fork
377 self.sa.add(repo)
377 self.sa.add(repo)
378 return repo
378 return repo
379
379
380 def pull_changes(self, repo, username, remote_uri=None):
380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 dbrepo = self._get_repo(repo)
381 dbrepo = self._get_repo(repo)
382 remote_uri = remote_uri or dbrepo.clone_uri
382 remote_uri = remote_uri or dbrepo.clone_uri
383 if not remote_uri:
383 if not remote_uri:
384 raise Exception("This repository doesn't have a clone uri")
384 raise Exception("This repository doesn't have a clone uri")
385
385
386 repo = dbrepo.scm_instance(cache=False)
386 repo = dbrepo.scm_instance(cache=False)
387 repo.config.clear_section('hooks')
387 repo.config.clear_section('hooks')
388
388
389 try:
389 try:
390 # NOTE(marcink): add extra validation so we skip invalid urls
390 # NOTE(marcink): add extra validation so we skip invalid urls
391 # this is due this tasks can be executed via scheduler without
391 # this is due this tasks can be executed via scheduler without
392 # proper validation of remote_uri
392 # proper validation of remote_uri
393 config = make_db_config(clear_session=False)
393 if validate_uri:
394 url_validator(remote_uri, dbrepo.repo_type, config)
394 config = make_db_config(clear_session=False)
395 url_validator(remote_uri, dbrepo.repo_type, config)
395 except InvalidCloneUrl:
396 except InvalidCloneUrl:
396 raise
397 raise
397
398
398 repo_name = dbrepo.repo_name
399 repo_name = dbrepo.repo_name
399 try:
400 try:
400 # TODO: we need to make sure those operations call proper hooks !
401 # TODO: we need to make sure those operations call proper hooks !
401 repo.pull(remote_uri)
402 repo.pull(remote_uri)
402
403
403 self.mark_for_invalidation(repo_name)
404 self.mark_for_invalidation(repo_name)
404 except Exception:
405 except Exception:
405 log.error(traceback.format_exc())
406 log.error(traceback.format_exc())
406 raise
407 raise
407
408
408 def push_changes(self, repo, username, remote_uri=None):
409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
409 dbrepo = self._get_repo(repo)
410 dbrepo = self._get_repo(repo)
410 remote_uri = remote_uri or dbrepo.push_uri
411 remote_uri = remote_uri or dbrepo.push_uri
411 if not remote_uri:
412 if not remote_uri:
412 raise Exception("This repository doesn't have a clone uri")
413 raise Exception("This repository doesn't have a clone uri")
413
414
414 repo = dbrepo.scm_instance(cache=False)
415 repo = dbrepo.scm_instance(cache=False)
415 repo.config.clear_section('hooks')
416 repo.config.clear_section('hooks')
416
417
417 try:
418 try:
418 # NOTE(marcink): add extra validation so we skip invalid urls
419 # NOTE(marcink): add extra validation so we skip invalid urls
419 # this is due this tasks can be executed via scheduler without
420 # this is due this tasks can be executed via scheduler without
420 # proper validation of remote_uri
421 # proper validation of remote_uri
421 config = make_db_config(clear_session=False)
422 if validate_uri:
422 url_validator(remote_uri, dbrepo.repo_type, config)
423 config = make_db_config(clear_session=False)
424 url_validator(remote_uri, dbrepo.repo_type, config)
423 except InvalidCloneUrl:
425 except InvalidCloneUrl:
424 raise
426 raise
425
427
426 try:
428 try:
427 repo.push(remote_uri)
429 repo.push(remote_uri)
428 except Exception:
430 except Exception:
429 log.error(traceback.format_exc())
431 log.error(traceback.format_exc())
430 raise
432 raise
431
433
432 def commit_change(self, repo, repo_name, commit, user, author, message,
434 def commit_change(self, repo, repo_name, commit, user, author, message,
433 content, f_path):
435 content, f_path):
434 """
436 """
435 Commits changes
437 Commits changes
436
438
437 :param repo: SCM instance
439 :param repo: SCM instance
438
440
439 """
441 """
440 user = self._get_user(user)
442 user = self._get_user(user)
441
443
442 # decoding here will force that we have proper encoded values
444 # decoding here will force that we have proper encoded values
443 # in any other case this will throw exceptions and deny commit
445 # in any other case this will throw exceptions and deny commit
444 content = safe_str(content)
446 content = safe_str(content)
445 path = safe_str(f_path)
447 path = safe_str(f_path)
446 # message and author needs to be unicode
448 # message and author needs to be unicode
447 # proper backend should then translate that into required type
449 # proper backend should then translate that into required type
448 message = safe_unicode(message)
450 message = safe_unicode(message)
449 author = safe_unicode(author)
451 author = safe_unicode(author)
450 imc = repo.in_memory_commit
452 imc = repo.in_memory_commit
451 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
452 try:
454 try:
453 # TODO: handle pre-push action !
455 # TODO: handle pre-push action !
454 tip = imc.commit(
456 tip = imc.commit(
455 message=message, author=author, parents=[commit],
457 message=message, author=author, parents=[commit],
456 branch=commit.branch)
458 branch=commit.branch)
457 except Exception as e:
459 except Exception as e:
458 log.error(traceback.format_exc())
460 log.error(traceback.format_exc())
459 raise IMCCommitError(str(e))
461 raise IMCCommitError(str(e))
460 finally:
462 finally:
461 # always clear caches, if commit fails we want fresh object also
463 # always clear caches, if commit fails we want fresh object also
462 self.mark_for_invalidation(repo_name)
464 self.mark_for_invalidation(repo_name)
463
465
464 # We trigger the post-push action
466 # We trigger the post-push action
465 hooks_utils.trigger_post_push_hook(
467 hooks_utils.trigger_post_push_hook(
466 username=user.username, action='push_local', repo_name=repo_name,
468 username=user.username, action='push_local', repo_name=repo_name,
467 repo_alias=repo.alias, commit_ids=[tip.raw_id])
469 repo_alias=repo.alias, commit_ids=[tip.raw_id])
468 return tip
470 return tip
469
471
470 def _sanitize_path(self, f_path):
472 def _sanitize_path(self, f_path):
471 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
472 raise NonRelativePathError('%s is not an relative path' % f_path)
474 raise NonRelativePathError('%s is not an relative path' % f_path)
473 if f_path:
475 if f_path:
474 f_path = os.path.normpath(f_path)
476 f_path = os.path.normpath(f_path)
475 return f_path
477 return f_path
476
478
477 def get_dirnode_metadata(self, request, commit, dir_node):
479 def get_dirnode_metadata(self, request, commit, dir_node):
478 if not dir_node.is_dir():
480 if not dir_node.is_dir():
479 return []
481 return []
480
482
481 data = []
483 data = []
482 for node in dir_node:
484 for node in dir_node:
483 if not node.is_file():
485 if not node.is_file():
484 # we skip file-nodes
486 # we skip file-nodes
485 continue
487 continue
486
488
487 last_commit = node.last_commit
489 last_commit = node.last_commit
488 last_commit_date = last_commit.date
490 last_commit_date = last_commit.date
489 data.append({
491 data.append({
490 'name': node.name,
492 'name': node.name,
491 'size': h.format_byte_size_binary(node.size),
493 'size': h.format_byte_size_binary(node.size),
492 'modified_at': h.format_date(last_commit_date),
494 'modified_at': h.format_date(last_commit_date),
493 'modified_ts': last_commit_date.isoformat(),
495 'modified_ts': last_commit_date.isoformat(),
494 'revision': last_commit.revision,
496 'revision': last_commit.revision,
495 'short_id': last_commit.short_id,
497 'short_id': last_commit.short_id,
496 'message': h.escape(last_commit.message),
498 'message': h.escape(last_commit.message),
497 'author': h.escape(last_commit.author),
499 'author': h.escape(last_commit.author),
498 'user_profile': h.gravatar_with_user(
500 'user_profile': h.gravatar_with_user(
499 request, last_commit.author),
501 request, last_commit.author),
500 })
502 })
501
503
502 return data
504 return data
503
505
504 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
505 extended_info=False, content=False, max_file_bytes=None):
507 extended_info=False, content=False, max_file_bytes=None):
506 """
508 """
507 recursive walk in root dir and return a set of all path in that dir
509 recursive walk in root dir and return a set of all path in that dir
508 based on repository walk function
510 based on repository walk function
509
511
510 :param repo_name: name of repository
512 :param repo_name: name of repository
511 :param commit_id: commit id for which to list nodes
513 :param commit_id: commit id for which to list nodes
512 :param root_path: root path to list
514 :param root_path: root path to list
513 :param flat: return as a list, if False returns a dict with description
515 :param flat: return as a list, if False returns a dict with description
514 :param max_file_bytes: will not return file contents over this limit
516 :param max_file_bytes: will not return file contents over this limit
515
517
516 """
518 """
517 _files = list()
519 _files = list()
518 _dirs = list()
520 _dirs = list()
519 try:
521 try:
520 _repo = self._get_repo(repo_name)
522 _repo = self._get_repo(repo_name)
521 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
523 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
522 root_path = root_path.lstrip('/')
524 root_path = root_path.lstrip('/')
523 for __, dirs, files in commit.walk(root_path):
525 for __, dirs, files in commit.walk(root_path):
524 for f in files:
526 for f in files:
525 _content = None
527 _content = None
526 _data = f.unicode_path
528 _data = f.unicode_path
527 over_size_limit = (max_file_bytes is not None
529 over_size_limit = (max_file_bytes is not None
528 and f.size > max_file_bytes)
530 and f.size > max_file_bytes)
529
531
530 if not flat:
532 if not flat:
531 _data = {
533 _data = {
532 "name": h.escape(f.unicode_path),
534 "name": h.escape(f.unicode_path),
533 "type": "file",
535 "type": "file",
534 }
536 }
535 if extended_info:
537 if extended_info:
536 _data.update({
538 _data.update({
537 "md5": f.md5,
539 "md5": f.md5,
538 "binary": f.is_binary,
540 "binary": f.is_binary,
539 "size": f.size,
541 "size": f.size,
540 "extension": f.extension,
542 "extension": f.extension,
541 "mimetype": f.mimetype,
543 "mimetype": f.mimetype,
542 "lines": f.lines()[0]
544 "lines": f.lines()[0]
543 })
545 })
544
546
545 if content:
547 if content:
546 full_content = None
548 full_content = None
547 if not f.is_binary and not over_size_limit:
549 if not f.is_binary and not over_size_limit:
548 full_content = safe_str(f.content)
550 full_content = safe_str(f.content)
549
551
550 _data.update({
552 _data.update({
551 "content": full_content,
553 "content": full_content,
552 })
554 })
553 _files.append(_data)
555 _files.append(_data)
554 for d in dirs:
556 for d in dirs:
555 _data = d.unicode_path
557 _data = d.unicode_path
556 if not flat:
558 if not flat:
557 _data = {
559 _data = {
558 "name": h.escape(d.unicode_path),
560 "name": h.escape(d.unicode_path),
559 "type": "dir",
561 "type": "dir",
560 }
562 }
561 if extended_info:
563 if extended_info:
562 _data.update({
564 _data.update({
563 "md5": None,
565 "md5": None,
564 "binary": None,
566 "binary": None,
565 "size": None,
567 "size": None,
566 "extension": None,
568 "extension": None,
567 })
569 })
568 if content:
570 if content:
569 _data.update({
571 _data.update({
570 "content": None
572 "content": None
571 })
573 })
572 _dirs.append(_data)
574 _dirs.append(_data)
573 except RepositoryError:
575 except RepositoryError:
574 log.debug("Exception in get_nodes", exc_info=True)
576 log.debug("Exception in get_nodes", exc_info=True)
575 raise
577 raise
576
578
577 return _dirs, _files
579 return _dirs, _files
578
580
579 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
581 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
580 author=None, trigger_push_hook=True):
582 author=None, trigger_push_hook=True):
581 """
583 """
582 Commits given multiple nodes into repo
584 Commits given multiple nodes into repo
583
585
584 :param user: RhodeCode User object or user_id, the commiter
586 :param user: RhodeCode User object or user_id, the commiter
585 :param repo: RhodeCode Repository object
587 :param repo: RhodeCode Repository object
586 :param message: commit message
588 :param message: commit message
587 :param nodes: mapping {filename:{'content':content},...}
589 :param nodes: mapping {filename:{'content':content},...}
588 :param parent_commit: parent commit, can be empty than it's
590 :param parent_commit: parent commit, can be empty than it's
589 initial commit
591 initial commit
590 :param author: author of commit, cna be different that commiter
592 :param author: author of commit, cna be different that commiter
591 only for git
593 only for git
592 :param trigger_push_hook: trigger push hooks
594 :param trigger_push_hook: trigger push hooks
593
595
594 :returns: new commited commit
596 :returns: new commited commit
595 """
597 """
596
598
597 user = self._get_user(user)
599 user = self._get_user(user)
598 scm_instance = repo.scm_instance(cache=False)
600 scm_instance = repo.scm_instance(cache=False)
599
601
600 processed_nodes = []
602 processed_nodes = []
601 for f_path in nodes:
603 for f_path in nodes:
602 f_path = self._sanitize_path(f_path)
604 f_path = self._sanitize_path(f_path)
603 content = nodes[f_path]['content']
605 content = nodes[f_path]['content']
604 f_path = safe_str(f_path)
606 f_path = safe_str(f_path)
605 # decoding here will force that we have proper encoded values
607 # decoding here will force that we have proper encoded values
606 # in any other case this will throw exceptions and deny commit
608 # in any other case this will throw exceptions and deny commit
607 if isinstance(content, (basestring,)):
609 if isinstance(content, (basestring,)):
608 content = safe_str(content)
610 content = safe_str(content)
609 elif isinstance(content, (file, cStringIO.OutputType,)):
611 elif isinstance(content, (file, cStringIO.OutputType,)):
610 content = content.read()
612 content = content.read()
611 else:
613 else:
612 raise Exception('Content is of unrecognized type %s' % (
614 raise Exception('Content is of unrecognized type %s' % (
613 type(content)
615 type(content)
614 ))
616 ))
615 processed_nodes.append((f_path, content))
617 processed_nodes.append((f_path, content))
616
618
617 message = safe_unicode(message)
619 message = safe_unicode(message)
618 commiter = user.full_contact
620 commiter = user.full_contact
619 author = safe_unicode(author) if author else commiter
621 author = safe_unicode(author) if author else commiter
620
622
621 imc = scm_instance.in_memory_commit
623 imc = scm_instance.in_memory_commit
622
624
623 if not parent_commit:
625 if not parent_commit:
624 parent_commit = EmptyCommit(alias=scm_instance.alias)
626 parent_commit = EmptyCommit(alias=scm_instance.alias)
625
627
626 if isinstance(parent_commit, EmptyCommit):
628 if isinstance(parent_commit, EmptyCommit):
627 # EmptyCommit means we we're editing empty repository
629 # EmptyCommit means we we're editing empty repository
628 parents = None
630 parents = None
629 else:
631 else:
630 parents = [parent_commit]
632 parents = [parent_commit]
631 # add multiple nodes
633 # add multiple nodes
632 for path, content in processed_nodes:
634 for path, content in processed_nodes:
633 imc.add(FileNode(path, content=content))
635 imc.add(FileNode(path, content=content))
634 # TODO: handle pre push scenario
636 # TODO: handle pre push scenario
635 tip = imc.commit(message=message,
637 tip = imc.commit(message=message,
636 author=author,
638 author=author,
637 parents=parents,
639 parents=parents,
638 branch=parent_commit.branch)
640 branch=parent_commit.branch)
639
641
640 self.mark_for_invalidation(repo.repo_name)
642 self.mark_for_invalidation(repo.repo_name)
641 if trigger_push_hook:
643 if trigger_push_hook:
642 hooks_utils.trigger_post_push_hook(
644 hooks_utils.trigger_post_push_hook(
643 username=user.username, action='push_local',
645 username=user.username, action='push_local',
644 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
646 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
645 commit_ids=[tip.raw_id])
647 commit_ids=[tip.raw_id])
646 return tip
648 return tip
647
649
648 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
650 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
649 author=None, trigger_push_hook=True):
651 author=None, trigger_push_hook=True):
650 user = self._get_user(user)
652 user = self._get_user(user)
651 scm_instance = repo.scm_instance(cache=False)
653 scm_instance = repo.scm_instance(cache=False)
652
654
653 message = safe_unicode(message)
655 message = safe_unicode(message)
654 commiter = user.full_contact
656 commiter = user.full_contact
655 author = safe_unicode(author) if author else commiter
657 author = safe_unicode(author) if author else commiter
656
658
657 imc = scm_instance.in_memory_commit
659 imc = scm_instance.in_memory_commit
658
660
659 if not parent_commit:
661 if not parent_commit:
660 parent_commit = EmptyCommit(alias=scm_instance.alias)
662 parent_commit = EmptyCommit(alias=scm_instance.alias)
661
663
662 if isinstance(parent_commit, EmptyCommit):
664 if isinstance(parent_commit, EmptyCommit):
663 # EmptyCommit means we we're editing empty repository
665 # EmptyCommit means we we're editing empty repository
664 parents = None
666 parents = None
665 else:
667 else:
666 parents = [parent_commit]
668 parents = [parent_commit]
667
669
668 # add multiple nodes
670 # add multiple nodes
669 for _filename, data in nodes.items():
671 for _filename, data in nodes.items():
670 # new filename, can be renamed from the old one, also sanitaze
672 # new filename, can be renamed from the old one, also sanitaze
671 # the path for any hack around relative paths like ../../ etc.
673 # the path for any hack around relative paths like ../../ etc.
672 filename = self._sanitize_path(data['filename'])
674 filename = self._sanitize_path(data['filename'])
673 old_filename = self._sanitize_path(_filename)
675 old_filename = self._sanitize_path(_filename)
674 content = data['content']
676 content = data['content']
675
677
676 filenode = FileNode(old_filename, content=content)
678 filenode = FileNode(old_filename, content=content)
677 op = data['op']
679 op = data['op']
678 if op == 'add':
680 if op == 'add':
679 imc.add(filenode)
681 imc.add(filenode)
680 elif op == 'del':
682 elif op == 'del':
681 imc.remove(filenode)
683 imc.remove(filenode)
682 elif op == 'mod':
684 elif op == 'mod':
683 if filename != old_filename:
685 if filename != old_filename:
684 # TODO: handle renames more efficient, needs vcs lib
686 # TODO: handle renames more efficient, needs vcs lib
685 # changes
687 # changes
686 imc.remove(filenode)
688 imc.remove(filenode)
687 imc.add(FileNode(filename, content=content))
689 imc.add(FileNode(filename, content=content))
688 else:
690 else:
689 imc.change(filenode)
691 imc.change(filenode)
690
692
691 try:
693 try:
692 # TODO: handle pre push scenario
694 # TODO: handle pre push scenario
693 # commit changes
695 # commit changes
694 tip = imc.commit(message=message,
696 tip = imc.commit(message=message,
695 author=author,
697 author=author,
696 parents=parents,
698 parents=parents,
697 branch=parent_commit.branch)
699 branch=parent_commit.branch)
698 except NodeNotChangedError:
700 except NodeNotChangedError:
699 raise
701 raise
700 except Exception as e:
702 except Exception as e:
701 log.exception("Unexpected exception during call to imc.commit")
703 log.exception("Unexpected exception during call to imc.commit")
702 raise IMCCommitError(str(e))
704 raise IMCCommitError(str(e))
703 finally:
705 finally:
704 # always clear caches, if commit fails we want fresh object also
706 # always clear caches, if commit fails we want fresh object also
705 self.mark_for_invalidation(repo.repo_name)
707 self.mark_for_invalidation(repo.repo_name)
706
708
707 if trigger_push_hook:
709 if trigger_push_hook:
708 hooks_utils.trigger_post_push_hook(
710 hooks_utils.trigger_post_push_hook(
709 username=user.username, action='push_local',
711 username=user.username, action='push_local',
710 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
712 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
711 commit_ids=[tip.raw_id])
713 commit_ids=[tip.raw_id])
712
714
def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                 author=None, trigger_push_hook=True):
    """
    Remove multiple nodes from `repo` in a single commit.

    :param user: RhodeCode User object or user_id, the committer
    :param repo: RhodeCode Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}; content may
        be empty — the dict structure mirrors add_nodes for compatibility
    :param parent_commit: parent commit; when empty the deletion is made
        against an initial (empty) commit
    :param author: commit author, can differ from the committer (git only)
    :param trigger_push_hook: whether to fire the post-push hooks
    :returns: new commit created after the deletion
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance(cache=False)

    # normalize paths; content is optional but accepted so callers can
    # reuse the same node-dict shape as add_nodes
    to_remove = [
        (self._sanitize_path(f_path), meta.get('content'))
        for f_path, meta in nodes.items()
    ]

    message = safe_unicode(message)
    committer = user.full_contact
    author = safe_unicode(author) if author else committer

    imc = scm_instance.in_memory_commit

    parent_commit = parent_commit or EmptyCommit(alias=scm_instance.alias)
    if isinstance(parent_commit, EmptyCommit):
        # an EmptyCommit means we are editing an empty repository
        parents = None
    else:
        parents = [parent_commit]

    # schedule every node for removal
    for path, content in to_remove:
        imc.remove(FileNode(path, content=content))

    # TODO: handle pre push scenario
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_commit.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local',
            repo_name=repo.repo_name, repo_alias=scm_instance.alias,
            commit_ids=[tip.raw_id])
    return tip
773
775
def strip(self, repo, commit_id, branch):
    """
    Strip `commit_id` from the given repository branch.

    Hooks are disabled for the duration of the operation and the repo
    cache is invalidated afterwards so subsequent reads see the
    rewritten history.
    """
    vcs_repo = repo.scm_instance(cache=False)
    # make sure no hooks fire as a side effect of the history rewrite
    vcs_repo.config.clear_section('hooks')
    vcs_repo.strip(commit_id, branch)
    self.mark_for_invalidation(repo.repo_name)
779
781
def get_unread_journal(self):
    """Return the total number of journal (UserLog) entries."""
    journal_query = self.sa.query(UserLog)
    return journal_query.count()
782
784
def get_repo_landing_revs(self, translator, repo=None):
    """
    Generate select options with tags, branches and bookmarks (hg only),
    grouped by type, usable as landing-revision choices.

    :param translator: i18n translation callable
    :param repo: repository (object/name/id) or None
    :returns: tuple ``(choices, hist_l)`` where `choices` is the flat list
        of valid values and `hist_l` the grouped display structure
    """
    _ = translator
    repo = self._get_repo(repo)

    # the default option is always offered
    hist_l = [['rev:tip', _('latest tip')]]
    choices = ['rev:tip']

    if not repo:
        return choices, hist_l

    vcs_repo = repo.scm_instance()

    def _add_group(prefix, names, label):
        # build (value, label) pairs for one reference type and register
        # both the display group and the flat choice values
        pairs = [(u'%s:%s' % (prefix, safe_unicode(name)), safe_unicode(name))
                 for name in names]
        hist_l.append((pairs, label))
        choices.extend(value for value, _label in pairs)

    _add_group('branch', vcs_repo.branches, _("Branches"))
    if vcs_repo.alias == 'hg':
        # bookmarks only exist in mercurial
        _add_group('book', vcs_repo.bookmarks, _("Bookmarks"))
    _add_group('tag', vcs_repo.tags, _("Tags"))

    return choices, hist_l
828
830
def get_server_info(self, environ=None):
    """
    Collect and return server/system diagnostics information.

    :param environ: optional WSGI environ passed through to the
        system-info collector
    """
    return get_system_info(environ)
General Comments 0
You need to be logged in to leave comments. Login now