repos: cleanup and fix landing-ref code
super-admin
r4852:07a18b11 default
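This commit reworks how the default landing ref is resolved. The first hunk updates the update_repo API tests: instead of hard-coding 'rev:tip', the test now passes landing_rev=None and computes the expected value per backend via ScmModel.backend_landing_ref. A minimal sketch of that resolution, assuming only what the test itself relies on (backend_landing_ref returning a ('type:name', label) pair; the concrete ref values in the comment are illustrative, not taken from this commit):

from rhodecode.model.scm import ScmModel

def resolve_expected_landing_ref(backend_alias):
    # backend_landing_ref() yields ('type:name', label); a git backend might
    # return e.g. ('branch:master', 'master') -- illustrative values only
    default_landing_ref, _label = ScmModel.backend_landing_ref(backend_alias)
    ref_type, ref_name = default_landing_ref.split(':')
    # the update_repo API echoes landing_rev back as a [type, name] pair
    return default_landing_ref, [ref_type, ref_name]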
@@ -1,203 +1,210 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import mock
 import pytest

 from rhodecode.model.repo import RepoModel
+from rhodecode.model.scm import ScmModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok, crash, jsonify)
 from rhodecode.tests.fixture import Fixture
 from rhodecode.tests.plugin import plain_http_host_only_stub

 fixture = Fixture()

 UPDATE_REPO_NAME = 'api_update_me'


 class SAME_AS_UPDATES(object):
     """ Constant used for tests below """


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestApiUpdateRepo(object):

     @pytest.mark.parametrize("updates, expected", [
         ({'owner': TEST_USER_REGULAR_LOGIN},
          SAME_AS_UPDATES),

         ({'description': 'new description'},
          SAME_AS_UPDATES),

         ({'clone_uri': 'http://foo.com/repo'},
          SAME_AS_UPDATES),

         ({'clone_uri': None},
          {'clone_uri': ''}),

         ({'clone_uri': ''},
          {'clone_uri': ''}),

         ({'clone_uri': 'http://example.com/repo_pull'},
          {'clone_uri': 'http://example.com/repo_pull'}),

         ({'push_uri': ''},
          {'push_uri': ''}),

         ({'push_uri': 'http://example.com/repo_push'},
          {'push_uri': 'http://example.com/repo_push'}),

-        ({'landing_rev': 'rev:tip'},
-         {'landing_rev': ['rev', 'tip']}),
+        ({'landing_rev': None},  # auto-updated based on type of repo
+         {'landing_rev': [None, None]}),

         ({'enable_statistics': True},
          SAME_AS_UPDATES),

         ({'enable_locking': True},
          SAME_AS_UPDATES),

         ({'enable_downloads': True},
          SAME_AS_UPDATES),

         ({'repo_name': 'new_repo_name'},
          {
              'repo_name': 'new_repo_name',
              'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
          }),

         ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
           '_group': 'test_group_for_update'},
          {
              'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
              'url': 'http://{}/test_group_for_update/{}'.format(
                  plain_http_host_only_stub(), UPDATE_REPO_NAME)
          }),
     ])
     def test_api_update_repo(self, updates, expected, backend):
         repo_name = UPDATE_REPO_NAME
         repo = fixture.create_repo(repo_name, repo_type=backend.alias)
         if updates.get('_group'):
             fixture.create_repo_group(updates['_group'])

+        if 'landing_rev' in updates:
+            default_landing_ref, _lbl = ScmModel.backend_landing_ref(backend.alias)
+            _type, _name = default_landing_ref.split(':')
+            updates['landing_rev'] = default_landing_ref
+            expected['landing_rev'] = [_type, _name]
+
         expected_api_data = repo.get_api_data(include_secrets=True)
         if expected is SAME_AS_UPDATES:
             expected_api_data.update(updates)
         else:
             expected_api_data.update(expected)

         id_, params = build_data(
             self.apikey, 'update_repo', repoid=repo_name, **updates)

         with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
             response = api_call(self.app, params)

         if updates.get('repo_name'):
             repo_name = updates['repo_name']

         try:
             expected = {
                 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
                 'repository': jsonify(expected_api_data)
             }
             assert_ok(id_, expected, given=response.body)
         finally:
             fixture.destroy_repo(repo_name)
             if updates.get('_group'):
                 fixture.destroy_repo_group(updates['_group'])

     def test_api_update_repo_fork_of_field(self, backend):
         master_repo = backend.create_repo()
         repo = backend.create_repo()
         updates = {
             'fork_of': master_repo.repo_name,
             'fork_of_id': master_repo.repo_id
         }
         expected_api_data = repo.get_api_data(include_secrets=True)
         expected_api_data.update(updates)

         id_, params = build_data(
             self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
         response = api_call(self.app, params)
         expected = {
             'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
             'repository': jsonify(expected_api_data)
         }
         assert_ok(id_, expected, given=response.body)
         result = response.json['result']['repository']
         assert result['fork_of'] == master_repo.repo_name
         assert result['fork_of_id'] == master_repo.repo_id

     def test_api_update_repo_fork_of_not_found(self, backend):
         master_repo_name = 'fake-parent-repo'
         repo = backend.create_repo()
         updates = {
             'fork_of': master_repo_name
         }
         id_, params = build_data(
             self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
         response = api_call(self.app, params)
         expected = {
             'repo_fork_of': 'Fork with id `{}` does not exists'.format(
                 master_repo_name)}
         assert_error(id_, expected, given=response.body)

     def test_api_update_repo_with_repo_group_not_existing(self):
         repo_name = 'admin_owned'
         fake_repo_group = 'test_group_for_update'
         fixture.create_repo(repo_name)
         updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
         id_, params = build_data(
             self.apikey, 'update_repo', repoid=repo_name, **updates)
         response = api_call(self.app, params)
         try:
             expected = {
                 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
             }
             assert_error(id_, expected, given=response.body)
         finally:
             fixture.destroy_repo(repo_name)

     def test_api_update_repo_regular_user_not_allowed(self):
         repo_name = 'admin_owned'
         fixture.create_repo(repo_name)
         updates = {'active': False}
         id_, params = build_data(
             self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
         response = api_call(self.app, params)
         try:
             expected = 'repository `%s` does not exist' % (repo_name,)
             assert_error(id_, expected, given=response.body)
         finally:
             fixture.destroy_repo(repo_name)

     @mock.patch.object(RepoModel, 'update', crash)
     def test_api_update_repo_exception_occurred(self, backend):
         repo_name = UPDATE_REPO_NAME
         fixture.create_repo(repo_name, repo_type=backend.alias)
         id_, params = build_data(
             self.apikey, 'update_repo', repoid=repo_name,
             owner=TEST_USER_ADMIN_LOGIN,)
         response = api_call(self.app, params)
         try:
             expected = 'failed to update repo `%s`' % (repo_name,)
             assert_error(id_, expected, given=response.body)
         finally:
             fixture.destroy_repo(repo_name)
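The second hunk below is the Git backend module; its only functional change is the new DEFAULT_REF class attribute on GitRepository. A short sketch of how it resolves, restating the two attribute lines from the hunk (assumes GIT_DEFAULT_BRANCH_NAME is unset in the environment):

import os

# mirrors GitRepository.DEFAULT_BRANCH_NAME / DEFAULT_REF from the hunk below
default_branch = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
default_ref = 'branch:{}'.format(default_branch)
assert default_ref == 'branch:master'  # holds when the env var is unset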
@@ -1,1051 +1,1052 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
58
59
59 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
60
61
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
63
64
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
66 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
68
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
70
70 # caches
71 # caches
71 self._commit_ids = {}
72 self._commit_ids = {}
72
73
73 @LazyProperty
74 @LazyProperty
74 def _remote(self):
75 def _remote(self):
75 repo_id = self.path
76 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
78
78 @LazyProperty
79 @LazyProperty
79 def bare(self):
80 def bare(self):
80 return self._remote.bare()
81 return self._remote.bare()
81
82
82 @LazyProperty
83 @LazyProperty
83 def head(self):
84 def head(self):
84 return self._remote.head()
85 return self._remote.head()
85
86
86 @CachedProperty
87 @CachedProperty
87 def commit_ids(self):
88 def commit_ids(self):
88 """
89 """
89 Returns list of commit ids, in ascending order. Being lazy
90 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
91 attribute allows external tools to inject commit ids from cache.
91 """
92 """
92 commit_ids = self._get_all_commit_ids()
93 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
94 self._rebuild_cache(commit_ids)
94 return commit_ids
95 return commit_ids
95
96
96 def _rebuild_cache(self, commit_ids):
97 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
98 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
99 for index, commit_id in enumerate(commit_ids))
99
100
100 def run_git_command(self, cmd, **opts):
101 def run_git_command(self, cmd, **opts):
101 """
102 """
102 Runs given ``cmd`` as git command and returns tuple
103 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
104 (stdout, stderr).
104
105
105 :param cmd: git command to be executed
106 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
107 :param opts: env options to pass into Subprocess command
107 """
108 """
108 if not isinstance(cmd, list):
109 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
111
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
114 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
116 return out, err
116
117
117 @staticmethod
118 @staticmethod
118 def check_url(url, config):
119 def check_url(url, config):
119 """
120 """
120 Function will check given url and try to verify if it's a valid
121 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
122 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
123 auth request that can cause whole API to hang when used from python
123 or other external calls.
124 or other external calls.
124
125
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
127 when the return code is non 200
127 """
128 """
128 # check first if it's not an url
129 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
130 if os.path.isdir(url) or url.startswith('file:'):
130 return True
131 return True
131
132
132 if '+' in url.split('://', 1)[0]:
133 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
134 url = url.split('+', 1)[1]
134
135
135 # Request the _remote to verify the url
136 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
137 return connection.Git.check_url(url, config.serialize())
137
138
138 @staticmethod
139 @staticmethod
139 def is_valid_repository(path):
140 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
141 if os.path.isdir(os.path.join(path, '.git')):
141 return True
142 return True
142 # check case of bare repository
143 # check case of bare repository
143 try:
144 try:
144 GitRepository(path)
145 GitRepository(path)
145 return True
146 return True
146 except VCSError:
147 except VCSError:
147 pass
148 pass
148 return False
149 return False
149
150
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
152 bare=False):
152 if create and os.path.exists(self.path):
153 if create and os.path.exists(self.path):
153 raise RepositoryError(
154 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
155 "Cannot create repository at %s, location already exist"
155 % self.path)
156 % self.path)
156
157
157 if bare and do_workspace_checkout:
158 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
159 raise RepositoryError("Cannot update a bare repository")
159 try:
160 try:
160
161
161 if src_url:
162 if src_url:
162 # check URL before any actions
163 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
164 GitRepository.check_url(src_url, self.config)
164
165
165 if create:
166 if create:
166 os.makedirs(self.path, mode=0o755)
167 os.makedirs(self.path, mode=0o755)
167
168
168 if bare:
169 if bare:
169 self._remote.init_bare()
170 self._remote.init_bare()
170 else:
171 else:
171 self._remote.init()
172 self._remote.init()
172
173
173 if src_url and bare:
174 if src_url and bare:
174 # bare repository only allows a fetch and checkout is not allowed
175 # bare repository only allows a fetch and checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
176 self.fetch(src_url, commit_ids=None)
176 elif src_url:
177 elif src_url:
177 self.pull(src_url, commit_ids=None,
178 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
179 update_after=do_workspace_checkout)
179
180
180 else:
181 else:
181 if not self._remote.assert_correct_path():
182 if not self._remote.assert_correct_path():
182 raise RepositoryError(
183 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
184 'Path "%s" does not contain a Git repository' %
184 (self.path,))
185 (self.path,))
185
186
186 # TODO: johbo: check if we have to translate the OSError here
187 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
188 except OSError as err:
188 raise RepositoryError(err)
189 raise RepositoryError(err)
189
190
190 def _get_all_commit_ids(self):
191 def _get_all_commit_ids(self):
191 return self._remote.get_all_commit_ids()
192 return self._remote.get_all_commit_ids()
192
193
193 def _get_commit_ids(self, filters=None):
194 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
195 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
197 # errors
197
198
198 head = self._remote.head(show_exc=False)
199 head = self._remote.head(show_exc=False)
199
200
200 if not head:
201 if not head:
201 return []
202 return []
202
203
203 rev_filter = ['--branches', '--tags']
204 rev_filter = ['--branches', '--tags']
204 extra_filter = []
205 extra_filter = []
205
206
206 if filters:
207 if filters:
207 if filters.get('since'):
208 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
209 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
210 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
211 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
212 if filters.get('branch_name'):
212 rev_filter = []
213 rev_filter = []
213 extra_filter.append(filters['branch_name'])
214 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
215 rev_filter.extend(extra_filter)
215
216
216 # if filters.get('start') or filters.get('end'):
217 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
218 # # skip is offset, max-count is limit
218 # if filters.get('start'):
219 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
220 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
221 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
223
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
225 try:
225 output, __ = self.run_git_command(cmd)
226 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
227 except RepositoryError:
227 # Can be raised for empty repositories
228 # Can be raised for empty repositories
228 return []
229 return []
229 return output.splitlines()
230 return output.splitlines()
230
231
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232
233
233 def is_null(value):
234 def is_null(value):
234 return len(value) == commit_id_or_idx.count('0')
235 return len(value) == commit_id_or_idx.count('0')
235
236
236 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
237 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
237 return self.commit_ids[-1]
238 return self.commit_ids[-1]
238
239
239 commit_missing_err = "Commit {} does not exist for `{}`".format(
240 commit_missing_err = "Commit {} does not exist for `{}`".format(
240 *map(safe_str, [commit_id_or_idx, self.name]))
241 *map(safe_str, [commit_id_or_idx, self.name]))
241
242
242 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
243 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
243 is_branch = reference_obj and reference_obj.branch
244 is_branch = reference_obj and reference_obj.branch
244
245
245 lookup_ok = False
246 lookup_ok = False
246 if is_bstr:
247 if is_bstr:
247 # Need to call remote to translate id for tagging scenarios,
248 # Need to call remote to translate id for tagging scenarios,
248 # or branch that are numeric
249 # or branch that are numeric
249 try:
250 try:
250 remote_data = self._remote.get_object(commit_id_or_idx,
251 remote_data = self._remote.get_object(commit_id_or_idx,
251 maybe_unreachable=maybe_unreachable)
252 maybe_unreachable=maybe_unreachable)
252 commit_id_or_idx = remote_data["commit_id"]
253 commit_id_or_idx = remote_data["commit_id"]
253 lookup_ok = True
254 lookup_ok = True
254 except (CommitDoesNotExistError,):
255 except (CommitDoesNotExistError,):
255 lookup_ok = False
256 lookup_ok = False
256
257
257 if lookup_ok is False:
258 if lookup_ok is False:
258 is_numeric_idx = \
259 is_numeric_idx = \
259 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
260 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
260 or isinstance(commit_id_or_idx, int)
261 or isinstance(commit_id_or_idx, int)
261 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
262 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
262 try:
263 try:
263 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
264 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
264 lookup_ok = True
265 lookup_ok = True
265 except Exception:
266 except Exception:
266 raise CommitDoesNotExistError(commit_missing_err)
267 raise CommitDoesNotExistError(commit_missing_err)
267
268
268 # we failed regular lookup, and by integer number lookup
269 # we failed regular lookup, and by integer number lookup
269 if lookup_ok is False:
270 if lookup_ok is False:
270 raise CommitDoesNotExistError(commit_missing_err)
271 raise CommitDoesNotExistError(commit_missing_err)
271
272
272 # Ensure we return full id
273 # Ensure we return full id
273 if not SHA_PATTERN.match(str(commit_id_or_idx)):
274 if not SHA_PATTERN.match(str(commit_id_or_idx)):
274 raise CommitDoesNotExistError(
275 raise CommitDoesNotExistError(
275 "Given commit id %s not recognized" % commit_id_or_idx)
276 "Given commit id %s not recognized" % commit_id_or_idx)
276 return commit_id_or_idx
277 return commit_id_or_idx
277
278
278 def get_hook_location(self):
279 def get_hook_location(self):
279 """
280 """
280 returns absolute path to location where hooks are stored
281 returns absolute path to location where hooks are stored
281 """
282 """
282 loc = os.path.join(self.path, 'hooks')
283 loc = os.path.join(self.path, 'hooks')
283 if not self.bare:
284 if not self.bare:
284 loc = os.path.join(self.path, '.git', 'hooks')
285 loc = os.path.join(self.path, '.git', 'hooks')
285 return loc
286 return loc
286
287
287 @LazyProperty
288 @LazyProperty
288 def last_change(self):
289 def last_change(self):
289 """
290 """
290 Returns last change made on this repository as
291 Returns last change made on this repository as
291 `datetime.datetime` object.
292 `datetime.datetime` object.
292 """
293 """
293 try:
294 try:
294 return self.get_commit().date
295 return self.get_commit().date
295 except RepositoryError:
296 except RepositoryError:
296 tzoffset = makedate()[1]
297 tzoffset = makedate()[1]
297 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
298 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
298
299
299 def _get_fs_mtime(self):
300 def _get_fs_mtime(self):
300 idx_loc = '' if self.bare else '.git'
301 idx_loc = '' if self.bare else '.git'
301 # fallback to filesystem
302 # fallback to filesystem
302 in_path = os.path.join(self.path, idx_loc, "index")
303 in_path = os.path.join(self.path, idx_loc, "index")
303 he_path = os.path.join(self.path, idx_loc, "HEAD")
304 he_path = os.path.join(self.path, idx_loc, "HEAD")
304 if os.path.exists(in_path):
305 if os.path.exists(in_path):
305 return os.stat(in_path).st_mtime
306 return os.stat(in_path).st_mtime
306 else:
307 else:
307 return os.stat(he_path).st_mtime
308 return os.stat(he_path).st_mtime
308
309
309 @LazyProperty
310 @LazyProperty
310 def description(self):
311 def description(self):
311 description = self._remote.get_description()
312 description = self._remote.get_description()
312 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
313 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
313
314
314 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
315 if self.is_empty():
316 if self.is_empty():
316 return OrderedDict()
317 return OrderedDict()
317
318
318 result = []
319 result = []
319 for ref, sha in self._refs.iteritems():
320 for ref, sha in self._refs.iteritems():
320 if ref.startswith(prefix):
321 if ref.startswith(prefix):
321 ref_name = ref
322 ref_name = ref
322 if strip_prefix:
323 if strip_prefix:
323 ref_name = ref[len(prefix):]
324 ref_name = ref[len(prefix):]
324 result.append((safe_unicode(ref_name), sha))
325 result.append((safe_unicode(ref_name), sha))
325
326
326 def get_name(entry):
327 def get_name(entry):
327 return entry[0]
328 return entry[0]
328
329
329 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
330
331
331 def _get_branches(self):
332 def _get_branches(self):
332 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
333 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
333
334
334 @CachedProperty
335 @CachedProperty
335 def branches(self):
336 def branches(self):
336 return self._get_branches()
337 return self._get_branches()
337
338
338 @CachedProperty
339 @CachedProperty
339 def branches_closed(self):
340 def branches_closed(self):
340 return {}
341 return {}
341
342
342 @CachedProperty
343 @CachedProperty
343 def bookmarks(self):
344 def bookmarks(self):
344 return {}
345 return {}
345
346
346 @CachedProperty
347 @CachedProperty
347 def branches_all(self):
348 def branches_all(self):
348 all_branches = {}
349 all_branches = {}
349 all_branches.update(self.branches)
350 all_branches.update(self.branches)
350 all_branches.update(self.branches_closed)
351 all_branches.update(self.branches_closed)
351 return all_branches
352 return all_branches
352
353
353 @CachedProperty
354 @CachedProperty
354 def tags(self):
355 def tags(self):
355 return self._get_tags()
356 return self._get_tags()
356
357
357 def _get_tags(self):
358 def _get_tags(self):
358 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
359 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
359
360
360 def tag(self, name, user, commit_id=None, message=None, date=None,
361 def tag(self, name, user, commit_id=None, message=None, date=None,
361 **kwargs):
362 **kwargs):
362 # TODO: fix this method to apply annotated tags correct with message
363 # TODO: fix this method to apply annotated tags correct with message
363 """
364 """
364 Creates and returns a tag for the given ``commit_id``.
365 Creates and returns a tag for the given ``commit_id``.
365
366
366 :param name: name for new tag
367 :param name: name for new tag
367 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
368 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
368 :param commit_id: commit id for which new tag would be created
369 :param commit_id: commit id for which new tag would be created
369 :param message: message of the tag's commit
370 :param message: message of the tag's commit
370 :param date: date of tag's commit
371 :param date: date of tag's commit
371
372
372 :raises TagAlreadyExistError: if tag with same name already exists
373 :raises TagAlreadyExistError: if tag with same name already exists
373 """
374 """
374 if name in self.tags:
375 if name in self.tags:
375 raise TagAlreadyExistError("Tag %s already exists" % name)
376 raise TagAlreadyExistError("Tag %s already exists" % name)
376 commit = self.get_commit(commit_id=commit_id)
377 commit = self.get_commit(commit_id=commit_id)
377 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
378 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
378
379
379 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
380 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
380
381
381 self._invalidate_prop_cache('tags')
382 self._invalidate_prop_cache('tags')
382 self._invalidate_prop_cache('_refs')
383 self._invalidate_prop_cache('_refs')
383
384
384 return commit
385 return commit
385
386
386 def remove_tag(self, name, user, message=None, date=None):
387 def remove_tag(self, name, user, message=None, date=None):
387 """
388 """
388 Removes tag with the given ``name``.
389 Removes tag with the given ``name``.
389
390
390 :param name: name of the tag to be removed
391 :param name: name of the tag to be removed
391 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
392 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
392 :param message: message of the tag's removal commit
393 :param message: message of the tag's removal commit
393 :param date: date of tag's removal commit
394 :param date: date of tag's removal commit
394
395
395 :raises TagDoesNotExistError: if tag with given name does not exists
396 :raises TagDoesNotExistError: if tag with given name does not exists
396 """
397 """
397 if name not in self.tags:
398 if name not in self.tags:
398 raise TagDoesNotExistError("Tag %s does not exist" % name)
399 raise TagDoesNotExistError("Tag %s does not exist" % name)
399
400
400 self._remote.tag_remove(name)
401 self._remote.tag_remove(name)
401 self._invalidate_prop_cache('tags')
402 self._invalidate_prop_cache('tags')
402 self._invalidate_prop_cache('_refs')
403 self._invalidate_prop_cache('_refs')
403
404
404 def _get_refs(self):
405 def _get_refs(self):
405 return self._remote.get_refs()
406 return self._remote.get_refs()
406
407
407 @CachedProperty
408 @CachedProperty
408 def _refs(self):
409 def _refs(self):
409 return self._get_refs()
410 return self._get_refs()
410
411
411 @property
412 @property
412 def _ref_tree(self):
413 def _ref_tree(self):
413 node = tree = {}
414 node = tree = {}
414 for ref, sha in self._refs.iteritems():
415 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
416 path = ref.split('/')
416 for bit in path[:-1]:
417 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
418 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
419 node[path[-1]] = sha
419 node = tree
420 node = tree
420 return tree
421 return tree
421
422
422 def get_remote_ref(self, ref_name):
423 def get_remote_ref(self, ref_name):
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 try:
425 try:
425 return self._refs[ref_key]
426 return self._refs[ref_key]
426 except Exception:
427 except Exception:
427 return
428 return
428
429
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
430 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
430 translate_tag=True, maybe_unreachable=False, reference_obj=None):
431 translate_tag=True, maybe_unreachable=False, reference_obj=None):
431 """
432 """
432 Returns `GitCommit` object representing commit from git repository
433 Returns `GitCommit` object representing commit from git repository
433 at the given `commit_id` or head (most recent commit) if None given.
434 at the given `commit_id` or head (most recent commit) if None given.
434 """
435 """
435
436
436 if self.is_empty():
437 if self.is_empty():
437 raise EmptyRepositoryError("There are no commits yet")
438 raise EmptyRepositoryError("There are no commits yet")
438
439
439 if commit_id is not None:
440 if commit_id is not None:
440 self._validate_commit_id(commit_id)
441 self._validate_commit_id(commit_id)
441 try:
442 try:
442 # we have cached idx, use it without contacting the remote
443 # we have cached idx, use it without contacting the remote
443 idx = self._commit_ids[commit_id]
444 idx = self._commit_ids[commit_id]
444 return GitCommit(self, commit_id, idx, pre_load=pre_load)
445 return GitCommit(self, commit_id, idx, pre_load=pre_load)
445 except KeyError:
446 except KeyError:
446 pass
447 pass
447
448
448 elif commit_idx is not None:
449 elif commit_idx is not None:
449 self._validate_commit_idx(commit_idx)
450 self._validate_commit_idx(commit_idx)
450 try:
451 try:
451 _commit_id = self.commit_ids[commit_idx]
452 _commit_id = self.commit_ids[commit_idx]
452 if commit_idx < 0:
453 if commit_idx < 0:
453 commit_idx = self.commit_ids.index(_commit_id)
454 commit_idx = self.commit_ids.index(_commit_id)
454 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
455 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
455 except IndexError:
456 except IndexError:
456 commit_id = commit_idx
457 commit_id = commit_idx
457 else:
458 else:
458 commit_id = "tip"
459 commit_id = "tip"
459
460
460 if translate_tag:
461 if translate_tag:
461 commit_id = self._lookup_commit(
462 commit_id = self._lookup_commit(
462 commit_id, maybe_unreachable=maybe_unreachable,
463 commit_id, maybe_unreachable=maybe_unreachable,
463 reference_obj=reference_obj)
464 reference_obj=reference_obj)
464
465
465 try:
466 try:
466 idx = self._commit_ids[commit_id]
467 idx = self._commit_ids[commit_id]
467 except KeyError:
468 except KeyError:
468 idx = -1
469 idx = -1
469
470
470 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471
472
472 def get_commits(
473 def get_commits(
473 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 """
476 """
476 Returns generator of `GitCommit` objects from start to end (both
477 Returns generator of `GitCommit` objects from start to end (both
477 are inclusive), in ascending date order.
478 are inclusive), in ascending date order.
478
479
479 :param start_id: None, str(commit_id)
480 :param start_id: None, str(commit_id)
480 :param end_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
481 :param start_date: if specified, commits with commit date less than
482 :param start_date: if specified, commits with commit date less than
482 ``start_date`` would be filtered out from returned set
483 ``start_date`` would be filtered out from returned set
483 :param end_date: if specified, commits with commit date greater than
484 :param end_date: if specified, commits with commit date greater than
484 ``end_date`` would be filtered out from returned set
485 ``end_date`` would be filtered out from returned set
485 :param branch_name: if specified, commits not reachable from given
486 :param branch_name: if specified, commits not reachable from given
486 branch would be filtered out from returned set
487 branch would be filtered out from returned set
487 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 Mercurial evolve
489 Mercurial evolve
489 :raise BranchDoesNotExistError: If given `branch_name` does not
490 :raise BranchDoesNotExistError: If given `branch_name` does not
490 exist.
491 exist.
491 :raise CommitDoesNotExistError: If commits for given `start` or
492 :raise CommitDoesNotExistError: If commits for given `start` or
492 `end` could not be found.
493 `end` could not be found.
493
494
494 """
495 """
495 if self.is_empty():
496 if self.is_empty():
496 raise EmptyRepositoryError("There are no commits yet")
497 raise EmptyRepositoryError("There are no commits yet")
497
498
498 self._validate_branch_name(branch_name)
499 self._validate_branch_name(branch_name)
499
500
500 if start_id is not None:
501 if start_id is not None:
501 self._validate_commit_id(start_id)
502 self._validate_commit_id(start_id)
502 if end_id is not None:
503 if end_id is not None:
503 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
504
505
505 start_raw_id = self._lookup_commit(start_id)
506 start_raw_id = self._lookup_commit(start_id)
506 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 end_raw_id = self._lookup_commit(end_id)
508 end_raw_id = self._lookup_commit(end_id)
508 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509
510
510 if None not in [start_id, end_id] and start_pos > end_pos:
511 if None not in [start_id, end_id] and start_pos > end_pos:
511 raise RepositoryError(
512 raise RepositoryError(
512 "Start commit '%s' cannot be after end commit '%s'" %
513 "Start commit '%s' cannot be after end commit '%s'" %
513 (start_id, end_id))
514 (start_id, end_id))
514
515
515 if end_pos is not None:
516 if end_pos is not None:
516 end_pos += 1
517 end_pos += 1
517
518
518 filter_ = []
519 filter_ = []
519 if branch_name:
520 if branch_name:
520 filter_.append({'branch_name': branch_name})
521 filter_.append({'branch_name': branch_name})
521 if start_date and not end_date:
522 if start_date and not end_date:
522 filter_.append({'since': start_date})
523 filter_.append({'since': start_date})
523 if end_date and not start_date:
524 if end_date and not start_date:
524 filter_.append({'until': end_date})
525 filter_.append({'until': end_date})
525 if start_date and end_date:
526 if start_date and end_date:
526 filter_.append({'since': start_date})
527 filter_.append({'since': start_date})
527 filter_.append({'until': end_date})
528 filter_.append({'until': end_date})
528
529
529 # if start_pos or end_pos:
530 # if start_pos or end_pos:
530 # filter_.append({'start': start_pos})
531 # filter_.append({'start': start_pos})
531 # filter_.append({'end': end_pos})
532 # filter_.append({'end': end_pos})
532
533
533 if filter_:
534 if filter_:
534 revfilters = {
535 revfilters = {
535 'branch_name': branch_name,
536 'branch_name': branch_name,
536 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 'start': start_pos,
539 'start': start_pos,
539 'end': end_pos,
540 'end': end_pos,
540 }
541 }
541 commit_ids = self._get_commit_ids(filters=revfilters)
542 commit_ids = self._get_commit_ids(filters=revfilters)
542
543
543 else:
544 else:
544 commit_ids = self.commit_ids
545 commit_ids = self.commit_ids
545
546
546 if start_pos or end_pos:
547 if start_pos or end_pos:
547 commit_ids = commit_ids[start_pos: end_pos]
548 commit_ids = commit_ids[start_pos: end_pos]
548
549
549 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
550 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
550 translate_tag=translate_tags)
551 translate_tag=translate_tags)
551
552
552 def get_diff(
553 def get_diff(
553 self, commit1, commit2, path='', ignore_whitespace=False,
554 self, commit1, commit2, path='', ignore_whitespace=False,
554 context=3, path1=None):
555 context=3, path1=None):
555 """
556 """
556 Returns (git like) *diff*, as plain text. Shows changes introduced by
557 Returns (git like) *diff*, as plain text. Shows changes introduced by
557 ``commit2`` since ``commit1``.
558 ``commit2`` since ``commit1``.
558
559
559 :param commit1: Entry point from which diff is shown. Can be
560 :param commit1: Entry point from which diff is shown. Can be
560 ``self.EMPTY_COMMIT`` - in this case, patch showing all
561 ``self.EMPTY_COMMIT`` - in this case, patch showing all
561 the changes since empty state of the repository until ``commit2``
562 the changes since empty state of the repository until ``commit2``
562 :param commit2: Until which commits changes should be shown.
563 :param commit2: Until which commits changes should be shown.
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 changes. Defaults to ``False``.
565 changes. Defaults to ``False``.
565 :param context: How many lines before/after changed lines should be
566 :param context: How many lines before/after changed lines should be
566 shown. Defaults to ``3``.
567 shown. Defaults to ``3``.
567 """
568 """
568 self._validate_diff_commits(commit1, commit2)
569 self._validate_diff_commits(commit1, commit2)
569 if path1 is not None and path1 != path:
570 if path1 is not None and path1 != path:
570 raise ValueError("Diff of two different paths not supported.")
571 raise ValueError("Diff of two different paths not supported.")
571
572
572 if path:
573 if path:
573 file_filter = path
574 file_filter = path
574 else:
575 else:
575 file_filter = None
576 file_filter = None
576
577
577 diff = self._remote.diff(
578 diff = self._remote.diff(
578 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 opt_ignorews=ignore_whitespace,
580 opt_ignorews=ignore_whitespace,
580 context=context)
581 context=context)
581 return GitDiff(diff)
582 return GitDiff(diff)
582
583
583 def strip(self, commit_id, branch_name):
584 def strip(self, commit_id, branch_name):
584 commit = self.get_commit(commit_id=commit_id)
585 commit = self.get_commit(commit_id=commit_id)
585 if commit.merge:
586 if commit.merge:
586 raise Exception('Cannot reset to merge commit')
587 raise Exception('Cannot reset to merge commit')
587
588
588 # parent is going to be the new head now
589 # parent is going to be the new head now
589 commit = commit.parents[0]
590 commit = commit.parents[0]
590 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591
592
592 # clear cached properties
593 # clear cached properties
593 self._invalidate_prop_cache('commit_ids')
594 self._invalidate_prop_cache('commit_ids')
594 self._invalidate_prop_cache('_refs')
595 self._invalidate_prop_cache('_refs')
595 self._invalidate_prop_cache('branches')
596 self._invalidate_prop_cache('branches')
596
597
597 return len(self.commit_ids)
598 return len(self.commit_ids)
598
599
599 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 self, commit_id1, repo2, commit_id2)
602 self, commit_id1, repo2, commit_id2)
602
603
603 if commit_id1 == commit_id2:
604 if commit_id1 == commit_id2:
604 return commit_id1
605 return commit_id1
605
606
606 if self != repo2:
607 if self != repo2:
607 commits = self._remote.get_missing_revs(
608 commits = self._remote.get_missing_revs(
608 commit_id1, commit_id2, repo2.path)
609 commit_id1, commit_id2, repo2.path)
609 if commits:
610 if commits:
610 commit = repo2.get_commit(commits[-1])
611 commit = repo2.get_commit(commits[-1])
611 if commit.parents:
612 if commit.parents:
612 ancestor_id = commit.parents[0].raw_id
613 ancestor_id = commit.parents[0].raw_id
613 else:
614 else:
614 ancestor_id = None
615 ancestor_id = None
615 else:
616 else:
616 # no commits from other repo, ancestor_id is the commit_id2
617 # no commits from other repo, ancestor_id is the commit_id2
617 ancestor_id = commit_id2
618 ancestor_id = commit_id2
618 else:
619 else:
619 output, __ = self.run_git_command(
620 output, __ = self.run_git_command(
620 ['merge-base', commit_id1, commit_id2])
621 ['merge-base', commit_id1, commit_id2])
621 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622
623
623 log.debug('Found common ancestor with sha: %s', ancestor_id)
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
624
625
625 return ancestor_id
626 return ancestor_id
626
627
627 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 repo1 = self
629 repo1 = self
629 ancestor_id = None
630 ancestor_id = None
630
631
631 if commit_id1 == commit_id2:
632 if commit_id1 == commit_id2:
632 commits = []
633 commits = []
633 elif repo1 != repo2:
634 elif repo1 != repo2:
634 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 repo2.path)
636 repo2.path)
636 commits = [
637 commits = [
637 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 for commit_id in reversed(missing_ids)]
639 for commit_id in reversed(missing_ids)]
639 else:
640 else:
640 output, __ = repo1.run_git_command(
641 output, __ = repo1.run_git_command(
641 ['log', '--reverse', '--pretty=format: %H', '-s',
642 ['log', '--reverse', '--pretty=format: %H', '-s',
642 '%s..%s' % (commit_id1, commit_id2)])
643 '%s..%s' % (commit_id1, commit_id2)])
643 commits = [
644 commits = [
644 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646
647
647 return commits
648 return commits
648
649
649 @LazyProperty
650 @LazyProperty
650 def in_memory_commit(self):
651 def in_memory_commit(self):
651 """
652 """
652 Returns ``GitInMemoryCommit`` object for this repository.
653 Returns ``GitInMemoryCommit`` object for this repository.
653 """
654 """
654 return GitInMemoryCommit(self)
655 return GitInMemoryCommit(self)
655
656
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from an external location. Pull is different in GIT
        than fetch, since it also does a checkout.

        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        """
        refs = None
        if commit_ids is not None:
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from an external location.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()

    def push(self, url):
        refs = None
        self._remote.sync_push(url, refs=refs)

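    # Editor's sketch (not part of the original file): a minimal sync round
    # trip with the three helpers above, assuming `repo` is an initialized
    # GitRepository and the remote URL is reachable:
    #
    #   url = 'https://code.example.com/upstream.git'  # hypothetical remote
    #   repo.fetch(url)   # objects and refs only, no working-copy changes
    #   repo.pull(url)    # fetch plus checkout, optionally limited via commit_ids
    #   repo.push(url)    # refs=None lets the remote layer push its default refs
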
    def set_refs(self, ref_name, commit_id):
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')

    def remove_ref(self, ref_name):
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')

    def run_gc(self, prune=True):
        cmd = ['gc', '--aggressive']
        if prune:
            cmd += ['--prune=now']
        _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
        return stderr

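    # Editor's note (sketch, an assumption about intended use): since
    # fail_on_stderr=False, gc chatter is returned for inspection instead of
    # raising, so a caller might do:
    #
    #   gc_errors = repo.run_gc(prune=True)
    #   if gc_errors:
    #       log.warning('git gc reported: %s', gc_errors)
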
    def _update_server_info(self):
        """
        Runs git's update-server-info command in this repo instance.
        """
        self._remote.update_server_info()

    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non-bare repositories (i.e. repositories with a
        working copy).
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()

    def _checkout(self, branch_name, create=False, force=False):
        """
        Checkout a branch in the working directory.

        It tries to create the branch if create is True, failing if the
        branch already exists.

        It only works for non-bare repositories (i.e. repositories with a
        working copy).
        """
        if self.bare:
            raise RepositoryError('Cannot checkout branches in a bare git repo')

        cmd = ['checkout']
        if force:
            cmd.append('-f')
        if create:
            cmd.append('-b')
        cmd.append(branch_name)
        self.run_git_command(cmd, fail_on_stderr=False)

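    # Editor's sketch (hypothetical usage): the create/force flags map onto
    # `git checkout -b` and `git checkout -f` respectively, so in a non-bare
    # working copy:
    #
    #   repo._checkout('feature-x', create=True)  # git checkout -b feature-x
    #   repo._checkout('master', force=True)      # git checkout -f master
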
    def _create_branch(self, branch_name, commit_id):
        """
        Creates a branch in a git repo, pointing at the given commit id.
        """
        self._remote.create_branch(branch_name, commit_id)

    def _identify(self):
        """
        Return the commit id the working directory currently points at
        (i.e. the state of HEAD).
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()

    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.
        """
        # N.B.(skreft): the --branch option is required, as otherwise the
        # shallow clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we got a different source branch, make sure we also fetch it,
        # for merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)

    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)

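    # Editor's note (illustrative): with use_origin=True the branch name is
    # rewritten into a forced refspec, so fetching 'master' effectively runs
    #
    #   git fetch --no-tags --update-head-ok <path> +master:refs/heads/master
    #
    # which overwrites the local branch even on non-fast-forward updates.
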
    def _local_reset(self, branch_name):
        branch_name = '{}'.format(branch_name)
        cmd = ['reset', '--hard', branch_name, '--']
        self.run_git_command(cmd, fail_on_stderr=False)

    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if ' not-for-merge ' in line:
                    continue
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads

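    # Editor's note (illustrative, based on git's documented FETCH_HEAD
    # layout): each line is roughly
    #
    #   <sha>   [not-for-merge]   branch '<name>' of <url>
    #
    # so entries marked not-for-merge are skipped, and stripping everything
    # from the first tab onwards leaves just the head sha to merge.
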
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})

    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch).
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)

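    # Editor's sketch (hypothetical paths): updating a working copy from a
    # sibling clone, refusing anything that is not a fast-forward:
    #
    #   repo._local_pull('/srv/repos/other-clone', 'master')
    #   repo._local_pull('/srv/repos/other-clone', 'master', ff_only=False)
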
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given heads into the checked-out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not
        possible to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param heads: the heads to merge.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # merge commit. We also specify the user who is doing the merge.
        cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
               '-c', 'user.email=%s' % safe_str(user_email),
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add the U notation for consistency with the
            # HG backend output
            unresolved = ['U {}'.format(f) for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise

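    # Editor's sketch (not part of the original file): a typical caller pairs
    # this with _last_fetch_heads(); on conflicts the merge is already
    # aborted and UnresolvedFilesInRepo carries the 'U <file>' list:
    #
    #   try:
    #       repo._local_merge('Merge fetched heads', 'Jane Doe',
    #                         'jane@example.com', repo._last_fetch_heads())
    #   except UnresolvedFilesInRepo as e:
    #       conflicted_files = e.args[0]  # e.g. ['U setup.py']
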
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to
        find the pushed branch or the commits, as HEAD will be corrupted
        (i.e., pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)

    def _get_new_pr_branch(self, source_branch, target_branch):
        prefix = 'pr_%s-%s_' % (source_branch, target_branch)
        pr_branches = []
        for branch in self.branches:
            if branch.startswith(prefix):
                pr_branches.append(int(branch[len(prefix):]))

        if not pr_branches:
            branch_id = 0
        else:
            branch_id = max(pr_branches) + 1

        return '%s%d' % (prefix, branch_id)

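    # Editor's note (illustrative): branch ids are sequential per
    # source/target pair, so with 'pr_feature-master_0' and
    # 'pr_feature-master_1' already present:
    #
    #   repo._get_new_pr_branch('feature', 'master')  # -> 'pr_feature-master_2'
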
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it was changed; this handles
        # even force pushes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout the temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
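    # Editor's sketch (hypothetical driver, not part of the original file):
    # the shadow-merge flow above reduces to something like:
    #
    #   resp = target_repo._merge_repo(
    #       repo_id, workspace_id, target_ref, source_repo, source_ref,
    #       'merged via RhodeCode', 'Jane Doe', 'jane@example.com',
    #       dry_run=True)  # dry_run tests mergeability without pushing
    #
    # where resp is the MergeResponse built at the end; a real run then
    # repeats the call with dry_run=False so _local_push publishes pr_branch.
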
@@ -1,1195 +1,1197 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import re
import shutil
import time
import logging
import traceback
import datetime

from pyramid.threadlocal import get_current_request
from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode import events
from rhodecode.lib.auth import HasUserGroupPermissionAny
from rhodecode.lib.caching_query import FromCache
from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
from rhodecode.lib import hooks_base
from rhodecode.lib.user_log_filter import user_log_filter
from rhodecode.lib.utils import make_db_config
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
    get_current_rhodecode_user, safe_int, action_logger_generic)
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    _hash_key, func, case, joinedload, or_, in_filter_generator,
    Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
    Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
from rhodecode.model.permission import PermissionModel
from rhodecode.model.settings import VcsSettingsModel

log = logging.getLogger(__name__)


class RepoModel(BaseModel):

    cls = Repository

    def _get_user_group(self, users_group):
        return self._get_instance(UserGroup, users_group,
                                  callback=UserGroup.get_by_group_name)

    def _get_repo_group(self, repo_group):
        return self._get_instance(RepoGroup, repo_group,
                                  callback=RepoGroup.get_by_group_name)

    def _create_default_perms(self, repository, private):
        # create default permission
        default = 'repository.read'
        def_user = User.get_default_user()
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('repository.'):
                default = p.permission.permission_name
                break

        default_perm = 'repository.none' if private else default

        repo_to_perm = UserRepoToPerm()
        repo_to_perm.permission = Permission.get_by_key(default_perm)

        repo_to_perm.repository = repository
        repo_to_perm.user_id = def_user.user_id

        return repo_to_perm

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from the database.
        """
        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def get(self, repo_id):
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_id == repo_id)

        return repo.scalar()

    def get_repo(self, repository):
        return self._get_repo(repository)

    def get_by_repo_name(self, repo_name, cache=False):
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_name == repo_name)

        if cache:
            name_key = _hash_key(repo_name)
            repo = repo.options(
                FromCache("sql_cache_short", "get_repo_%s" % name_key))
        return repo.scalar()

    def _extract_id_from_repo_name(self, repo_name):
        if repo_name.startswith('/'):
            repo_name = repo_name.lstrip('/')
        by_id_match = re.match(r'^_(\d{1,})', repo_name)
        if by_id_match:
            return by_id_match.groups()[0]

    def get_repo_by_id(self, repo_name):
        """
        Extracts a repo by the id embedded in special URLs.
        An example URL is _11/repo_name.

        :param repo_name:
        :return: repo object if matched else None
        """
        _repo_id = None
        try:
            _repo_id = self._extract_id_from_repo_name(repo_name)
            if _repo_id:
                return self.get(_repo_id)
        except Exception:
            log.exception('Failed to extract repo_name from URL')
            if _repo_id:
                Session().rollback()

        return None

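    # Editor's note (illustrative): permalink-style URLs encode the primary
    # key after a leading underscore, so both of these resolve to the
    # repository with repo_id == 11:
    #
    #   RepoModel().get_repo_by_id('_11/repo_name')
    #   RepoModel().get_repo_by_id('_11')
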
    def get_repos_for_root(self, root, traverse=False):
        if traverse:
            like_expression = u'{}%'.format(safe_unicode(root))
            repos = Repository.query().filter(
                Repository.repo_name.like(like_expression)).all()
        else:
            if root and not isinstance(root, RepoGroup):
                raise ValueError(
                    'Root must be an instance '
                    'of RepoGroup, got:{} instead'.format(type(root)))
            repos = Repository.query().filter(Repository.group == root).all()
        return repos

    def get_url(self, repo, request=None, permalink=False):
        if not request:
            request = get_current_request()

        if not request:
            return

        if permalink:
            return request.route_url(
                'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
        else:
            return request.route_url(
                'repo_summary', repo_name=safe_str(repo.repo_name))

    def get_commit_url(self, repo, commit_id, request=None, permalink=False):
        if not request:
            request = get_current_request()

        if not request:
            return

        if permalink:
            return request.route_url(
                'repo_commit', repo_name=safe_str(repo.repo_id),
                commit_id=commit_id)

        else:
            return request.route_url(
                'repo_commit', repo_name=safe_str(repo.repo_name),
                commit_id=commit_id)

    def get_repo_log(self, repo, filter_term):
        repo_log = UserLog.query()\
            .filter(or_(UserLog.repository_id == repo.repo_id,
                        UserLog.repository_name == repo.repo_name))\
            .options(joinedload(UserLog.user))\
            .options(joinedload(UserLog.repository))\
            .order_by(UserLog.action_date.desc())

        repo_log = user_log_filter(repo_log, filter_term)
        return repo_log

    @classmethod
    def update_commit_cache(cls, repositories=None):
        if not repositories:
            repositories = Repository.getAll()
        for repo in repositories:
            repo.update_commit_cache()

    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False, short_name=None):

        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()
        h = _render.get_helpers()

        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, archived, fork_of):
            if short_name is not None:
                short_name_var = short_name
            else:
                short_name_var = not admin
            return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
                           short_name=short_name_var, admin=False)

        def last_change(last_change):
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                ts = time.time()
                utc_offset = (datetime.datetime.fromtimestamp(ts)
                              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
                last_change = last_change + datetime.timedelta(seconds=utc_offset)

            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            # NOTE(marcink): because we use only the raw column here, we need
            # to load it like that
            changeset_cache = Repository._load_changeset_cache(
                repo.repo_id, repo._changeset_cache)

            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                                 repo.private, repo.archived, repo.fork),

                "desc": desc(h.escape(repo.description)),

                "last_change": last_change(repo.updated_on),

                "last_changeset": last_rev(repo.repo_name, changeset_cache),
                "last_changeset_raw": changeset_cache.get('revision'),

                "owner": user_profile(repo.User.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data

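    # Editor's sketch (hypothetical call): building grid rows for an admin
    # listing, assuming `repos_list` carries the columns selected by
    # get_repos_data_table() below (including the joined User):
    #
    #   rows = RepoModel().get_repos_as_dict(
    #       repo_list=repos_list, admin=True, super_user_actions=True)
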
    def get_repos_data_table(
            self, draw, start, limit,
            search_q, order_by, order_dir,
            auth_user, repo_group_id):
        from rhodecode.model.scm import RepoList

        _perms = ['repository.read', 'repository.write', 'repository.admin']

        repos = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .all()
        auth_repo_list = RepoList(
            repos, perm_set=_perms,
            extra_kwargs=dict(user=auth_user))

        allowed_ids = [-1]
        for repo in auth_repo_list:
            allowed_ids.append(repo.repo_id)

        repos_data_total_count = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .count()

        base_q = Session.query(
            Repository.repo_id,
            Repository.repo_name,
            Repository.description,
            Repository.repo_type,
            Repository.repo_state,
            Repository.private,
            Repository.archived,
            Repository.fork,
            Repository.updated_on,
            Repository._changeset_cache,
            User,
            ) \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .join(User, User.user_id == Repository.user_id) \
            .group_by(Repository, User)

        repos_data_total_filtered_count = base_q.count()

        sort_defined = False
        if order_by == 'repo_name':
            sort_col = func.lower(Repository.repo_name)
            sort_defined = True
        elif order_by == 'user_username':
            sort_col = User.username
        else:
            sort_col = getattr(Repository, order_by, None)

        if sort_defined or sort_col:
            if order_dir == 'asc':
                sort_col = sort_col.asc()
            else:
                sort_col = sort_col.desc()

            base_q = base_q.order_by(sort_col)
        base_q = base_q.offset(start).limit(limit)

        repos_list = base_q.all()

        repos_data = RepoModel().get_repos_as_dict(
            repo_list=repos_list, admin=False)

        data = ({
            'draw': draw,
            'data': repos_data,
            'recordsTotal': repos_data_total_count,
            'recordsFiltered': repos_data_total_filtered_count,
        })
        return data

    def _get_defaults(self, repo_name):
        """
        Gets information about a repository and returns a dict for
        usage in forms.

        :param repo_name:
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how we mark an empty group in HTML
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults

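    # Editor's note (illustrative): repo_landing_rev is stored as a
    # (type, value) pair and flattened above for the form field, e.g.:
    #
    #   ('branch', 'master')  ->  'branch:master'
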
    def update(self, repo, **kwargs):
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name

            affected_user_ids = []
            if 'user' in kwargs:
                old_owner_id = cur_repo.user.user_id
                new_owner = User.get_by_username(kwargs['user'])
                cur_repo.user = new_owner

                if old_owner_id != new_owner.user_id:
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )
            if kwargs.get('repo_landing_rev'):
                landing_rev_val = kwargs['repo_landing_rev']
                RepoModel().set_landing_rev(cur_repo, landing_rev_val)

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)

            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise

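    # Illustrative usage (values hypothetical): the kwargs mirror the repo_*
    # form fields, and repo_name is required because update() always derives
    # the (possibly unchanged) new name from it:
    #
    #   RepoModel().update(
    #       'my-repo', repo_name='my-repo',
    #       repo_description='updated description',
    #       repo_landing_rev='branch:stable', user='admin')
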
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev=None, fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create a repository inside the database with PENDING state. This
        should only be executed by the create() method, with the exception
        of importing existing repositories.
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))
        default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
        landing_rev = landing_rev or default_landing_ref

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo_name is just the name of the repository, while
            # repo_name_full is a fully qualified name that combines the
            # name with the path of its group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private,
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here, in order to check if the database won't
            # throw any exceptions; create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise

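    # Note on the landing ref: with landing_rev=None (the new default above),
    # ScmModel.backend_landing_ref() supplies the backend's default ref, so
    # callers no longer need to pass the previously hardcoded 'rev:tip'.
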
    def create(self, form_data, cur_user):
        """
        Create repository using celery tasks

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo, form_data, cur_user)

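    # Sketch of the expected call (form field names assumed from the repo
    # creation form, values hypothetical):
    #
    #   form_data = {'repo_name': 'my-repo', 'repo_type': 'git',
    #                'repo_description': '...'}
    #   RepoModel().create(form_data, cur_user='admin')
    #
    # run_task is assumed to fall back to synchronous execution when no
    # celery worker is configured.
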
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this also updates the current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})

        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes

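    # Illustrative call (ids and permission names hypothetical): each entry
    # is a (member_id, permission_name, member_type) triple:
    #
    #   changes = RepoModel().update_permissions(
    #       'my-repo',
    #       perm_additions=[(3, 'repository.write', 'user')],
    #       perm_updates=[(1, 'repository.read', 'user')],
    #       perm_deletions=[(7, 'repository.read', 'user_group')],
    #       cur_user='admin')
    #
    # The returned dict mirrors these buckets and also flags whether the
    # default user's permission was changed.
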
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper that executes the fork-creation celery task

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)

    def archive(self, repo):
        """
        Archive given repository. Set archive flag.

        :param repo:
        """
        repo = self._get_repo(repo)
        if repo:
            try:
                repo.archived = True
                self.sa.add(repo)
                self.sa.commit()
            except Exception:
                log.error(traceback.format_exc())
                raise

    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository; the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param fs_remove: remove (archive) repo from filesystem
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                hooks_base.delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise

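    # Illustrative usage (repo name hypothetical): detach forks instead of
    # failing with AttachedForksError, and keep the files on disk:
    #
    #   RepoModel().delete('my-repo', forks='detach', fs_remove=False)
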
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj

    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')

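    # Illustrative grant/revoke round-trip (names hypothetical); repo, user
    # and perm each accept a model instance, an id, or a name:
    #
    #   RepoModel().grant_user_permission(
    #       repo='my-repo', user='bob', perm='repository.write')
    #   RepoModel().revoke_user_permission(repo='my-repo', user='bob')
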
    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj

    def revoke_user_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm to %s on %s', repo, group_name)
            action_logger_generic(
                'revoked permission from usergroup: {} on repo: {}'.format(
                    group_name, repo), namespace='security.repo')

    def delete_stats(self, repo_name):
        """
        removes stats for given repo

        :param repo_name:
        """
        repo = self._get_repo(repo_name)
        try:
            obj = self.sa.query(Statistics) \
                .filter(Statistics.repository == repo).scalar()
            if obj:
                self.sa.delete(obj)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def add_repo_field(self, repo_name, field_key, field_label, field_value='',
                       field_type='str', field_desc=''):

        repo = self._get_repo(repo_name)

        new_field = RepositoryField()
        new_field.repository = repo
        new_field.field_key = field_key
        new_field.field_type = field_type  # python type
        new_field.field_value = field_value
        new_field.field_desc = field_desc
        new_field.field_label = field_label
        self.sa.add(new_field)
        return new_field

    def delete_repo_field(self, repo_name, field_key):
        repo = self._get_repo(repo_name)
        field = RepositoryField.get_by_key_name(field_key, repo)
        if field:
            self.sa.delete(field)

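    # Illustrative usage (field values hypothetical); extra fields later
    # surface in update() via RepositoryField.PREFIX-prefixed kwargs:
    #
    #   RepoModel().add_repo_field(
    #       'my-repo', field_key='ticket_system', field_label='Ticket system',
    #       field_value='JIRA', field_desc='Tracker used by this project')
    #   RepoModel().delete_repo_field('my-repo', field_key='ticket_system')
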
    def set_landing_rev(self, repo, landing_rev_name):
        if landing_rev_name.startswith('branch:'):
            landing_rev_name = landing_rev_name.split('branch:')[-1]
        scm_instance = repo.scm_instance()
        if scm_instance:
            return scm_instance._remote.set_head_ref(landing_rev_name)

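    # Illustrative usage (branch name hypothetical): the 'branch:' prefix is
    # stripped before the backend HEAD ref is moved, so
    #
    #   RepoModel().set_landing_rev(repo, 'branch:stable')
    #
    # ends up calling set_head_ref('stable') on the remote.
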
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        Makes a repository on the filesystem. It is group aware, meaning it
        will create a repository within a group, and alter the paths
        according to the group location.

        :param repo_name:
        :param repo_type:
        :param repo_group:
        :param clone_uri:
        :param repo_store_location:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups, got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo

    def _rename_filesystem_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)

    def _delete_filesystem_repo(self, repo):
        """
        Removes the repo from the filesystem. The removal is actually done by
        adding an rm__ prefix to the directory and renaming the internal
        .hg/.git dirs, so the repository is no longer valid for rhodecode.
        It can be undeleted later on by reverting the renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internals so the dir doesn't get detected as a repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)


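    # For illustration (timestamp hypothetical): a repo named my-repo removed
    # from disk is renamed to a sibling directory such as
    #
    #   rm__20200101_120000_123456__my-repo
    #
    # and can be restored by reverting the renames described above.
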
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    differently.
    """
1091
1093
1092 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1094 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1093 path_re = re.compile(r'^docs?', re.IGNORECASE)
1095 path_re = re.compile(r'^docs?', re.IGNORECASE)
1094
1096
1095 default_priorities = {
1097 default_priorities = {
1096 None: 0,
1098 None: 0,
1097 '.text': 2,
1099 '.text': 2,
1098 '.txt': 3,
1100 '.txt': 3,
1099 '.rst': 1,
1101 '.rst': 1,
1100 '.rest': 2,
1102 '.rest': 2,
1101 '.md': 1,
1103 '.md': 1,
1102 '.mkdn': 2,
1104 '.mkdn': 2,
1103 '.mdown': 3,
1105 '.mdown': 3,
1104 '.markdown': 4,
1106 '.markdown': 4,
1105 }
1107 }
1106
1108
1107 path_priority = {
1109 path_priority = {
1108 'doc': 0,
1110 'doc': 0,
1109 'docs': 1,
1111 'docs': 1,
1110 }
1112 }
1111
1113
1112 FALLBACK_PRIORITY = 99
1114 FALLBACK_PRIORITY = 99
1113
1115
1114 RENDERER_TO_EXTENSION = {
1116 RENDERER_TO_EXTENSION = {
1115 'rst': ['.rst', '.rest'],
1117 'rst': ['.rst', '.rest'],
1116 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1118 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1117 }
1119 }
1118
1120
    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path=u'/'):
        """
        Find a readme in the given `commit`.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

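    # Illustrative usage (sketch): extensions matching the renderer configured
    # for the repository win, and doc/docs subdirectories are searched as a
    # fallback:
    #
    #   finder = ReadmeFinder(default_renderer='markdown')
    #   readme_node = finder.search(repo.scm_instance().get_commit())
    #   if readme_node:
    #       print(readme_node.path)  # e.g. 'README.md'
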
    def _match_readmes(self, nodes):
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)


class ReadmeMatch:

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,1025 +1,1028 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import cStringIO

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of repo iteration, without the scm initialisation
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


112 class _PermCheckIterator(object):
112 class _PermCheckIterator(object):
113
113
114 def __init__(
114 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
115 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
116 extra_kwargs=None):
117 """
117 """
118 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
119 checking permission for them from perm_set var
120
120
121 :param obj_list: list of db objects
121 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
125 """
125 """
126 self.obj_list = obj_list
126 self.obj_list = obj_list
127 self.obj_attr = obj_attr
127 self.obj_attr = obj_attr
128 self.perm_set = perm_set
128 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
129 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
130 self.extra_kwargs = extra_kwargs or {}
131
131
132 def __len__(self):
132 def __len__(self):
133 return len(self.obj_list)
133 return len(self.obj_list)
134
134
135 def __repr__(self):
135 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137
137
138 def __iter__(self):
138 def __iter__(self):
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
142 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
144 continue
145
145
146 yield db_obj
146 yield db_obj
147
147
148
148
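# Usage sketch (illustrative only; the repo names and permission set below
# are hypothetical): each subclass that follows binds a concrete permission
# checker, so iterating silently drops objects the current user cannot see:
#
#   repos = RepoList(Repository.query().all(),
#                    perm_set=['repository.admin'])
#   admin_repo_names = [r.repo_name for r in repos]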
149 class RepoList(_PermCheckIterator):
150
151     def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152         if not perm_set:
153             perm_set = ['repository.read', 'repository.write', 'repository.admin']
154
155         super(RepoList, self).__init__(
156             obj_list=db_repo_list,
157             obj_attr='_repo_name', perm_set=perm_set,
158             perm_checker=HasRepoPermissionAny,
159             extra_kwargs=extra_kwargs)
160
161
162 class RepoGroupList(_PermCheckIterator):
163
164     def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165         if not perm_set:
166             perm_set = ['group.read', 'group.write', 'group.admin']
167
168         super(RepoGroupList, self).__init__(
169             obj_list=db_repo_group_list,
170             obj_attr='_group_name', perm_set=perm_set,
171             perm_checker=HasRepoGroupPermissionAny,
172             extra_kwargs=extra_kwargs)
173
174
175 class UserGroupList(_PermCheckIterator):
176
177     def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178         if not perm_set:
179             perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
181         super(UserGroupList, self).__init__(
182             obj_list=db_user_group_list,
183             obj_attr='users_group_name', perm_set=perm_set,
184             perm_checker=HasUserGroupPermissionAny,
185             extra_kwargs=extra_kwargs)
186
187
188 class ScmModel(BaseModel):
189     """
190     Generic Scm Model
191     """
192
193     @LazyProperty
194     def repos_path(self):
195         """
196         Gets the repositories root path from the database
197         """
198
199         settings_model = VcsSettingsModel(sa=self.sa)
200         return settings_model.get_repos_location()
201
202     def repo_scan(self, repos_path=None):
203         """
204         Listing of repositories in the given path. This path should not be a
205         repository itself. Returns a dictionary of repository objects
206
207         :param repos_path: path to the directory containing repositories
208         """
209
210         if repos_path is None:
211             repos_path = self.repos_path
212
213         log.info('scanning for repositories in %s', repos_path)
214
215         config = make_db_config()
216         config.set('extensions', 'largefiles', '')
217         repos = {}
218
219         for name, path in get_filesystem_repos(repos_path, recursive=True):
220             # names need to be decomposed and put back together using '/'
221             # since this is the internal storage separator for rhodecode
222             name = Repository.normalize_repo_name(name)
223
224             try:
225                 if name in repos:
226                     raise RepositoryError('Duplicate repository name %s '
227                                           'found in %s' % (name, path))
228                 elif path[0] in rhodecode.BACKENDS:
229                     backend = get_backend(path[0])
230                     repos[name] = backend(path[1], config=config,
231                                           with_wire={"cache": False})
232             except OSError:
233                 continue
234             except RepositoryError:
235                 log.exception('Failed to create a repo')
236                 continue
237
238         log.debug('found %s paths with repositories', len(repos))
239         return repos
240
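    # Result sketch (hypothetical paths; the tuple shape is inferred from the
    # unpacking above): get_filesystem_repos() yields
    # (name, (backend_alias, repo_path)) items, so scanning a root such as
    # /srv/repos holding one Mercurial and one Git repository would return
    # a mapping along the lines of:
    #
    #   {'infra/ansible': <MercurialRepository ...>,
    #    'web/frontend': <GitRepository ...>}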
241     def get_repos(self, all_repos=None, sort_key=None):
242         """
243         Get all repositories from the db, and for each repo create its
244         backend instance and fill that backend with information from the database
245
246         :param all_repos: list of repository names as strings
247             give specific repositories list, good for filtering
248
249         :param sort_key: initial sorting of repositories
250         """
251         if all_repos is None:
252             all_repos = self.sa.query(Repository)\
253                 .filter(Repository.group_id == None)\
254                 .order_by(func.lower(Repository.repo_name)).all()
255         repo_iter = SimpleCachedRepoList(
256             all_repos, repos_path=self.repos_path, order_by=sort_key)
257         return repo_iter
258
259     def get_repo_groups(self, all_groups=None):
260         if all_groups is None:
261             all_groups = RepoGroup.query()\
262                 .filter(RepoGroup.group_parent_id == None).all()
263         return [x for x in RepoGroupList(all_groups)]
264
265     def mark_for_invalidation(self, repo_name, delete=False):
266         """
267         Mark caches of this repo invalid in the database. The `delete` flag
268         removes the cache entries
269
270         :param repo_name: the repo_name for which caches should be marked
271             invalid, or deleted
272         :param delete: delete the entry keys instead of setting a bool
273             flag on them, and also purge the caches used by dogpile
274         """
275         repo = Repository.get_by_repo_name(repo_name)
276
277         if repo:
278             invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
279                 repo_id=repo.repo_id)
280             CacheKey.set_invalidate(invalidation_namespace, delete=delete)
281
282             repo_id = repo.repo_id
283             config = repo._config
284             config.set('extensions', 'largefiles', '')
285             repo.update_commit_cache(config=config, cs_cache=None)
286             if delete:
287                 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
288                 rc_cache.clear_cache_namespace(
289                     'cache_repo', cache_namespace_uid, invalidate=True)
290
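    # Usage sketch (hypothetical repo name): typically called after any write
    # operation so stale commit caches are refreshed on the next access; with
    # delete=True the dogpile entries are purged outright instead of flagged:
    #
    #   ScmModel().mark_for_invalidation('web/frontend')
    #   ScmModel().mark_for_invalidation('web/frontend', delete=True)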
291     def toggle_following_repo(self, follow_repo_id, user_id):
292
293         f = self.sa.query(UserFollowing)\
294             .filter(UserFollowing.follows_repo_id == follow_repo_id)\
295             .filter(UserFollowing.user_id == user_id).scalar()
296
297         if f is not None:
298             try:
299                 self.sa.delete(f)
300                 return
301             except Exception:
302                 log.error(traceback.format_exc())
303                 raise
304
305         try:
306             f = UserFollowing()
307             f.user_id = user_id
308             f.follows_repo_id = follow_repo_id
309             self.sa.add(f)
310         except Exception:
311             log.error(traceback.format_exc())
312             raise
313
314     def toggle_following_user(self, follow_user_id, user_id):
315         f = self.sa.query(UserFollowing)\
316             .filter(UserFollowing.follows_user_id == follow_user_id)\
317             .filter(UserFollowing.user_id == user_id).scalar()
318
319         if f is not None:
320             try:
321                 self.sa.delete(f)
322                 return
323             except Exception:
324                 log.error(traceback.format_exc())
325                 raise
326
327         try:
328             f = UserFollowing()
329             f.user_id = user_id
330             f.follows_user_id = follow_user_id
331             self.sa.add(f)
332         except Exception:
333             log.error(traceback.format_exc())
334             raise
335
336     def is_following_repo(self, repo_name, user_id, cache=False):
337         r = self.sa.query(Repository)\
338             .filter(Repository.repo_name == repo_name).scalar()
339
340         f = self.sa.query(UserFollowing)\
341             .filter(UserFollowing.follows_repository == r)\
342             .filter(UserFollowing.user_id == user_id).scalar()
343
344         return f is not None
345
346     def is_following_user(self, username, user_id, cache=False):
347         u = User.get_by_username(username)
348
349         f = self.sa.query(UserFollowing)\
350             .filter(UserFollowing.follows_user == u)\
351             .filter(UserFollowing.user_id == user_id).scalar()
352
353         return f is not None
354
355     def get_followers(self, repo):
356         repo = self._get_repo(repo)
357
358         return self.sa.query(UserFollowing)\
359             .filter(UserFollowing.follows_repository == repo).count()
360
361     def get_forks(self, repo):
362         repo = self._get_repo(repo)
363         return self.sa.query(Repository)\
364             .filter(Repository.fork == repo).count()
365
366     def get_pull_requests(self, repo):
367         repo = self._get_repo(repo)
368         return self.sa.query(PullRequest)\
369             .filter(PullRequest.target_repo == repo)\
370             .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371
372     def get_artifacts(self, repo):
373         repo = self._get_repo(repo)
374         return self.sa.query(FileStore)\
375             .filter(FileStore.repo == repo)\
376             .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
377
378     def mark_as_fork(self, repo, fork, user):
379         repo = self._get_repo(repo)
380         fork = self._get_repo(fork)
381         if fork and repo.repo_id == fork.repo_id:
382             raise Exception("Cannot set repository as fork of itself")
383
384         if fork and repo.repo_type != fork.repo_type:
385             raise RepositoryError(
386                 "Cannot set repository as fork of repository with other type")
387
388         repo.fork = fork
389         self.sa.add(repo)
390         return repo
391
392     def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
393         dbrepo = self._get_repo(repo)
394         remote_uri = remote_uri or dbrepo.clone_uri
395         if not remote_uri:
396             raise Exception("This repository doesn't have a clone uri")
397
398         repo = dbrepo.scm_instance(cache=False)
399         repo.config.clear_section('hooks')
400
401         try:
402             # NOTE(marcink): add extra validation so we can skip invalid urls;
403             # this is needed because these tasks can be executed via the
404             # scheduler without proper validation of remote_uri
405             if validate_uri:
406                 config = make_db_config(clear_session=False)
407                 url_validator(remote_uri, dbrepo.repo_type, config)
408         except InvalidCloneUrl:
409             raise
410
411         repo_name = dbrepo.repo_name
412         try:
413             # TODO: we need to make sure those operations call proper hooks !
414             repo.fetch(remote_uri)
415
416             self.mark_for_invalidation(repo_name)
417         except Exception:
418             log.error(traceback.format_exc())
419             raise
420
421     def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
422         dbrepo = self._get_repo(repo)
423         remote_uri = remote_uri or dbrepo.push_uri
424         if not remote_uri:
425             raise Exception("This repository doesn't have a push uri")
426
427         repo = dbrepo.scm_instance(cache=False)
428         repo.config.clear_section('hooks')
429
430         try:
431             # NOTE(marcink): add extra validation so we can skip invalid urls;
432             # this is needed because these tasks can be executed via the
433             # scheduler without proper validation of remote_uri
434             if validate_uri:
435                 config = make_db_config(clear_session=False)
436                 url_validator(remote_uri, dbrepo.repo_type, config)
437         except InvalidCloneUrl:
438             raise
439
440         try:
441             repo.push(remote_uri)
442         except Exception:
443             log.error(traceback.format_exc())
444             raise
445
446     def commit_change(self, repo, repo_name, commit, user, author, message,
447                       content, f_path):
448         """
449         Commits changes
450
451         :param repo: SCM instance
452
453         """
454         user = self._get_user(user)
455
456         # decoding here ensures that we have properly encoded values;
457         # in any other case this will raise an exception and deny the commit
458         content = safe_str(content)
459         path = safe_str(f_path)
460         # message and author need to be unicode;
461         # the proper backend should then translate that into the required type
462         message = safe_unicode(message)
463         author = safe_unicode(author)
464         imc = repo.in_memory_commit
465         imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
466         try:
467             # TODO: handle pre-push action !
468             tip = imc.commit(
469                 message=message, author=author, parents=[commit],
470                 branch=commit.branch)
471         except Exception as e:
472             log.error(traceback.format_exc())
473             raise IMCCommitError(str(e))
474         finally:
475             # always clear caches; if the commit fails we want a fresh object too
476             self.mark_for_invalidation(repo_name)
477
478         # We trigger the post-push action
479         hooks_utils.trigger_post_push_hook(
480             username=user.username, action='push_local', hook_type='post_push',
481             repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
482         return tip
483
484     def _sanitize_path(self, f_path):
485         if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
486             raise NonRelativePathError('%s is not a relative path' % f_path)
487         if f_path:
488             f_path = os.path.normpath(f_path)
489         return f_path
490
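    # Behaviour sketch (illustrative inputs): absolute paths, explicit './'
    # prefixes and any '../' traversal are rejected; everything else is
    # normalized:
    #
    #   _sanitize_path('docs/readme.rst')  # -> 'docs/readme.rst'
    #   _sanitize_path('docs/./readme')    # -> 'docs/readme' (via normpath)
    #   _sanitize_path('/etc/passwd')      # raises NonRelativePathError
    #   _sanitize_path('a/../../secret')   # raises NonRelativePathError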
491     def get_dirnode_metadata(self, request, commit, dir_node):
492         if not dir_node.is_dir():
493             return []
494
495         data = []
496         for node in dir_node:
497             if not node.is_file():
498                 # we skip non-file nodes
499                 continue
500
501             last_commit = node.last_commit
502             last_commit_date = last_commit.date
503             data.append({
504                 'name': node.name,
505                 'size': h.format_byte_size_binary(node.size),
506                 'modified_at': h.format_date(last_commit_date),
507                 'modified_ts': last_commit_date.isoformat(),
508                 'revision': last_commit.revision,
509                 'short_id': last_commit.short_id,
510                 'message': h.escape(last_commit.message),
511                 'author': h.escape(last_commit.author),
512                 'user_profile': h.gravatar_with_user(
513                     request, last_commit.author),
514             })
515
516         return data
517
518     def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
519                   extended_info=False, content=False, max_file_bytes=None):
520         """
521         Recursive walk in the root dir, returning a set of all paths in
522         that dir, based on the repository walk function
523
524         :param repo_name: name of the repository
525         :param commit_id: commit id for which to list nodes
526         :param root_path: root path to list
527         :param flat: return as a list; if False, returns a dict with description
528         :param extended_info: show additional info such as md5, binary, size etc
529         :param content: add node content to the returned data
530         :param max_file_bytes: will not return file contents over this limit
531
532         """
533         _files = list()
534         _dirs = list()
535         try:
536             _repo = self._get_repo(repo_name)
537             commit = _repo.scm_instance().get_commit(commit_id=commit_id)
538             root_path = root_path.lstrip('/')
539             for __, dirs, files in commit.walk(root_path):
540
541                 for f in files:
542                     _content = None
543                     _data = f_name = f.unicode_path
544
545                     if not flat:
546                         _data = {
547                             "name": h.escape(f_name),
548                             "type": "file",
549                         }
550                         if extended_info:
551                             _data.update({
552                                 "md5": f.md5,
553                                 "binary": f.is_binary,
554                                 "size": f.size,
555                                 "extension": f.extension,
556                                 "mimetype": f.mimetype,
557                                 "lines": f.lines()[0]
558                             })
559
560                         if content:
561                             over_size_limit = (max_file_bytes is not None
562                                                and f.size > max_file_bytes)
563                             full_content = None
564                             if not f.is_binary and not over_size_limit:
565                                 full_content = safe_str(f.content)
566
567                             _data.update({
568                                 "content": full_content,
569                             })
570                     _files.append(_data)
571
572                 for d in dirs:
573                     _data = d_name = d.unicode_path
574                     if not flat:
575                         _data = {
576                             "name": h.escape(d_name),
577                             "type": "dir",
578                         }
579                         if extended_info:
580                             _data.update({
581                                 "md5": None,
582                                 "binary": None,
583                                 "size": None,
584                                 "extension": None,
585                             })
586                         if content:
587                             _data.update({
588                                 "content": None
589                             })
590                     _dirs.append(_data)
591         except RepositoryError:
592             log.exception("Exception in get_nodes")
593             raise
594
595         return _dirs, _files
596
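    # Return-shape sketch (hypothetical repository and commit id): with
    # flat=True the two lists hold plain paths; with flat=False they hold
    # dicts, e.g.:
    #
    #   dirs, files = ScmModel().get_nodes('web/frontend', 'tip', flat=False)
    #   # dirs  -> [{'name': 'src', 'type': 'dir'}, ...]
    #   # files -> [{'name': 'setup.py', 'type': 'file'}, ...]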
597     def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598         """
599         Generate files for the quick filter in the files view
600         """
601
602         _files = list()
603         _dirs = list()
604         try:
605             _repo = self._get_repo(repo_name)
606             commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607             root_path = root_path.lstrip('/')
608             for __, dirs, files in commit.walk(root_path):
609
610                 for f in files:
611
612                     _data = {
613                         "name": h.escape(f.unicode_path),
614                         "type": "file",
615                     }
616
617                     _files.append(_data)
618
619                 for d in dirs:
620
621                     _data = {
622                         "name": h.escape(d.unicode_path),
623                         "type": "dir",
624                     }
625
626                     _dirs.append(_data)
627         except RepositoryError:
628             log.exception("Exception in get_quick_filter_nodes")
629             raise
630
631         return _dirs, _files
632
633     def get_node(self, repo_name, commit_id, file_path,
634                  extended_info=False, content=False, max_file_bytes=None, cache=True):
635         """
636         Retrieve a single node from a commit
637         """
638         try:
639
640             _repo = self._get_repo(repo_name)
641             commit = _repo.scm_instance().get_commit(commit_id=commit_id)
642
643             file_node = commit.get_node(file_path)
644             if file_node.is_dir():
645                 raise RepositoryError('The given path is a directory')
646
647             _content = None
648             f_name = file_node.unicode_path
649
650             file_data = {
651                 "name": h.escape(f_name),
652                 "type": "file",
653             }
654
655             if extended_info:
656                 file_data.update({
657                     "extension": file_node.extension,
658                     "mimetype": file_node.mimetype,
659                 })
660
661             if cache:
662                 md5 = file_node.md5
663                 is_binary = file_node.is_binary
664                 size = file_node.size
665             else:
666                 is_binary, md5, size, _content = file_node.metadata_uncached()
667
668             file_data.update({
669                 "md5": md5,
670                 "binary": is_binary,
671                 "size": size,
672             })
673
674             if content and cache:
675                 # get content + cache
676                 size = file_node.size
677                 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
678                 full_content = None
679                 all_lines = 0
680                 if not file_node.is_binary and not over_size_limit:
681                     full_content = safe_unicode(file_node.content)
682                     all_lines, empty_lines = file_node.count_lines(full_content)
683
684                 file_data.update({
685                     "content": full_content,
686                     "lines": all_lines
687                 })
688             elif content:
689                 # get content *without* cache
690                 if _content is None:
691                     is_binary, md5, size, _content = file_node.metadata_uncached()
692
693                 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
694                 full_content = None
695                 all_lines = 0
696                 if not is_binary and not over_size_limit:
697                     full_content = safe_unicode(_content)
698                     all_lines, empty_lines = file_node.count_lines(full_content)
699
700                 file_data.update({
701                     "content": full_content,
702                     "lines": all_lines
703                 })
704
705         except RepositoryError:
706             log.exception("Exception in get_node")
707             raise
708
709         return file_data
710
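    # Result sketch (hypothetical file and values): the returned dict always
    # carries name, type, md5, binary and size; extended_info and content
    # only add keys, e.g.:
    #
    #   ScmModel().get_node('web/frontend', 'tip', 'setup.py', content=True)
    #   # -> {'name': 'setup.py', 'type': 'file', 'md5': '...',
    #   #     'binary': False, 'size': 1024, 'content': u'...', 'lines': 42}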
711     def get_fts_data(self, repo_name, commit_id, root_path='/'):
712         """
713         Fetch the node tree for usage in full text search
714         """
715
716         tree_info = list()
717
718         try:
719             _repo = self._get_repo(repo_name)
720             commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721             root_path = root_path.lstrip('/')
722             for __, dirs, files in commit.walk(root_path):
723
724                 for f in files:
725                     is_binary, md5, size, _content = f.metadata_uncached()
726                     _data = {
727                         "name": f.unicode_path,
728                         "md5": md5,
729                         "extension": f.extension,
730                         "binary": is_binary,
731                         "size": size
732                     }
733
734                     tree_info.append(_data)
735
736         except RepositoryError:
737             log.exception("Exception in get_fts_data")
738             raise
739
740         return tree_info
741
742     def create_nodes(self, user, repo, message, nodes, parent_commit=None,
743                      author=None, trigger_push_hook=True):
744         """
745         Commits multiple given nodes into the repo
746
747         :param user: RhodeCode User object or user_id, the committer
748         :param repo: RhodeCode Repository object
749         :param message: commit message
750         :param nodes: mapping {filename:{'content':content},...}
751         :param parent_commit: parent commit; if empty, this is the
752             initial commit
753         :param author: author of the commit; can be different from the
754             committer, git only
755         :param trigger_push_hook: trigger push hooks
756
757         :returns: the newly committed commit
758         """
759
760         user = self._get_user(user)
761         scm_instance = repo.scm_instance(cache=False)
762
763         processed_nodes = []
764         for f_path in nodes:
765             f_path = self._sanitize_path(f_path)
766             content = nodes[f_path]['content']
767             f_path = safe_str(f_path)
768             # decoding here ensures that we have properly encoded values;
769             # in any other case this will raise an exception and deny the commit
770             if isinstance(content, (basestring,)):
771                 content = safe_str(content)
772             elif isinstance(content, (file, cStringIO.OutputType,)):
773                 content = content.read()
774             else:
775                 raise Exception('Content is of unrecognized type %s' % (
776                     type(content)
777                 ))
778             processed_nodes.append((f_path, content))
779
780         message = safe_unicode(message)
781         commiter = user.full_contact
782         author = safe_unicode(author) if author else commiter
783
784         imc = scm_instance.in_memory_commit
785
786         if not parent_commit:
787             parent_commit = EmptyCommit(alias=scm_instance.alias)
788
789         if isinstance(parent_commit, EmptyCommit):
790             # EmptyCommit means we're editing an empty repository
791             parents = None
792         else:
793             parents = [parent_commit]
794         # add multiple nodes
795         for path, content in processed_nodes:
796             imc.add(FileNode(path, content=content))
797         # TODO: handle pre push scenario
798         tip = imc.commit(message=message,
799                          author=author,
800                          parents=parents,
801                          branch=parent_commit.branch)
802
803         self.mark_for_invalidation(repo.repo_name)
804         if trigger_push_hook:
805             hooks_utils.trigger_post_push_hook(
806                 username=user.username, action='push_local',
807                 repo_name=repo.repo_name, repo_type=scm_instance.alias,
808                 hook_type='post_push',
809                 commit_ids=[tip.raw_id])
810         return tip
811
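    # Usage sketch (cur_user, db_repo and the file contents are hypothetical):
    # committing two new files in one go; parent_commit defaults to an
    # EmptyCommit for a brand new repository:
    #
    #   ScmModel().create_nodes(
    #       user=cur_user, repo=db_repo, message=u'add docs',
    #       nodes={'docs/index.rst': {'content': 'index'},
    #              'docs/api.rst': {'content': 'api'}})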
812     def update_nodes(self, user, repo, message, nodes, parent_commit=None,
813                      author=None, trigger_push_hook=True):
814         user = self._get_user(user)
815         scm_instance = repo.scm_instance(cache=False)
816
817         message = safe_unicode(message)
818         commiter = user.full_contact
819         author = safe_unicode(author) if author else commiter
820
821         imc = scm_instance.in_memory_commit
822
823         if not parent_commit:
824             parent_commit = EmptyCommit(alias=scm_instance.alias)
825
826         if isinstance(parent_commit, EmptyCommit):
827             # EmptyCommit means we're editing an empty repository
828             parents = None
829         else:
830             parents = [parent_commit]
831
832         # add multiple nodes
833         for _filename, data in nodes.items():
834             # the new filename can be a rename of the old one; also sanitize
835             # the path against any hack around relative paths like ../../ etc.
836             filename = self._sanitize_path(data['filename'])
837             old_filename = self._sanitize_path(_filename)
838             content = data['content']
839             file_mode = data.get('mode')
840             filenode = FileNode(old_filename, content=content, mode=file_mode)
841             op = data['op']
842             if op == 'add':
843                 imc.add(filenode)
844             elif op == 'del':
845                 imc.remove(filenode)
846             elif op == 'mod':
847                 if filename != old_filename:
848                     # TODO: handle renames more efficiently, needs vcs lib changes
849                     imc.remove(filenode)
850                     imc.add(FileNode(filename, content=content, mode=file_mode))
851                 else:
852                     imc.change(filenode)
853
854         try:
855             # TODO: handle pre push scenario commit changes
856             tip = imc.commit(message=message,
857                              author=author,
858                              parents=parents,
859                              branch=parent_commit.branch)
860         except NodeNotChangedError:
861             raise
862         except Exception as e:
863             log.exception("Unexpected exception during call to imc.commit")
864             raise IMCCommitError(str(e))
865         finally:
866             # always clear caches; if the commit fails we want a fresh object too
867             self.mark_for_invalidation(repo.repo_name)
868
869         if trigger_push_hook:
870             hooks_utils.trigger_post_push_hook(
871                 username=user.username, action='push_local', hook_type='post_push',
872                 repo_name=repo.repo_name, repo_type=scm_instance.alias,
873                 commit_ids=[tip.raw_id])
874
875         return tip
876
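    # Mapping sketch (hypothetical filenames): each entry carries an 'op'
    # plus the keys read unconditionally above ('filename', 'content'); for
    # 'mod' the target filename may differ, which is treated as a rename:
    #
    #   nodes = {
    #       'old.txt': {'op': 'mod', 'filename': 'new.txt',
    #                   'content': 'updated'},   # modify + rename
    #       'tmp.log': {'op': 'del', 'filename': 'tmp.log',
    #                   'content': ''},          # delete
    #   }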
877     def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
878                      author=None, trigger_push_hook=True):
879         """
880         Deletes multiple given nodes from `repo`
881
882         :param user: RhodeCode User object or user_id, the committer
883         :param repo: RhodeCode Repository object
884         :param message: commit message
885         :param nodes: mapping {filename:{'content':content},...}
886         :param parent_commit: parent commit; if empty, this is the initial
887             commit
888         :param author: author of the commit; can be different from the
889             committer, git only
890         :param trigger_push_hook: trigger push hooks
891
892         :returns: the new commit after deletion
893         """
894
895         user = self._get_user(user)
896         scm_instance = repo.scm_instance(cache=False)
897
898         processed_nodes = []
899         for f_path in nodes:
900             f_path = self._sanitize_path(f_path)
901             # content can be empty, but for compatibility this accepts the
902             # same dict structure as add_nodes
903             content = nodes[f_path].get('content')
904             processed_nodes.append((f_path, content))
905
906         message = safe_unicode(message)
907         commiter = user.full_contact
908         author = safe_unicode(author) if author else commiter
909
910         imc = scm_instance.in_memory_commit
911
912         if not parent_commit:
913             parent_commit = EmptyCommit(alias=scm_instance.alias)
914
915         if isinstance(parent_commit, EmptyCommit):
916             # EmptyCommit means we're editing an empty repository
917             parents = None
918         else:
919             parents = [parent_commit]
920         # add multiple nodes
921         for path, content in processed_nodes:
922             imc.remove(FileNode(path, content=content))
923
924         # TODO: handle pre push scenario
925         tip = imc.commit(message=message,
926                          author=author,
927                          parents=parents,
928                          branch=parent_commit.branch)
929
930         self.mark_for_invalidation(repo.repo_name)
931         if trigger_push_hook:
932             hooks_utils.trigger_post_push_hook(
933                 username=user.username, action='push_local', hook_type='post_push',
934                 repo_name=repo.repo_name, repo_type=scm_instance.alias,
935                 commit_ids=[tip.raw_id])
936         return tip
937
938     def strip(self, repo, commit_id, branch):
939         scm_instance = repo.scm_instance(cache=False)
940         scm_instance.config.clear_section('hooks')
941         scm_instance.strip(commit_id, branch)
942         self.mark_for_invalidation(repo.repo_name)
943
944     def get_unread_journal(self):
945         return self.sa.query(UserLog).count()
946
947     @classmethod
948     def backend_landing_ref(cls, repo_type):
949         """
950         Return a default landing ref based on a repository type.
951         """
952
953         landing_ref = {
954             'hg': ('branch:default', 'default'),
955             'git': ('branch:master', 'master'),
956             'svn': ('rev:tip', 'latest tip'),
957             'default': ('rev:tip', 'latest tip'),
958         }
959
960         return landing_ref.get(repo_type) or landing_ref['default']
961
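    # Resolution sketch: the mapping above yields (ref, label) pairs and
    # falls back to the 'default' entry for unknown or missing repo types:
    #
    #   ScmModel.backend_landing_ref('git')  # -> ('branch:master', 'master')
    #   ScmModel.backend_landing_ref(None)   # -> ('rev:tip', 'latest tip')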
962     def get_repo_landing_revs(self, translator, repo=None):
963         """
964         Generates select options with tags, branches and bookmarks (the
965         latter for hg only), grouped by type
966
967         :param repo:
968         """
+ 969         from rhodecode.lib.vcs.backends.git import GitRepository
+ 970
971         _ = translator
972         repo = self._get_repo(repo)
973
974         if repo:
975             repo_type = repo.repo_type
976         else:
977             repo_type = 'default'
978
979         default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
980
981         default_ref_options = [
982             [default_landing_ref, landing_ref_lbl]
983         ]
984         default_choices = [
985             default_landing_ref
986         ]
987
988         if not repo:
+ 989             # presented at NEW repo creation
990             return default_choices, default_ref_options
991
992         repo = repo.scm_instance()
993
- 991         ref_options = [('rev:tip', 'latest tip')]
+ 994         ref_options = [(default_landing_ref, landing_ref_lbl)]
- 992         choices = ['rev:tip']
+ 995         choices = [default_landing_ref]
996
997         # branches
998         branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
999         if not branch_group:
1000             # new repo, or maybe one without a branch?
1001             branch_group = default_ref_options
1002
1003         branches_group = (branch_group, _("Branches"))
1004         ref_options.append(branches_group)
1005         choices.extend([x[0] for x in branches_group[0]])
1006
1007         # bookmarks for HG
1008         if repo.alias == 'hg':
1009             bookmarks_group = (
1010                 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1011                  for b in repo.bookmarks],
1012                 _("Bookmarks"))
1013             ref_options.append(bookmarks_group)
1014             choices.extend([x[0] for x in bookmarks_group[0]])
1015
1016         # tags
1017         tags_group = (
1018             [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1019              for t in repo.tags],
1020             _("Tags"))
1021         ref_options.append(tags_group)
1022         choices.extend([x[0] for x in tags_group[0]])
1023
1024         return choices, ref_options
1025
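    # Shape sketch (hypothetical hg repo): choices is a flat list of valid
    # values, while ref_options mixes a leading default pair with
    # (entries, label) groups for the select widget:
    #
    #   choices, ref_options = ScmModel().get_repo_landing_revs(_, db_repo)
    #   # choices     -> ['branch:default', 'branch:stable', 'book:main', ...]
    #   # ref_options -> [('branch:default', 'default'),
    #   #                 ([('branch:default', 'default'), ...], 'Branches'),
    #   #                 ...]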
1026     def get_server_info(self, environ=None):
1027         server_info = get_system_info(environ)
1028         return server_info
@@ -1,454 +1,450 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22 import deform.widget
22 import deform.widget
23
23
24 from rhodecode.translation import _
24 from rhodecode.translation import _
25 from rhodecode.model.validation_schema.utils import convert_to_optgroup, username_converter
25 from rhodecode.model.validation_schema.utils import convert_to_optgroup, username_converter
26 from rhodecode.model.validation_schema import validators, preparers, types
26 from rhodecode.model.validation_schema import validators, preparers, types
27
27
28 DEFAULT_LANDING_REF = 'rev:tip'
28 DEFAULT_LANDING_REF = 'rev:tip'
29 DEFAULT_BACKEND_LANDING_REF = {
30 'hg': 'branch:default',
31 'git': 'branch:master',
32 'svn': 'rev:tip',
33 }
34
29
35
30
36 def get_group_and_repo(repo_name):
31 def get_group_and_repo(repo_name):
37 from rhodecode.model.repo_group import RepoGroupModel
32 from rhodecode.model.repo_group import RepoGroupModel
38 return RepoGroupModel()._get_group_name_and_parent(
33 return RepoGroupModel()._get_group_name_and_parent(
39 repo_name, get_object=True)
34 repo_name, get_object=True)
40
35
41
36
42 def get_repo_group(repo_group_id):
37 def get_repo_group(repo_group_id):
43 from rhodecode.model.repo_group import RepoGroup
38 from rhodecode.model.repo_group import RepoGroup
44 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
39 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
45
40
46
41
47 @colander.deferred
42 @colander.deferred
48 def deferred_repo_type_validator(node, kw):
43 def deferred_repo_type_validator(node, kw):
49 options = kw.get('repo_type_options', [])
44 options = kw.get('repo_type_options', [])
50 return colander.OneOf([x for x in options])
45 return colander.OneOf([x for x in options])
51
46
52
47
53 @colander.deferred
48 @colander.deferred
54 def deferred_repo_owner_validator(node, kw):
49 def deferred_repo_owner_validator(node, kw):
55
50
56 def repo_owner_validator(node, value):
51 def repo_owner_validator(node, value):
57 from rhodecode.model.db import User
52 from rhodecode.model.db import User
58 value = username_converter(value)
53 value = username_converter(value)
59 existing = User.get_by_username(value)
54 existing = User.get_by_username(value)
60 if not existing:
55 if not existing:
61 msg = _(u'Repo owner with name `{}` does not exist').format(value)
56 msg = _(u'Repo owner with name `{}` does not exist').format(value)
62 raise colander.Invalid(node, msg)
57 raise colander.Invalid(node, msg)
63
58
64 return repo_owner_validator
59 return repo_owner_validator
65
60
66
61
67 @colander.deferred
62 @colander.deferred
68 def deferred_landing_ref_validator(node, kw):
63 def deferred_landing_ref_validator(node, kw):
69 options = kw.get(
64 options = kw.get(
70 'repo_ref_options', [DEFAULT_LANDING_REF])
65 'repo_ref_options', [DEFAULT_LANDING_REF])
71 return colander.OneOf(list(options))
66 return colander.OneOf(list(options))
72
67
73
68
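A note on the deferred validators above: colander leaves them unresolved until the schema is bound, and `bind()` supplies the keyword arguments (`kw`) each deferred callable receives. A minimal sketch, assuming an illustrative `repo_ref_options` list (the schema and field names here are for demonstration only)::

    import colander

    class _LandingRefSchema(colander.MappingSchema):
        ref = colander.SchemaNode(
            colander.String(),
            validator=deferred_landing_ref_validator)

    # binding resolves the deferred into a concrete OneOf validator
    schema = _LandingRefSchema().bind(
        repo_ref_options=['branch:default', 'rev:tip'])
    schema.deserialize({'ref': 'branch:default'})  # passes
    schema.deserialize({'ref': 'branch:nope'})     # raises colander.Invalid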
74 @colander.deferred
69 @colander.deferred
75 def deferred_sync_uri_validator(node, kw):
70 def deferred_sync_uri_validator(node, kw):
76 repo_type = kw.get('repo_type')
71 repo_type = kw.get('repo_type')
77 validator = validators.CloneUriValidator(repo_type)
72 validator = validators.CloneUriValidator(repo_type)
78 return validator
73 return validator
79
74
80
75
81 @colander.deferred
76 @colander.deferred
82 def deferred_landing_ref_widget(node, kw):
77 def deferred_landing_ref_widget(node, kw):
78 from rhodecode.model.scm import ScmModel
79
83 repo_type = kw.get('repo_type')
80 repo_type = kw.get('repo_type')
84 default_opts = []
81 default_opts = []
85 if repo_type:
82 if repo_type:
86 default_opts.append(
83 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
87 (DEFAULT_BACKEND_LANDING_REF[repo_type],
84 default_opts.append((default_landing_ref, default_landing_ref))
88 DEFAULT_BACKEND_LANDING_REF[repo_type]))
89
85
90 items = kw.get('repo_ref_items', default_opts)
86 items = kw.get('repo_ref_items', default_opts)
91 items = convert_to_optgroup(items)
87 items = convert_to_optgroup(items)
92 return deform.widget.Select2Widget(values=items)
88 return deform.widget.Select2Widget(values=items)
93
89
94
90
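`convert_to_optgroup` is a RhodeCode helper; deform's select widgets accept plain `(value, label)` pairs as well as `deform.widget.OptGroup` entries, which is presumably what it builds from grouped ref items. A hedged sketch of the kind of `values` the widget above ends up with (ref names are illustrative)::

    import deform.widget

    values = [
        ('rev:tip', 'latest tip'),
        deform.widget.OptGroup(
            'Branches',
            ('branch:default', 'default'),
            ('branch:stable', 'stable')),
    ]
    widget = deform.widget.Select2Widget(values=values)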
95 @colander.deferred
91 @colander.deferred
96 def deferred_fork_of_validator(node, kw):
92 def deferred_fork_of_validator(node, kw):
97 old_values = kw.get('old_values') or {}
93 old_values = kw.get('old_values') or {}
98
94
99 def fork_of_validator(node, value):
95 def fork_of_validator(node, value):
100 from rhodecode.model.db import Repository, RepoGroup
96 from rhodecode.model.db import Repository, RepoGroup
101 existing = Repository.get_by_repo_name(value)
97 existing = Repository.get_by_repo_name(value)
102 if not existing:
98 if not existing:
103 msg = _(u'Fork with name `{}` does not exist').format(value)
99 msg = _(u'Fork with name `{}` does not exist').format(value)
104 raise colander.Invalid(node, msg)
100 raise colander.Invalid(node, msg)
105 elif old_values.get('repo_name') == existing.repo_name:
101 elif old_values.get('repo_name') == existing.repo_name:
106 msg = _(u'Cannot set the `fork of` parameter '
102 msg = _(u'Cannot set the `fork of` parameter '
107 u'of this repository to itself').format(value)
103 u'of this repository to itself').format(value)
108 raise colander.Invalid(node, msg)
104 raise colander.Invalid(node, msg)
109
105
110 return fork_of_validator
106 return fork_of_validator
111
107
112
108
113 @colander.deferred
109 @colander.deferred
114 def deferred_can_write_to_group_validator(node, kw):
110 def deferred_can_write_to_group_validator(node, kw):
115 request_user = kw.get('user')
111 request_user = kw.get('user')
116 old_values = kw.get('old_values') or {}
112 old_values = kw.get('old_values') or {}
117
113
118 def can_write_to_group_validator(node, value):
114 def can_write_to_group_validator(node, value):
119 """
115 """
120 Checks if the given repo path is writable by the user. This includes
116 Checks if the given repo path is writable by the user. This includes
121 checking whether the user is allowed to create repositories under the
117 checking whether the user is allowed to create repositories under the
122 root path or under repo group paths
118 root path or under repo group paths
123 """
119 """
124
120
125 from rhodecode.lib.auth import (
121 from rhodecode.lib.auth import (
126 HasPermissionAny, HasRepoGroupPermissionAny)
122 HasPermissionAny, HasRepoGroupPermissionAny)
127 from rhodecode.model.repo_group import RepoGroupModel
123 from rhodecode.model.repo_group import RepoGroupModel
128
124
129 messages = {
125 messages = {
130 'invalid_repo_group':
126 'invalid_repo_group':
131 _(u"Repository group `{}` does not exist"),
127 _(u"Repository group `{}` does not exist"),
132 # permission-denied errors are exposed as "does not exist",
128 # permission-denied errors are exposed as "does not exist",
133 # to prevent resource discovery
129 # to prevent resource discovery
134 'permission_denied':
130 'permission_denied':
135 _(u"Repository group `{}` does not exist"),
131 _(u"Repository group `{}` does not exist"),
136 'permission_denied_root':
132 'permission_denied_root':
137 _(u"You do not have the permission to store "
133 _(u"You do not have the permission to store "
138 u"repositories in the root location.")
134 u"repositories in the root location.")
139 }
135 }
140
136
141 value = value['repo_group_name']
137 value = value['repo_group_name']
142
138
143 is_root_location = value is types.RootLocation
139 is_root_location = value is types.RootLocation
144 # validators are NOT initialized yet, we must call them
140 # validators are NOT initialized yet, we must call them
145 can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository')
141 can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository')
146
142
147 # if value is the root location, we simply need to check
143 # if value is the root location, we simply need to check
148 # whether we can write there
144 # whether we can write there
149 if is_root_location:
145 if is_root_location:
150
146
151 if can_create_repos_at_root(user=request_user):
147 if can_create_repos_at_root(user=request_user):
152 # we can create repos at the root level; no more checks
148 # we can create repos at the root level; no more checks
153 # are required
149 # are required
154 return
150 return
155 else:
151 else:
156 old_name = old_values.get('repo_name')
152 old_name = old_values.get('repo_name')
157 if old_name and old_name == old_values.get('submitted_repo_name'):
153 if old_name and old_name == old_values.get('submitted_repo_name'):
158 # since we didn't change the name, we can skip validation and
154 # since we didn't change the name, we can skip validation and
159 # allow current users without store-in-root permissions to update
155 # allow current users without store-in-root permissions to update
160 return
156 return
161
157
162 # "fake" node name as repo_name, otherwise we oddly report
158 # "fake" node name as repo_name, otherwise we oddly report
163 # the error as if it was coming from repo_group
159 # the error as if it was coming from repo_group
164 # however repo_group is empty when using root location.
160 # however repo_group is empty when using root location.
165 node.name = 'repo_name'
161 node.name = 'repo_name'
166 raise colander.Invalid(node, messages['permission_denied_root'])
162 raise colander.Invalid(node, messages['permission_denied_root'])
167
163
168 # parent group does not exist? raise an error
164 # parent group does not exist? raise an error
169 repo_group = RepoGroupModel().get_by_group_name(value)
165 repo_group = RepoGroupModel().get_by_group_name(value)
170 if value and not repo_group:
166 if value and not repo_group:
171 raise colander.Invalid(
167 raise colander.Invalid(
172 node, messages['invalid_repo_group'].format(value))
168 node, messages['invalid_repo_group'].format(value))
173
169
174 gr_name = repo_group.group_name
170 gr_name = repo_group.group_name
175
171
176 # check if 'create repositories with write permission on group' is set to true
172 # check if 'create repositories with write permission on group' is set to true
177 create_on_write = HasPermissionAny(
173 create_on_write = HasPermissionAny(
178 'hg.create.write_on_repogroup.true')(user=request_user)
174 'hg.create.write_on_repogroup.true')(user=request_user)
179
175
180 group_admin = HasRepoGroupPermissionAny('group.admin')(
176 group_admin = HasRepoGroupPermissionAny('group.admin')(
181 gr_name, 'can write into group validator', user=request_user)
177 gr_name, 'can write into group validator', user=request_user)
182 group_write = HasRepoGroupPermissionAny('group.write')(
178 group_write = HasRepoGroupPermissionAny('group.write')(
183 gr_name, 'can write into group validator', user=request_user)
179 gr_name, 'can write into group validator', user=request_user)
184
180
185 forbidden = not (group_admin or (group_write and create_on_write))
181 forbidden = not (group_admin or (group_write and create_on_write))
186
182
187 # TODO: handling of old values, and detecting no-change in path
183 # TODO: handling of old values, and detecting no-change in path
188 # to skip permission checks in such cases. This only needs to be
184 # to skip permission checks in such cases. This only needs to be
189 # implemented if we use this schema in forms as well
185 # implemented if we use this schema in forms as well
190
186
191 # gid = (old_data['repo_group'].get('group_id')
187 # gid = (old_data['repo_group'].get('group_id')
192 # if (old_data and 'repo_group' in old_data) else None)
188 # if (old_data and 'repo_group' in old_data) else None)
193 # value_changed = gid != safe_int(value)
189 # value_changed = gid != safe_int(value)
194 # new = not old_data
190 # new = not old_data
195
191
196 # check whether the value changed: a user whose write permission to a
192 # check whether the value changed: a user whose write permission to a
197 # repository was revoked may still have created it, and we don't need
193 # repository was revoked may still have created it, and we don't need
198 # to check permissions if the group value in the form box was not
194 # to check permissions if the group value in the form box was not
199 # changed
195 # changed
200 # if value_changed or new:
196 # if value_changed or new:
201 # # parent group need to be existing
197 # # parent group need to be existing
202 # TODO: ENDS HERE
198 # TODO: ENDS HERE
203
199
204 if repo_group and forbidden:
200 if repo_group and forbidden:
205 msg = messages['permission_denied'].format(value)
201 msg = messages['permission_denied'].format(value)
206 raise colander.Invalid(node, msg)
202 raise colander.Invalid(node, msg)
207
203
208 return can_write_to_group_validator
204 return can_write_to_group_validator
209
205
210
206
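The `forbidden` expression in `can_write_to_group_validator` above reduces the whole permission gate to one boolean. Restated as a self-contained check with its truth table (the helper name is illustrative)::

    def _forbidden(group_admin, group_write, create_on_write):
        # mirrors: forbidden = not (group_admin or (group_write and create_on_write))
        return not (group_admin or (group_write and create_on_write))

    assert _forbidden(True, False, False) is False  # group admin is always allowed
    assert _forbidden(False, True, True) is False   # write + create-on-write allowed
    assert _forbidden(False, True, False) is True   # write alone is not enough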
211 @colander.deferred
207 @colander.deferred
212 def deferred_unique_name_validator(node, kw):
208 def deferred_unique_name_validator(node, kw):
213 request_user = kw.get('user')
209 request_user = kw.get('user')
214 old_values = kw.get('old_values') or {}
210 old_values = kw.get('old_values') or {}
215
211
216 def unique_name_validator(node, value):
212 def unique_name_validator(node, value):
217 from rhodecode.model.db import Repository, RepoGroup
213 from rhodecode.model.db import Repository, RepoGroup
218 name_changed = value != old_values.get('repo_name')
214 name_changed = value != old_values.get('repo_name')
219
215
220 existing = Repository.get_by_repo_name(value)
216 existing = Repository.get_by_repo_name(value)
221 if name_changed and existing:
217 if name_changed and existing:
222 msg = _(u'Repository with name `{}` already exists').format(value)
218 msg = _(u'Repository with name `{}` already exists').format(value)
223 raise colander.Invalid(node, msg)
219 raise colander.Invalid(node, msg)
224
220
225 existing_group = RepoGroup.get_by_group_name(value)
221 existing_group = RepoGroup.get_by_group_name(value)
226 if name_changed and existing_group:
222 if name_changed and existing_group:
227 msg = _(u'Repository group with name `{}` already exists').format(
223 msg = _(u'Repository group with name `{}` already exists').format(
228 value)
224 value)
229 raise colander.Invalid(node, msg)
225 raise colander.Invalid(node, msg)
230 return unique_name_validator
226 return unique_name_validator
231
227
232
228
233 @colander.deferred
229 @colander.deferred
234 def deferred_repo_name_validator(node, kw):
230 def deferred_repo_name_validator(node, kw):
235 def no_git_suffix_validator(node, value):
231 def no_git_suffix_validator(node, value):
236 if value.endswith('.git'):
232 if value.endswith('.git'):
237 msg = _('Repository name cannot end with .git')
233 msg = _('Repository name cannot end with .git')
238 raise colander.Invalid(node, msg)
234 raise colander.Invalid(node, msg)
239 return colander.All(
235 return colander.All(
240 no_git_suffix_validator, validators.valid_name_validator)
236 no_git_suffix_validator, validators.valid_name_validator)
241
237
242
238
243 @colander.deferred
239 @colander.deferred
244 def deferred_repo_group_validator(node, kw):
240 def deferred_repo_group_validator(node, kw):
245 options = kw.get(
241 options = kw.get(
246 'repo_repo_group_options')
242 'repo_repo_group_options')
247 return colander.OneOf(list(options))
243 return colander.OneOf(list(options))
248
244
249
245
250 @colander.deferred
246 @colander.deferred
251 def deferred_repo_group_widget(node, kw):
247 def deferred_repo_group_widget(node, kw):
252 items = kw.get('repo_repo_group_items')
248 items = kw.get('repo_repo_group_items')
253 return deform.widget.Select2Widget(values=items)
249 return deform.widget.Select2Widget(values=items)
254
250
255
251
256 class GroupType(colander.Mapping):
252 class GroupType(colander.Mapping):
257 def _validate(self, node, value):
253 def _validate(self, node, value):
258 try:
254 try:
259 return dict(repo_group_name=value)
255 return dict(repo_group_name=value)
260 except Exception as e:
256 except Exception as e:
261 raise colander.Invalid(
257 raise colander.Invalid(
262 node, '"${val}" is not a mapping type: ${err}'.format(
258 node, '"${val}" is not a mapping type: ${err}'.format(
263 val=value, err=e))
259 val=value, err=e))
264
260
265 def deserialize(self, node, cstruct):
261 def deserialize(self, node, cstruct):
266 if cstruct is colander.null:
262 if cstruct is colander.null:
267 return cstruct
263 return cstruct
268
264
269 appstruct = super(GroupType, self).deserialize(node, cstruct)
265 appstruct = super(GroupType, self).deserialize(node, cstruct)
270 validated_name = appstruct['repo_group_name']
266 validated_name = appstruct['repo_group_name']
271
267
272 # inject group info based on the already-deserialized data
268 # inject group info based on the already-deserialized data
273 (repo_name_without_group,
269 (repo_name_without_group,
274 parent_group_name,
270 parent_group_name,
275 parent_group) = get_group_and_repo(validated_name)
271 parent_group) = get_group_and_repo(validated_name)
276
272
277 appstruct['repo_name_with_group'] = validated_name
273 appstruct['repo_name_with_group'] = validated_name
278 appstruct['repo_name_without_group'] = repo_name_without_group
274 appstruct['repo_name_without_group'] = repo_name_without_group
279 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
275 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
280
276
281 if parent_group:
277 if parent_group:
282 appstruct['repo_group_id'] = parent_group.group_id
278 appstruct['repo_group_id'] = parent_group.group_id
283
279
284 return appstruct
280 return appstruct
285
281
286
282
287 class GroupSchema(colander.SchemaNode):
283 class GroupSchema(colander.SchemaNode):
288 schema_type = GroupType
284 schema_type = GroupType
289 validator = deferred_can_write_to_group_validator
285 validator = deferred_can_write_to_group_validator
290 missing = colander.null
286 missing = colander.null
291
287
292
288
293 class RepoGroup(GroupSchema):
289 class RepoGroup(GroupSchema):
294 repo_group_name = colander.SchemaNode(
290 repo_group_name = colander.SchemaNode(
295 types.GroupNameType())
291 types.GroupNameType())
296 repo_group_id = colander.SchemaNode(
292 repo_group_id = colander.SchemaNode(
297 colander.String(), missing=None)
293 colander.String(), missing=None)
298 repo_name_without_group = colander.SchemaNode(
294 repo_name_without_group = colander.SchemaNode(
299 colander.String(), missing=None)
295 colander.String(), missing=None)
300
296
301
297
302 class RepoGroupAccessSchema(colander.MappingSchema):
298 class RepoGroupAccessSchema(colander.MappingSchema):
303 repo_group = RepoGroup()
299 repo_group = RepoGroup()
304
300
305
301
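For orientation, `GroupType` (via `RepoGroupAccessSchema` just above) both checks write access and splits a nested name into its parts. A hedged round-trip sketch, assuming a bound `request_user` object and an existing repo group 'mygroup' (both invented for illustration)::

    bound = RepoGroupAccessSchema().bind(user=request_user, old_values={})
    appstruct = bound.deserialize({'repo_group': 'mygroup/myrepo'})
    # expected shape, roughly:
    # appstruct['repo_group'] == {
    #     'repo_group_name': 'mygroup',
    #     'repo_name_with_group': 'mygroup/myrepo',
    #     'repo_name_without_group': 'myrepo',
    #     'repo_group_id': <mygroup's id>,
    # }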
306 class RepoNameUniqueSchema(colander.MappingSchema):
302 class RepoNameUniqueSchema(colander.MappingSchema):
307 unique_repo_name = colander.SchemaNode(
303 unique_repo_name = colander.SchemaNode(
308 colander.String(),
304 colander.String(),
309 validator=deferred_unique_name_validator)
305 validator=deferred_unique_name_validator)
310
306
311
307
312 class RepoSchema(colander.MappingSchema):
308 class RepoSchema(colander.MappingSchema):
313
309
314 repo_name = colander.SchemaNode(
310 repo_name = colander.SchemaNode(
315 types.RepoNameType(),
311 types.RepoNameType(),
316 validator=deferred_repo_name_validator)
312 validator=deferred_repo_name_validator)
317
313
318 repo_type = colander.SchemaNode(
314 repo_type = colander.SchemaNode(
319 colander.String(),
315 colander.String(),
320 validator=deferred_repo_type_validator)
316 validator=deferred_repo_type_validator)
321
317
322 repo_owner = colander.SchemaNode(
318 repo_owner = colander.SchemaNode(
323 colander.String(),
319 colander.String(),
324 validator=deferred_repo_owner_validator,
320 validator=deferred_repo_owner_validator,
325 widget=deform.widget.TextInputWidget())
321 widget=deform.widget.TextInputWidget())
326
322
327 repo_description = colander.SchemaNode(
323 repo_description = colander.SchemaNode(
328 colander.String(), missing='',
324 colander.String(), missing='',
329 widget=deform.widget.TextAreaWidget())
325 widget=deform.widget.TextAreaWidget())
330
326
331 repo_landing_commit_ref = colander.SchemaNode(
327 repo_landing_commit_ref = colander.SchemaNode(
332 colander.String(),
328 colander.String(),
333 validator=deferred_landing_ref_validator,
329 validator=deferred_landing_ref_validator,
334 preparers=[preparers.strip_preparer],
330 preparers=[preparers.strip_preparer],
335 missing=DEFAULT_LANDING_REF,
331 missing=DEFAULT_LANDING_REF,
336 widget=deferred_landing_ref_widget)
332 widget=deferred_landing_ref_widget)
337
333
338 repo_clone_uri = colander.SchemaNode(
334 repo_clone_uri = colander.SchemaNode(
339 colander.String(),
335 colander.String(),
340 validator=deferred_sync_uri_validator,
336 validator=deferred_sync_uri_validator,
341 preparers=[preparers.strip_preparer],
337 preparers=[preparers.strip_preparer],
342 missing='')
338 missing='')
343
339
344 repo_push_uri = colander.SchemaNode(
340 repo_push_uri = colander.SchemaNode(
345 colander.String(),
341 colander.String(),
346 validator=deferred_sync_uri_validator,
342 validator=deferred_sync_uri_validator,
347 preparers=[preparers.strip_preparer],
343 preparers=[preparers.strip_preparer],
348 missing='')
344 missing='')
349
345
350 repo_fork_of = colander.SchemaNode(
346 repo_fork_of = colander.SchemaNode(
351 colander.String(),
347 colander.String(),
352 validator=deferred_fork_of_validator,
348 validator=deferred_fork_of_validator,
353 missing=None)
349 missing=None)
354
350
355 repo_private = colander.SchemaNode(
351 repo_private = colander.SchemaNode(
356 types.StringBooleanType(),
352 types.StringBooleanType(),
357 missing=False, widget=deform.widget.CheckboxWidget())
353 missing=False, widget=deform.widget.CheckboxWidget())
358 repo_copy_permissions = colander.SchemaNode(
354 repo_copy_permissions = colander.SchemaNode(
359 types.StringBooleanType(),
355 types.StringBooleanType(),
360 missing=False, widget=deform.widget.CheckboxWidget())
356 missing=False, widget=deform.widget.CheckboxWidget())
361 repo_enable_statistics = colander.SchemaNode(
357 repo_enable_statistics = colander.SchemaNode(
362 types.StringBooleanType(),
358 types.StringBooleanType(),
363 missing=False, widget=deform.widget.CheckboxWidget())
359 missing=False, widget=deform.widget.CheckboxWidget())
364 repo_enable_downloads = colander.SchemaNode(
360 repo_enable_downloads = colander.SchemaNode(
365 types.StringBooleanType(),
361 types.StringBooleanType(),
366 missing=False, widget=deform.widget.CheckboxWidget())
362 missing=False, widget=deform.widget.CheckboxWidget())
367 repo_enable_locking = colander.SchemaNode(
363 repo_enable_locking = colander.SchemaNode(
368 types.StringBooleanType(),
364 types.StringBooleanType(),
369 missing=False, widget=deform.widget.CheckboxWidget())
365 missing=False, widget=deform.widget.CheckboxWidget())
370
366
371 def deserialize(self, cstruct):
367 def deserialize(self, cstruct):
372 """
368 """
373 Custom deserialize that chains validation: field data first, then
369 Custom deserialize that chains validation: field data first, then
374 repo group permissions, and name uniqueness as the last step
370 repo group permissions, and name uniqueness as the last step
375 """
371 """
376
372
377 # first pass, to validate given data
373 # first pass, to validate given data
378 appstruct = super(RepoSchema, self).deserialize(cstruct)
374 appstruct = super(RepoSchema, self).deserialize(cstruct)
379 validated_name = appstruct['repo_name']
375 validated_name = appstruct['repo_name']
380
376
381 # second pass to validate permissions to repo_group
377 # second pass to validate permissions to repo_group
382 if 'old_values' in self.bindings:
378 if 'old_values' in self.bindings:
383 # save current repo name for name change checks
379 # save current repo name for name change checks
384 self.bindings['old_values']['submitted_repo_name'] = validated_name
380 self.bindings['old_values']['submitted_repo_name'] = validated_name
385 second = RepoGroupAccessSchema().bind(**self.bindings)
381 second = RepoGroupAccessSchema().bind(**self.bindings)
386 appstruct_second = second.deserialize({'repo_group': validated_name})
382 appstruct_second = second.deserialize({'repo_group': validated_name})
387 # save result
383 # save result
388 appstruct['repo_group'] = appstruct_second['repo_group']
384 appstruct['repo_group'] = appstruct_second['repo_group']
389
385
390 # third pass to validate uniqueness
386 # third pass to validate uniqueness
391 third = RepoNameUniqueSchema().bind(**self.bindings)
387 third = RepoNameUniqueSchema().bind(**self.bindings)
392 third.deserialize({'unique_repo_name': validated_name})
388 third.deserialize({'unique_repo_name': validated_name})
393
389
394 return appstruct
390 return appstruct
395
391
396
392
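Putting the schema to work: a sketch of the bind-then-deserialize flow, with illustrative binding values and an assumed `request_user` object (a real call also needs a database session for the permission and uniqueness passes)::

    import colander

    schema = RepoSchema().bind(
        repo_type_options=['hg', 'git', 'svn'],
        repo_ref_options=['rev:tip', 'branch:default'],
        repo_type='hg',
        user=request_user,
        old_values={})
    try:
        appstruct = schema.deserialize({
            'repo_name': 'mygroup/myrepo',
            'repo_type': 'hg',
            'repo_owner': 'admin',
        })
    except colander.Invalid as err:
        errors = err.asdict()  # field name -> error message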
397 class RepoSettingsSchema(RepoSchema):
393 class RepoSettingsSchema(RepoSchema):
398 repo_group = colander.SchemaNode(
394 repo_group = colander.SchemaNode(
399 colander.Integer(),
395 colander.Integer(),
400 validator=deferred_repo_group_validator,
396 validator=deferred_repo_group_validator,
401 widget=deferred_repo_group_widget,
397 widget=deferred_repo_group_widget,
402 missing='')
398 missing='')
403
399
404 repo_clone_uri_change = colander.SchemaNode(
400 repo_clone_uri_change = colander.SchemaNode(
405 colander.String(),
401 colander.String(),
406 missing='NEW')
402 missing='NEW')
407
403
408 repo_clone_uri = colander.SchemaNode(
404 repo_clone_uri = colander.SchemaNode(
409 colander.String(),
405 colander.String(),
410 preparers=[preparers.strip_preparer],
406 preparers=[preparers.strip_preparer],
411 validator=deferred_sync_uri_validator,
407 validator=deferred_sync_uri_validator,
412 missing='')
408 missing='')
413
409
414 repo_push_uri_change = colander.SchemaNode(
410 repo_push_uri_change = colander.SchemaNode(
415 colander.String(),
411 colander.String(),
416 missing='NEW')
412 missing='NEW')
417
413
418 repo_push_uri = colander.SchemaNode(
414 repo_push_uri = colander.SchemaNode(
419 colander.String(),
415 colander.String(),
420 preparers=[preparers.strip_preparer],
416 preparers=[preparers.strip_preparer],
421 validator=deferred_sync_uri_validator,
417 validator=deferred_sync_uri_validator,
422 missing='')
418 missing='')
423
419
424 def deserialize(self, cstruct):
420 def deserialize(self, cstruct):
425 """
421 """
426 Custom deserialize that chains validation: field data first, then
422 Custom deserialize that chains validation: field data first, then
427 repo group permissions, and name uniqueness as the last step
423 repo group permissions, and name uniqueness as the last step
428 """
424 """
429
425
430 # first pass, to validate given data
426 # first pass, to validate given data
431 appstruct = super(RepoSchema, self).deserialize(cstruct)
427 appstruct = super(RepoSchema, self).deserialize(cstruct)
432 validated_name = appstruct['repo_name']
428 validated_name = appstruct['repo_name']
433 # because RepoSettingsSchema receives the repo group as an ID, we inject
429 # because RepoSettingsSchema receives the repo group as an ID, we inject
434 # the full name here since the validators require it; it's unwrapped
430 # the full name here since the validators require it; it's unwrapped
435 # later, so it's safe to use, and the final name is without the group anyway
431 # later, so it's safe to use, and the final name is without the group anyway
436
432
437 group, separator = get_repo_group(appstruct['repo_group'])
433 group, separator = get_repo_group(appstruct['repo_group'])
438 if group:
434 if group:
439 validated_name = separator.join([group.group_name, validated_name])
435 validated_name = separator.join([group.group_name, validated_name])
440
436
441 # second pass to validate permissions to repo_group
437 # second pass to validate permissions to repo_group
442 if 'old_values' in self.bindings:
438 if 'old_values' in self.bindings:
443 # save current repo name for name change checks
439 # save current repo name for name change checks
444 self.bindings['old_values']['submitted_repo_name'] = validated_name
440 self.bindings['old_values']['submitted_repo_name'] = validated_name
445 second = RepoGroupAccessSchema().bind(**self.bindings)
441 second = RepoGroupAccessSchema().bind(**self.bindings)
446 appstruct_second = second.deserialize({'repo_group': validated_name})
442 appstruct_second = second.deserialize({'repo_group': validated_name})
447 # save result
443 # save result
448 appstruct['repo_group'] = appstruct_second['repo_group']
444 appstruct['repo_group'] = appstruct_second['repo_group']
449
445
450 # third pass to validate uniqueness
446 # third pass to validate uniqueness
451 third = RepoNameUniqueSchema().bind(**self.bindings)
447 third = RepoNameUniqueSchema().bind(**self.bindings)
452 third.deserialize({'unique_repo_name': validated_name})
448 third.deserialize({'unique_repo_name': validated_name})
453
449
454 return appstruct
450 return appstruct
@@ -1,419 +1,426 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helpers for fixture generation
22 Helpers for fixture generation
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import tempfile
27 import tempfile
28 import shutil
28 import shutil
29
29
30 import configobj
30 import configobj
31
31
32 from rhodecode.model.settings import SettingsModel
32 from rhodecode.model.settings import SettingsModel
33 from rhodecode.tests import *
33 from rhodecode.tests import *
34 from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap
34 from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.repo import RepoModel
37 from rhodecode.model.user import UserModel
37 from rhodecode.model.user import UserModel
38 from rhodecode.model.repo_group import RepoGroupModel
38 from rhodecode.model.repo_group import RepoGroupModel
39 from rhodecode.model.user_group import UserGroupModel
39 from rhodecode.model.user_group import UserGroupModel
40 from rhodecode.model.gist import GistModel
40 from rhodecode.model.gist import GistModel
41 from rhodecode.model.auth_token import AuthTokenModel
41 from rhodecode.model.auth_token import AuthTokenModel
42 from rhodecode.model.scm import ScmModel
42 from rhodecode.authentication.plugins.auth_rhodecode import \
43 from rhodecode.authentication.plugins.auth_rhodecode import \
43 RhodeCodeAuthPlugin
44 RhodeCodeAuthPlugin
44
45
45 dn = os.path.dirname
46 dn = os.path.dirname
46 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
47 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
47
48
48
49
49 def error_function(*args, **kwargs):
50 def error_function(*args, **kwargs):
50 raise Exception('Total Crash !')
51 raise Exception('Total Crash !')
51
52
52
53
53 class TestINI(object):
54 class TestINI(object):
54 """
55 """
55 Allows creating a new test.ini file as a copy of an existing one with
56 Allows creating a new test.ini file as a copy of an existing one with
56 edited data. Example usage::
57 edited data. Example usage::
57
58
58 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
59 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
59 print('paster server %s' % new_test_ini_path)
60 print('paster server %s' % new_test_ini_path)
60 """
61 """
61
62
62 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
63 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
63 destroy=True, dir=None):
64 destroy=True, dir=None):
64 self.ini_file_path = ini_file_path
65 self.ini_file_path = ini_file_path
65 self.ini_params = ini_params
66 self.ini_params = ini_params
66 self.new_path = None
67 self.new_path = None
67 self.new_path_prefix = new_file_prefix
68 self.new_path_prefix = new_file_prefix
68 self._destroy = destroy
69 self._destroy = destroy
69 self._dir = dir
70 self._dir = dir
70
71
71 def __enter__(self):
72 def __enter__(self):
72 return self.create()
73 return self.create()
73
74
74 def __exit__(self, exc_type, exc_val, exc_tb):
75 def __exit__(self, exc_type, exc_val, exc_tb):
75 self.destroy()
76 self.destroy()
76
77
77 def create(self):
78 def create(self):
78 config = configobj.ConfigObj(
79 config = configobj.ConfigObj(
79 self.ini_file_path, file_error=True, write_empty_values=True)
80 self.ini_file_path, file_error=True, write_empty_values=True)
80
81
81 for data in self.ini_params:
82 for data in self.ini_params:
82 section, ini_params = data.items()[0]
83 section, ini_params = data.items()[0]
83 for key, val in ini_params.items():
84 for key, val in ini_params.items():
84 config[section][key] = val
85 config[section][key] = val
85 with tempfile.NamedTemporaryFile(
86 with tempfile.NamedTemporaryFile(
86 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
87 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
87 delete=False) as new_ini_file:
88 delete=False) as new_ini_file:
88 config.write(new_ini_file)
89 config.write(new_ini_file)
89 self.new_path = new_ini_file.name
90 self.new_path = new_ini_file.name
90
91
91 return self.new_path
92 return self.new_path
92
93
93 def destroy(self):
94 def destroy(self):
94 if self._destroy:
95 if self._destroy:
95 os.remove(self.new_path)
96 os.remove(self.new_path)
96
97
97
98
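`TestINI.create()` drives configobj: it merges each `{section: {key: value}}` override into the parsed ini and then writes a temporary copy. An equivalent standalone sketch (the path, section, and key are illustrative)::

    import configobj

    config = configobj.ConfigObj(
        'test.ini', file_error=True, write_empty_values=True)
    config['app:main']['port'] = '5001'  # assumed section/key, for illustration
    with open('/tmp/test-copy.ini', 'wb') as f:
        config.write(f)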
98 class Fixture(object):
99 class Fixture(object):
99
100
100 def anon_access(self, status):
101 def anon_access(self, status):
101 """
102 """
102 Context manager for disabling anonymous access. Use like:
103 Context manager for disabling anonymous access. Use like:
103 fixture = Fixture()
104 fixture = Fixture()
104 with fixture.anon_access(False):
105 with fixture.anon_access(False):
105 #tests
106 #tests
106
107
107 after this block anon access will be set to `not status`
108 after this block anon access will be set to `not status`
108 """
109 """
109
110
110 class context(object):
111 class context(object):
111 def __enter__(self):
112 def __enter__(self):
112 anon = User.get_default_user()
113 anon = User.get_default_user()
113 anon.active = status
114 anon.active = status
114 Session().add(anon)
115 Session().add(anon)
115 Session().commit()
116 Session().commit()
116 time.sleep(1.5) # must sleep for cache (1s to expire)
117 time.sleep(1.5) # must sleep for cache (1s to expire)
117
118
118 def __exit__(self, exc_type, exc_val, exc_tb):
119 def __exit__(self, exc_type, exc_val, exc_tb):
119 anon = User.get_default_user()
120 anon = User.get_default_user()
120 anon.active = not status
121 anon.active = not status
121 Session().add(anon)
122 Session().add(anon)
122 Session().commit()
123 Session().commit()
123
124
124 return context()
125 return context()
125
126
126 def auth_restriction(self, registry, auth_restriction):
127 def auth_restriction(self, registry, auth_restriction):
127 """
128 """
128 Context manager for changing the builtin rhodecode plugin auth restrictions.
129 Context manager for changing the builtin rhodecode plugin auth restrictions.
129 Use like:
130 Use like:
130 fixture = Fixture()
131 fixture = Fixture()
131 with fixture.auth_restriction('super_admin'):
132 with fixture.auth_restriction('super_admin'):
132 #tests
133 #tests
133
134
134 after this block auth restriction will be taken off
135 after this block auth restriction will be taken off
135 """
136 """
136
137
137 class context(object):
138 class context(object):
138 def _get_plugin(self):
139 def _get_plugin(self):
139 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
140 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
140 plugin = RhodeCodeAuthPlugin(plugin_id)
141 plugin = RhodeCodeAuthPlugin(plugin_id)
141 return plugin
142 return plugin
142
143
143 def __enter__(self):
144 def __enter__(self):
144
145
145 plugin = self._get_plugin()
146 plugin = self._get_plugin()
146 plugin.create_or_update_setting('auth_restriction', auth_restriction)
147 plugin.create_or_update_setting('auth_restriction', auth_restriction)
147 Session().commit()
148 Session().commit()
148 SettingsModel().invalidate_settings_cache()
149 SettingsModel().invalidate_settings_cache()
149
150
150 def __exit__(self, exc_type, exc_val, exc_tb):
151 def __exit__(self, exc_type, exc_val, exc_tb):
151
152
152 plugin = self._get_plugin()
153 plugin = self._get_plugin()
153 plugin.create_or_update_setting(
154 plugin.create_or_update_setting(
154 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
155 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
155 Session().commit()
156 Session().commit()
156 SettingsModel().invalidate_settings_cache()
157 SettingsModel().invalidate_settings_cache()
157
158
158 return context()
159 return context()
159
160
160 def scope_restriction(self, registry, scope_restriction):
161 def scope_restriction(self, registry, scope_restriction):
161 """
162 """
162 Context manager for changing the builtin rhodecode plugin scope restrictions.
163 Context manager for changing the builtin rhodecode plugin scope restrictions.
163 Use like:
164 Use like:
164 fixture = Fixture()
165 fixture = Fixture()
165 with fixture.scope_restriction('scope_http'):
166 with fixture.scope_restriction('scope_http'):
166 #tests
167 #tests
167
168
168 after this block scope restriction will be taken off
169 after this block scope restriction will be taken off
169 """
170 """
170
171
171 class context(object):
172 class context(object):
172 def _get_plugin(self):
173 def _get_plugin(self):
173 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
174 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
174 plugin = RhodeCodeAuthPlugin(plugin_id)
175 plugin = RhodeCodeAuthPlugin(plugin_id)
175 return plugin
176 return plugin
176
177
177 def __enter__(self):
178 def __enter__(self):
178 plugin = self._get_plugin()
179 plugin = self._get_plugin()
179 plugin.create_or_update_setting('scope_restriction', scope_restriction)
180 plugin.create_or_update_setting('scope_restriction', scope_restriction)
180 Session().commit()
181 Session().commit()
181 SettingsModel().invalidate_settings_cache()
182 SettingsModel().invalidate_settings_cache()
182
183
183 def __exit__(self, exc_type, exc_val, exc_tb):
184 def __exit__(self, exc_type, exc_val, exc_tb):
184 plugin = self._get_plugin()
185 plugin = self._get_plugin()
185 plugin.create_or_update_setting(
186 plugin.create_or_update_setting(
186 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
187 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
187 Session().commit()
188 Session().commit()
188 SettingsModel().invalidate_settings_cache()
189 SettingsModel().invalidate_settings_cache()
189
190
190 return context()
191 return context()
191
192
192 def _get_repo_create_params(self, **custom):
193 def _get_repo_create_params(self, **custom):
194 repo_type = custom.get('repo_type') or 'hg'
195
196 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
197
193 defs = {
198 defs = {
194 'repo_name': None,
199 'repo_name': None,
195 'repo_type': 'hg',
200 'repo_type': repo_type,
196 'clone_uri': '',
201 'clone_uri': '',
197 'push_uri': '',
202 'push_uri': '',
198 'repo_group': '-1',
203 'repo_group': '-1',
199 'repo_description': 'DESC',
204 'repo_description': 'DESC',
200 'repo_private': False,
205 'repo_private': False,
201 'repo_landing_rev': 'rev:tip',
206 'repo_landing_commit_ref': default_landing_ref,
202 'repo_copy_permissions': False,
207 'repo_copy_permissions': False,
203 'repo_state': Repository.STATE_CREATED,
208 'repo_state': Repository.STATE_CREATED,
204 }
209 }
205 defs.update(custom)
210 defs.update(custom)
206 if 'repo_name_full' not in custom:
211 if 'repo_name_full' not in custom:
207 defs.update({'repo_name_full': defs['repo_name']})
212 defs.update({'repo_name_full': defs['repo_name']})
208
213
209 # fix the repo name if passed as repo_name_full
214 # fix the repo name if passed as repo_name_full
210 if defs['repo_name']:
215 if defs['repo_name']:
211 defs['repo_name'] = defs['repo_name'].split('/')[-1]
216 defs['repo_name'] = defs['repo_name'].split('/')[-1]
212
217
213 return defs
218 return defs
214
219
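The new `ScmModel.backend_landing_ref(repo_type)` call replaces the hard-coded `DEFAULT_BACKEND_LANDING_REF` mapping that this commit removes from the schema module. Judging by that removed mapping, the helper presumably resolves per backend roughly as follows (a hedged sketch, not verified against the implementation)::

    from rhodecode.model.scm import ScmModel

    ref, lbl = ScmModel.backend_landing_ref('hg')   # presumably ('branch:default', ...)
    ref, lbl = ScmModel.backend_landing_ref('git')  # presumably ('branch:master', ...)
    ref, lbl = ScmModel.backend_landing_ref('svn')  # presumably ('rev:tip', ...)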
215 def _get_group_create_params(self, **custom):
220 def _get_group_create_params(self, **custom):
216 defs = {
221 defs = {
217 'group_name': None,
222 'group_name': None,
218 'group_description': 'DESC',
223 'group_description': 'DESC',
219 'perm_updates': [],
224 'perm_updates': [],
220 'perm_additions': [],
225 'perm_additions': [],
221 'perm_deletions': [],
226 'perm_deletions': [],
222 'group_parent_id': -1,
227 'group_parent_id': -1,
223 'enable_locking': False,
228 'enable_locking': False,
224 'recursive': False,
229 'recursive': False,
225 }
230 }
226 defs.update(custom)
231 defs.update(custom)
227
232
228 return defs
233 return defs
229
234
230 def _get_user_create_params(self, name, **custom):
235 def _get_user_create_params(self, name, **custom):
231 defs = {
236 defs = {
232 'username': name,
237 'username': name,
233 'password': 'qweqwe',
238 'password': 'qweqwe',
234 'email': '%s+test@rhodecode.org' % name,
239 'email': '%s+test@rhodecode.org' % name,
235 'firstname': 'TestUser',
240 'firstname': 'TestUser',
236 'lastname': 'Test',
241 'lastname': 'Test',
237 'description': 'test description',
242 'description': 'test description',
238 'active': True,
243 'active': True,
239 'admin': False,
244 'admin': False,
240 'extern_type': 'rhodecode',
245 'extern_type': 'rhodecode',
241 'extern_name': None,
246 'extern_name': None,
242 }
247 }
243 defs.update(custom)
248 defs.update(custom)
244
249
245 return defs
250 return defs
246
251
247 def _get_user_group_create_params(self, name, **custom):
252 def _get_user_group_create_params(self, name, **custom):
248 defs = {
253 defs = {
249 'users_group_name': name,
254 'users_group_name': name,
250 'user_group_description': 'DESC',
255 'user_group_description': 'DESC',
251 'users_group_active': True,
256 'users_group_active': True,
252 'user_group_data': {},
257 'user_group_data': {},
253 }
258 }
254 defs.update(custom)
259 defs.update(custom)
255
260
256 return defs
261 return defs
257
262
258 def create_repo(self, name, **kwargs):
263 def create_repo(self, name, **kwargs):
259 repo_group = kwargs.get('repo_group')
264 repo_group = kwargs.get('repo_group')
260 if isinstance(repo_group, RepoGroup):
265 if isinstance(repo_group, RepoGroup):
261 kwargs['repo_group'] = repo_group.group_id
266 kwargs['repo_group'] = repo_group.group_id
262 name = name.split(Repository.NAME_SEP)[-1]
267 name = name.split(Repository.NAME_SEP)[-1]
263 name = Repository.NAME_SEP.join((repo_group.group_name, name))
268 name = Repository.NAME_SEP.join((repo_group.group_name, name))
264
269
265 if 'skip_if_exists' in kwargs:
270 if 'skip_if_exists' in kwargs:
266 del kwargs['skip_if_exists']
271 del kwargs['skip_if_exists']
267 r = Repository.get_by_repo_name(name)
272 r = Repository.get_by_repo_name(name)
268 if r:
273 if r:
269 return r
274 return r
270
275
271 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
276 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
272 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
277 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
273 RepoModel().create(form_data, cur_user)
278 RepoModel().create(form_data, cur_user)
274 Session().commit()
279 Session().commit()
275 repo = Repository.get_by_repo_name(name)
280 repo = Repository.get_by_repo_name(name)
276 assert repo
281 assert repo
277 return repo
282 return repo
278
283
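Typical use of `create_repo` from a test, sketched under the assumption of a configured test database (repo name and type are illustrative)::

    fixture = Fixture()
    repo = fixture.create_repo('fixture-demo-repo', repo_type='git')
    assert repo.repo_name == 'fixture-demo-repo'
    # ... exercise the repo ...
    fixture.destroy_repo(repo.repo_name)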
279 def create_fork(self, repo_to_fork, fork_name, **kwargs):
284 def create_fork(self, repo_to_fork, fork_name, **kwargs):
280 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
285 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
281
286
282 form_data = self._get_repo_create_params(repo_name=fork_name,
287 form_data = self._get_repo_create_params(
283 fork_parent_id=repo_to_fork.repo_id,
288 repo_name=fork_name,
284 repo_type=repo_to_fork.repo_type,
289 fork_parent_id=repo_to_fork.repo_id,
285 **kwargs)
290 repo_type=repo_to_fork.repo_type,
291 **kwargs)
292
286 #TODO: fix it !!
293 #TODO: fix it !!
287 form_data['description'] = form_data['repo_description']
294 form_data['description'] = form_data['repo_description']
288 form_data['private'] = form_data['repo_private']
295 form_data['private'] = form_data['repo_private']
289 form_data['landing_rev'] = form_data['repo_landing_rev']
296 form_data['landing_rev'] = form_data['repo_landing_commit_ref']
290
297
291 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
298 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
292 RepoModel().create_fork(form_data, cur_user=owner)
299 RepoModel().create_fork(form_data, cur_user=owner)
293 Session().commit()
300 Session().commit()
294 r = Repository.get_by_repo_name(fork_name)
301 r = Repository.get_by_repo_name(fork_name)
295 assert r
302 assert r
296 return r
303 return r
297
304
298 def destroy_repo(self, repo_name, **kwargs):
305 def destroy_repo(self, repo_name, **kwargs):
299 RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
306 RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
300 Session().commit()
307 Session().commit()
301
308
302 def destroy_repo_on_filesystem(self, repo_name):
309 def destroy_repo_on_filesystem(self, repo_name):
303 rm_path = os.path.join(RepoModel().repos_path, repo_name)
310 rm_path = os.path.join(RepoModel().repos_path, repo_name)
304 if os.path.isdir(rm_path):
311 if os.path.isdir(rm_path):
305 shutil.rmtree(rm_path)
312 shutil.rmtree(rm_path)
306
313
307 def create_repo_group(self, name, **kwargs):
314 def create_repo_group(self, name, **kwargs):
308 if 'skip_if_exists' in kwargs:
315 if 'skip_if_exists' in kwargs:
309 del kwargs['skip_if_exists']
316 del kwargs['skip_if_exists']
310 gr = RepoGroup.get_by_group_name(group_name=name)
317 gr = RepoGroup.get_by_group_name(group_name=name)
311 if gr:
318 if gr:
312 return gr
319 return gr
313 form_data = self._get_group_create_params(group_name=name, **kwargs)
320 form_data = self._get_group_create_params(group_name=name, **kwargs)
314 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
321 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
315 gr = RepoGroupModel().create(
322 gr = RepoGroupModel().create(
316 group_name=form_data['group_name'],
323 group_name=form_data['group_name'],
317 group_description=form_data['group_description'],
324 group_description=form_data['group_description'],
318 owner=owner)
325 owner=owner)
319 Session().commit()
326 Session().commit()
320 gr = RepoGroup.get_by_group_name(gr.group_name)
327 gr = RepoGroup.get_by_group_name(gr.group_name)
321 return gr
328 return gr
322
329
323 def destroy_repo_group(self, repogroupid):
330 def destroy_repo_group(self, repogroupid):
324 RepoGroupModel().delete(repogroupid)
331 RepoGroupModel().delete(repogroupid)
325 Session().commit()
332 Session().commit()
326
333
327 def create_user(self, name, **kwargs):
334 def create_user(self, name, **kwargs):
328 if 'skip_if_exists' in kwargs:
335 if 'skip_if_exists' in kwargs:
329 del kwargs['skip_if_exists']
336 del kwargs['skip_if_exists']
330 user = User.get_by_username(name)
337 user = User.get_by_username(name)
331 if user:
338 if user:
332 return user
339 return user
333 form_data = self._get_user_create_params(name, **kwargs)
340 form_data = self._get_user_create_params(name, **kwargs)
334 user = UserModel().create(form_data)
341 user = UserModel().create(form_data)
335
342
336 # create token for user
343 # create token for user
337 AuthTokenModel().create(
344 AuthTokenModel().create(
338 user=user, description=u'TEST_USER_TOKEN')
345 user=user, description=u'TEST_USER_TOKEN')
339
346
340 Session().commit()
347 Session().commit()
341 user = User.get_by_username(user.username)
348 user = User.get_by_username(user.username)
342 return user
349 return user
343
350
344 def destroy_user(self, userid):
351 def destroy_user(self, userid):
345 UserModel().delete(userid)
352 UserModel().delete(userid)
346 Session().commit()
353 Session().commit()
347
354
348 def create_additional_user_email(self, user, email):
355 def create_additional_user_email(self, user, email):
349 uem = UserEmailMap()
356 uem = UserEmailMap()
350 uem.user = user
357 uem.user = user
351 uem.email = email
358 uem.email = email
352 Session().add(uem)
359 Session().add(uem)
353 return uem
360 return uem
354
361
355 def destroy_users(self, userid_iter):
362 def destroy_users(self, userid_iter):
356 for user_id in userid_iter:
363 for user_id in userid_iter:
357 if User.get_by_username(user_id):
364 if User.get_by_username(user_id):
358 UserModel().delete(user_id)
365 UserModel().delete(user_id)
359 Session().commit()
366 Session().commit()
360
367
361 def create_user_group(self, name, **kwargs):
368 def create_user_group(self, name, **kwargs):
362 if 'skip_if_exists' in kwargs:
369 if 'skip_if_exists' in kwargs:
363 del kwargs['skip_if_exists']
370 del kwargs['skip_if_exists']
364 gr = UserGroup.get_by_group_name(group_name=name)
371 gr = UserGroup.get_by_group_name(group_name=name)
365 if gr:
372 if gr:
366 return gr
373 return gr
367 # map active flag to the real attribute. For API consistency of fixtures
374 # map active flag to the real attribute. For API consistency of fixtures
368 if 'active' in kwargs:
375 if 'active' in kwargs:
369 kwargs['users_group_active'] = kwargs['active']
376 kwargs['users_group_active'] = kwargs['active']
370 del kwargs['active']
377 del kwargs['active']
371 form_data = self._get_user_group_create_params(name, **kwargs)
378 form_data = self._get_user_group_create_params(name, **kwargs)
372 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
379 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
373 user_group = UserGroupModel().create(
380 user_group = UserGroupModel().create(
374 name=form_data['users_group_name'],
381 name=form_data['users_group_name'],
375 description=form_data['user_group_description'],
382 description=form_data['user_group_description'],
376 owner=owner, active=form_data['users_group_active'],
383 owner=owner, active=form_data['users_group_active'],
377 group_data=form_data['user_group_data'])
384 group_data=form_data['user_group_data'])
378 Session().commit()
385 Session().commit()
379 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
386 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
380 return user_group
387 return user_group
381
388
382 def destroy_user_group(self, usergroupid):
389 def destroy_user_group(self, usergroupid):
383 UserGroupModel().delete(user_group=usergroupid, force=True)
390 UserGroupModel().delete(user_group=usergroupid, force=True)
384 Session().commit()
391 Session().commit()
385
392
386 def create_gist(self, **kwargs):
393 def create_gist(self, **kwargs):
387 form_data = {
394 form_data = {
388 'description': 'new-gist',
395 'description': 'new-gist',
389 'owner': TEST_USER_ADMIN_LOGIN,
396 'owner': TEST_USER_ADMIN_LOGIN,
390 'gist_type': GistModel.cls.GIST_PUBLIC,
397 'gist_type': GistModel.cls.GIST_PUBLIC,
391 'lifetime': -1,
398 'lifetime': -1,
392 'acl_level': Gist.ACL_LEVEL_PUBLIC,
399 'acl_level': Gist.ACL_LEVEL_PUBLIC,
393 'gist_mapping': {'filename1.txt': {'content': 'hello world'},}
400 'gist_mapping': {'filename1.txt': {'content': 'hello world'},}
394 }
401 }
395 form_data.update(kwargs)
402 form_data.update(kwargs)
396 gist = GistModel().create(
403 gist = GistModel().create(
397 description=form_data['description'], owner=form_data['owner'],
404 description=form_data['description'], owner=form_data['owner'],
398 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
405 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
399 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
406 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
400 )
407 )
401 Session().commit()
408 Session().commit()
402 return gist
409 return gist
403
410
404 def destroy_gists(self, gistid=None):
411 def destroy_gists(self, gistid=None):
405 for g in GistModel.cls.get_all():
412 for g in GistModel.cls.get_all():
406 if gistid:
413 if gistid:
407 if gistid == g.gist_access_id:
414 if gistid == g.gist_access_id:
408 GistModel().delete(g)
415 GistModel().delete(g)
409 else:
416 else:
410 GistModel().delete(g)
417 GistModel().delete(g)
411 Session().commit()
418 Session().commit()
412
419
413 def load_resource(self, resource_name, strip=False):
420 def load_resource(self, resource_name, strip=False):
414 with open(os.path.join(FIXTURES, resource_name)) as f:
421 with open(os.path.join(FIXTURES, resource_name)) as f:
415 source = f.read()
422 source = f.read()
416 if strip:
423 if strip:
417 source = source.strip()
424 source = source.strip()
418
425
419 return source
426 return source