##// END OF EJS Templates
fix(server-info): removed dulwich from update-server-info and added force flag support
super-admin -
r5276:073d7b04 default
parent child Browse files
Show More
@@ -1,808 +1,808 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Utilities library for RhodeCode
20 Utilities library for RhodeCode
21 """
21 """
22
22
23 import datetime
23 import datetime
24 import decorator
24 import decorator
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29 import shutil
29 import shutil
30 import socket
30 import socket
31 import tempfile
31 import tempfile
32 import traceback
32 import traceback
33 import tarfile
33 import tarfile
34 import warnings
34 import warnings
35 from os.path import join as jn
35 from os.path import join as jn
36
36
37 import paste
37 import paste
38 import pkg_resources
38 import pkg_resources
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
39 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
40
40
41 from mako import exceptions
41 from mako import exceptions
42
42
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
43 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
44 from rhodecode.lib.str_utils import safe_bytes, safe_str
45 from rhodecode.lib.vcs.backends.base import Config
45 from rhodecode.lib.vcs.backends.base import Config
46 from rhodecode.lib.vcs.exceptions import VCSError
46 from rhodecode.lib.vcs.exceptions import VCSError
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
47 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
48 from rhodecode.lib.ext_json import sjson as json
48 from rhodecode.lib.ext_json import sjson as json
49 from rhodecode.model import meta
49 from rhodecode.model import meta
50 from rhodecode.model.db import (
50 from rhodecode.model.db import (
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
51 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
52 from rhodecode.model.meta import Session
52 from rhodecode.model.meta import Session
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
57 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
58
58
59 # String which contains characters that are not allowed in slug names for
59 # String which contains characters that are not allowed in slug names for
60 # repositories or repository groups. It is properly escaped to use it in
60 # repositories or repository groups. It is properly escaped to use it in
61 # regular expressions.
61 # regular expressions.
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
62 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
63
63
64 # Regex that matches forbidden characters in repo/group slugs.
64 # Regex that matches forbidden characters in repo/group slugs.
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
65 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
66
66
67 # Regex that matches allowed characters in repo/group slugs.
67 # Regex that matches allowed characters in repo/group slugs.
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
68 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches whole repo/group slugs.
70 # Regex that matches whole repo/group slugs.
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
71 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
72
72
73 _license_cache = None
73 _license_cache = None
74
74
75
75
def repo_name_slug(value):
    """
    Return the slug form of a repository name.

    Called on every repository creation/modification so that unsafe
    characters never end up in repository names.
    """
    separator = '-'

    # strip markup first, then normalize accented/misc HTML entities
    cleaned = convert_misc_entities(convert_accented_entities(strip_tags(value)))

    # drop forbidden characters and squash whitespace runs into dashes
    cleaned = SLUG_BAD_CHAR_RE.sub('', cleaned)
    cleaned = re.sub(r'[\s]+', '-', cleaned)

    # collapse repeated separators into a single one
    return collapse(cleaned, separator)
94
94
95
95
96 #==============================================================================
96 #==============================================================================
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
98 #==============================================================================
98 #==============================================================================
def get_repo_slug(request):
    """
    Extract the repository name from a request object.

    Prefers the resolved DB repository name (which translates
    ``example.com/_<id>`` URLs into real repo names); otherwise falls
    back to the pyramid route matchdict.
    """
    if hasattr(request, 'db_repo_name'):
        # request carries a resolved DB reference; use its proper name
        repo_name = request.db_repo_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid routing
        repo_name = request.matchdict.get('repo_name')
    else:
        repo_name = ''

    # normalize away a trailing slash when we got a non-empty name
    return repo_name.rstrip('/') if repo_name else repo_name
113
113
114
114
def get_repo_group_slug(request):
    """
    Extract the repository group name from a request object.

    Prefers the resolved DB repo group (which translates
    ``example.com/_<id>`` URLs into real group names); otherwise falls
    back to the pyramid route matchdict.
    """
    if hasattr(request, 'db_repo_group'):
        # request carries a resolved DB reference; use its proper name
        group_name = request.db_repo_group.group_name
    elif getattr(request, 'matchdict', None):
        # plain pyramid routing
        group_name = request.matchdict.get('repo_group_name')
    else:
        group_name = ''

    # normalize away a trailing slash when we got a non-empty name
    return group_name.rstrip('/') if group_name else group_name
128
128
129
129
def get_user_group_slug(request):
    """
    Extract the user group name from a request object.

    Uses the resolved DB user group when present; otherwise resolves the
    group through the pyramid matchdict, either by id or by name.
    Returns ``None`` when a DB lookup fails or resolves nothing.
    """
    if hasattr(request, 'db_user_group'):
        return request.db_user_group.users_group_name

    result = ''
    if getattr(request, 'matchdict', None):
        # pyramid routing: the group may be addressed by id or by name
        group_id = request.matchdict.get('user_group_id')
        group_name = request.matchdict.get('user_group_name')
        try:
            group = None
            if group_id:
                group = UserGroup.get(group_id)
            elif group_name:
                group = UserGroup.get_by_group_name(group_name)

            if group:
                return group.users_group_name
            # mirror the original fall-through: unresolved lookup yields
            # whatever falsy value we ended up with (usually None)
            result = group
        except Exception:
            log.exception('Failed to get user group by id and name')
            # catch all failures here
            return None

    return result
153
153
154
154
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories matching the soft-removed
        repository naming pattern
    """

    # drop the trailing separator so relative names come out clean
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _scan(root):
        entries = get_dirpaths(root)
        if not _is_dir_writable(root):
            log.warning('repo path without write access: %s', root)

        for entry in entries:
            full = os.path.join(root, entry)
            # plain files cannot be repositories
            if os.path.isfile(full):
                continue

            # skip soft-removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip hidden (dot-prefixed) directories
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(full)
                # yield the repo name relative to the scan root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself; descend, it may contain repos
                if os.path.isdir(full):
                    yield from _scan(full)

    return _scan(path)
197
197
198
198
def get_dirpaths(p: str) -> list:
    """
    Return the directory listing of ``p``, dropping undecodable entries.

    Returns an empty list when ``p`` cannot be read at all.
    """
    try:
        # OS-independent way of probing for (at least) read-only access
        entries = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir mirrors the type of its argument: given a str it tries to
    # decode entries, and anything it cannot decode comes back with a
    # different type, which would break path handling downstream. Such
    # entries are logged and filtered out until a solid path-handling
    # solution exists.
    wanted_type = type(p)

    def _decodable(entry):
        if type(entry) is wanted_type:
            return True
        log.error(
            "Ignoring path %s since it cannot be decoded into str.",
            # Using "repr" to make sure that we see the byte value in case
            # of support.
            repr(entry))
        return False

    return [entry for entry in entries if _decodable(entry)]
229
229
230
230
231 def _is_dir_writable(path):
231 def _is_dir_writable(path):
232 """
232 """
233 Probe if `path` is writable.
233 Probe if `path` is writable.
234
234
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 possible to create a file inside of `path`, stat does not produce reliable
236 possible to create a file inside of `path`, stat does not produce reliable
237 results in this case.
237 results in this case.
238 """
238 """
239 try:
239 try:
240 with tempfile.TemporaryFile(dir=path):
240 with tempfile.TemporaryFile(dir=path):
241 pass
241 pass
242 except OSError:
242 except OSError:
243 return False
243 return False
244 return True
244 return True
245
245
246
246
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Return True when ``repo_name`` under ``base_path`` is a valid repository.

    When ``expect_scm`` is given, additionally require the detected scm type
    to equal it. When ``explicit_scm`` is given, skip detection and validate
    the path as a repository of exactly that type.

    :param repo_name:
    :param base_path:
    :param expect_scm:
    :param explicit_scm:
    :param config:

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            # instantiate the requested backend; failure means invalid repo
            backend = get_scm_backend(explicit_scm)
            detected_scms = [backend(full_path, config=config).alias]
        else:
            detected_scms = get_scm(full_path)

        if expect_scm:
            return detected_scms[0] == expect_scm

        log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
280
280
281
281
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Return True when the given path is a repository group, False otherwise.

    :param repo_group_name:
    :param base_path:
    :param skip_path_check: accept the name without requiring the directory
        to exist on disk
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # a path that already is a repository cannot be a repo group
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
        return False

    try:
        # bare git repos must be checked one level up, since paths like
        # branches/hooks/info/objects inside a bare repo would match here
        parent_dir = os.path.dirname(full_path)
        if parent_dir == base_path:
            # skip root level repo check; we know root location CANNOT BE a repo group
            return False

        detected = get_scm(parent_dir)
        log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, detected)
        return False
    except VCSError:
        # parent is not a repository -> candidate may be a valid group
        pass

    # finally require the directory itself to exist (unless skipped)
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
320
320
321
321
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin, re-prompting on bad input.

    Returns True for yes-like answers, False for no-like ones, and raises
    OSError once ``retries`` invalid answers have been consumed.
    """
    while True:
        answer = input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        retries -= 1
        if retries < 0:
            raise OSError
        print(complaint)
333
333
# Known mercurial ui config sections, propagated from mercurial documentation.
ui_sections = [
    'alias',
    'auth',
    'decode/encode',
    'defaults',
    'diff',
    'email',
    'extensions',
    'format',
    'merge-patterns',
    'merge-tools',
    'hooks',
    'http_proxy',
    'smtp',
    'patch',
    'paths',
    'profiling',
    'server',
    'trusted',
    'ui',
    'web',
]
346
346
347
347
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the scoped DB session once reading is done
    :param repo: optional repository; when given, repo-scoped ui settings
        are read instead of the global ones
    :return: list of ``(section, key, value)`` tuples; hook entries that
        belong to disabled hook classes are filtered out
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)

    ui_settings = settings_model.get_ui_settings()

    # ui_data is only collected for the debug log below
    ui_data = []
    for setting in ui_settings:
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    log.debug(
        'settings ui from db@repo[%s]: %s',
        repo,
        ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        # pull hooks disabled -> drop the pre-pull hook entry
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        # push hooks disabled -> drop every push-related hook entry
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config
395
395
396
396
def make_db_config(clear_session=True, repo=None):
    """
    Build a :class:`Config` instance from the database-backed settings.
    """
    db_config = Config()
    for section, option, value in config_data_from_db(
            clear_session=clear_session, repo=repo):
        db_config.set(section, option, value)
    return db_config
406
406
407
407
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size',
    }

    # active keys of the 'hooks' section, mapped through hook_names;
    # keys without a known mapping are silently dropped
    return [
        hook_names[key]
        for section, key, value, active in ui_settings
        if section == 'hooks' and active and key in hook_names
    ]
435
435
436
436
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config: mutable mapping that is updated in place
    """
    from rhodecode.model.settings import SettingsModel

    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
448
448
449
449
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
457
457
458
458
def get_rhodecode_base_path():
    """
    Return the base path: the filesystem location of the repository store.
    """
    import rhodecode

    base_path = rhodecode.CONFIG['default_base_path']
    return base_path
467
467
468
468
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :return: the innermost (deepest) RepoGroup created or found, or None
        when the path has no group components
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # accumulate the full group path up to this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            # group missing in DB: create it with default perms, flushing so
            # the next iteration can reference it as parent
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            sa.flush()

        parent = group
    return group
509
509
510
510
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
511 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
512 """
512 """
513 maps all repos given in initial_repo_list, non existing repositories
513 maps all repos given in initial_repo_list, non existing repositories
514 are created, if remove_obsolete is True it also checks for db entries
514 are created, if remove_obsolete is True it also checks for db entries
515 that are not in initial_repo_list and removes them.
515 that are not in initial_repo_list and removes them.
516
516
517 :param initial_repo_list: list of repositories found by scanning methods
517 :param initial_repo_list: list of repositories found by scanning methods
518 :param remove_obsolete: check for obsolete entries in database
518 :param remove_obsolete: check for obsolete entries in database
519 """
519 """
520 from rhodecode.model.repo import RepoModel
520 from rhodecode.model.repo import RepoModel
521 from rhodecode.model.repo_group import RepoGroupModel
521 from rhodecode.model.repo_group import RepoGroupModel
522 from rhodecode.model.settings import SettingsModel
522 from rhodecode.model.settings import SettingsModel
523
523
524 sa = meta.Session()
524 sa = meta.Session()
525 repo_model = RepoModel()
525 repo_model = RepoModel()
526 user = User.get_first_super_admin()
526 user = User.get_first_super_admin()
527 added = []
527 added = []
528
528
529 # creation defaults
529 # creation defaults
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
530 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
531 enable_statistics = defs.get('repo_enable_statistics')
531 enable_statistics = defs.get('repo_enable_statistics')
532 enable_locking = defs.get('repo_enable_locking')
532 enable_locking = defs.get('repo_enable_locking')
533 enable_downloads = defs.get('repo_enable_downloads')
533 enable_downloads = defs.get('repo_enable_downloads')
534 private = defs.get('repo_private')
534 private = defs.get('repo_private')
535
535
536 for name, repo in list(initial_repo_list.items()):
536 for name, repo in list(initial_repo_list.items()):
537 group = map_groups(name)
537 group = map_groups(name)
538 str_name = safe_str(name)
538 str_name = safe_str(name)
539 db_repo = repo_model.get_by_repo_name(str_name)
539 db_repo = repo_model.get_by_repo_name(str_name)
540
540
541 # found repo that is on filesystem not in RhodeCode database
541 # found repo that is on filesystem not in RhodeCode database
542 if not db_repo:
542 if not db_repo:
543 log.info('repository `%s` not found in the database, creating now', name)
543 log.info('repository `%s` not found in the database, creating now', name)
544 added.append(name)
544 added.append(name)
545 desc = (repo.description
545 desc = (repo.description
546 if repo.description != 'unknown'
546 if repo.description != 'unknown'
547 else '%s repository' % name)
547 else '%s repository' % name)
548
548
549 db_repo = repo_model._create_repo(
549 db_repo = repo_model._create_repo(
550 repo_name=name,
550 repo_name=name,
551 repo_type=repo.alias,
551 repo_type=repo.alias,
552 description=desc,
552 description=desc,
553 repo_group=getattr(group, 'group_id', None),
553 repo_group=getattr(group, 'group_id', None),
554 owner=user,
554 owner=user,
555 enable_locking=enable_locking,
555 enable_locking=enable_locking,
556 enable_downloads=enable_downloads,
556 enable_downloads=enable_downloads,
557 enable_statistics=enable_statistics,
557 enable_statistics=enable_statistics,
558 private=private,
558 private=private,
559 state=Repository.STATE_CREATED
559 state=Repository.STATE_CREATED
560 )
560 )
561 sa.commit()
561 sa.commit()
562 # we added that repo just now, and make sure we updated server info
562 # we added that repo just now, and make sure we updated server info
563 if db_repo.repo_type == 'git':
563 if db_repo.repo_type == 'git':
564 git_repo = db_repo.scm_instance()
564 git_repo = db_repo.scm_instance()
565 # update repository server-info
565 # update repository server-info
566 log.debug('Running update server info')
566 log.debug('Running update server info')
567 git_repo._update_server_info()
567 git_repo._update_server_info(force=True)
568
568
569 db_repo.update_commit_cache()
569 db_repo.update_commit_cache()
570
570
571 config = db_repo._config
571 config = db_repo._config
572 config.set('extensions', 'largefiles', '')
572 config.set('extensions', 'largefiles', '')
573 repo = db_repo.scm_instance(config=config)
573 repo = db_repo.scm_instance(config=config)
574 repo.install_hooks(force=force_hooks_rebuild)
574 repo.install_hooks(force=force_hooks_rebuild)
575
575
576 removed = []
576 removed = []
577 if remove_obsolete:
577 if remove_obsolete:
578 # remove from database those repositories that are not in the filesystem
578 # remove from database those repositories that are not in the filesystem
579 for repo in sa.query(Repository).all():
579 for repo in sa.query(Repository).all():
580 if repo.repo_name not in list(initial_repo_list.keys()):
580 if repo.repo_name not in list(initial_repo_list.keys()):
581 log.debug("Removing non-existing repository found in db `%s`",
581 log.debug("Removing non-existing repository found in db `%s`",
582 repo.repo_name)
582 repo.repo_name)
583 try:
583 try:
584 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
584 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
585 sa.commit()
585 sa.commit()
586 removed.append(repo.repo_name)
586 removed.append(repo.repo_name)
587 except Exception:
587 except Exception:
588 # don't hold further removals on error
588 # don't hold further removals on error
589 log.error(traceback.format_exc())
589 log.error(traceback.format_exc())
590 sa.rollback()
590 sa.rollback()
591
591
592 def splitter(full_repo_name):
592 def splitter(full_repo_name):
593 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
593 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
594 gr_name = None
594 gr_name = None
595 if len(_parts) == 2:
595 if len(_parts) == 2:
596 gr_name = _parts[0]
596 gr_name = _parts[0]
597 return gr_name
597 return gr_name
598
598
599 initial_repo_group_list = [splitter(x) for x in
599 initial_repo_group_list = [splitter(x) for x in
600 list(initial_repo_list.keys()) if splitter(x)]
600 list(initial_repo_list.keys()) if splitter(x)]
601
601
602 # remove from database those repository groups that are not in the
602 # remove from database those repository groups that are not in the
603 # filesystem due to parent child relationships we need to delete them
603 # filesystem due to parent child relationships we need to delete them
604 # in a specific order of most nested first
604 # in a specific order of most nested first
605 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
605 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
606 def nested_sort(gr):
606 def nested_sort(gr):
607 return len(gr.split('/'))
607 return len(gr.split('/'))
608 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
608 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
609 if group_name not in initial_repo_group_list:
609 if group_name not in initial_repo_group_list:
610 repo_group = RepoGroup.get_by_group_name(group_name)
610 repo_group = RepoGroup.get_by_group_name(group_name)
611 if (repo_group.children.all() or
611 if (repo_group.children.all() or
612 not RepoGroupModel().check_exist_filesystem(
612 not RepoGroupModel().check_exist_filesystem(
613 group_name=group_name, exc_on_failure=False)):
613 group_name=group_name, exc_on_failure=False)):
614 continue
614 continue
615
615
616 log.info(
616 log.info(
617 'Removing non-existing repository group found in db `%s`',
617 'Removing non-existing repository group found in db `%s`',
618 group_name)
618 group_name)
619 try:
619 try:
620 RepoGroupModel(sa).delete(group_name, fs_remove=False)
620 RepoGroupModel(sa).delete(group_name, fs_remove=False)
621 sa.commit()
621 sa.commit()
622 removed.append(group_name)
622 removed.append(group_name)
623 except Exception:
623 except Exception:
624 # don't hold further removals on error
624 # don't hold further removals on error
625 log.exception(
625 log.exception(
626 'Unable to remove repository group `%s`',
626 'Unable to remove repository group `%s`',
627 group_name)
627 group_name)
628 sa.rollback()
628 sa.rollback()
629 raise
629 raise
630
630
631 return added, removed
631 return added, removed
632
632
633
633
634 def load_rcextensions(root_path):
634 def load_rcextensions(root_path):
635 import rhodecode
635 import rhodecode
636 from rhodecode.config import conf
636 from rhodecode.config import conf
637
637
638 path = os.path.join(root_path)
638 path = os.path.join(root_path)
639 sys.path.append(path)
639 sys.path.append(path)
640
640
641 try:
641 try:
642 rcextensions = __import__('rcextensions')
642 rcextensions = __import__('rcextensions')
643 except ImportError:
643 except ImportError:
644 if os.path.isdir(os.path.join(path, 'rcextensions')):
644 if os.path.isdir(os.path.join(path, 'rcextensions')):
645 log.warning('Unable to load rcextensions from %s', path)
645 log.warning('Unable to load rcextensions from %s', path)
646 rcextensions = None
646 rcextensions = None
647
647
648 if rcextensions:
648 if rcextensions:
649 log.info('Loaded rcextensions from %s...', rcextensions)
649 log.info('Loaded rcextensions from %s...', rcextensions)
650 rhodecode.EXTENSIONS = rcextensions
650 rhodecode.EXTENSIONS = rcextensions
651
651
652 # Additional mappings that are not present in the pygments lexers
652 # Additional mappings that are not present in the pygments lexers
653 conf.LANGUAGES_EXTENSIONS_MAP.update(
653 conf.LANGUAGES_EXTENSIONS_MAP.update(
654 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
654 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
655
655
656
656
657 def get_custom_lexer(extension):
657 def get_custom_lexer(extension):
658 """
658 """
659 returns a custom lexer if it is defined in rcextensions module, or None
659 returns a custom lexer if it is defined in rcextensions module, or None
660 if there's no custom lexer defined
660 if there's no custom lexer defined
661 """
661 """
662 import rhodecode
662 import rhodecode
663 from pygments import lexers
663 from pygments import lexers
664
664
665 # custom override made by RhodeCode
665 # custom override made by RhodeCode
666 if extension in ['mako']:
666 if extension in ['mako']:
667 return lexers.get_lexer_by_name('html+mako')
667 return lexers.get_lexer_by_name('html+mako')
668
668
669 # check if we didn't define this extension as other lexer
669 # check if we didn't define this extension as other lexer
670 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
670 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
671 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
671 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
672 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
672 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
673 return lexers.get_lexer_by_name(_lexer_name)
673 return lexers.get_lexer_by_name(_lexer_name)
674
674
675
675
676 #==============================================================================
676 #==============================================================================
677 # TEST FUNCTIONS AND CREATORS
677 # TEST FUNCTIONS AND CREATORS
678 #==============================================================================
678 #==============================================================================
679 def create_test_index(repo_location, config):
679 def create_test_index(repo_location, config):
680 """
680 """
681 Makes default test index.
681 Makes default test index.
682 """
682 """
683 try:
683 try:
684 import rc_testdata
684 import rc_testdata
685 except ImportError:
685 except ImportError:
686 raise ImportError('Failed to import rc_testdata, '
686 raise ImportError('Failed to import rc_testdata, '
687 'please make sure this package is installed from requirements_test.txt')
687 'please make sure this package is installed from requirements_test.txt')
688 rc_testdata.extract_search_index(
688 rc_testdata.extract_search_index(
689 'vcs_search_index', os.path.dirname(config['search.location']))
689 'vcs_search_index', os.path.dirname(config['search.location']))
690
690
691
691
692 def create_test_directory(test_path):
692 def create_test_directory(test_path):
693 """
693 """
694 Create test directory if it doesn't exist.
694 Create test directory if it doesn't exist.
695 """
695 """
696 if not os.path.isdir(test_path):
696 if not os.path.isdir(test_path):
697 log.debug('Creating testdir %s', test_path)
697 log.debug('Creating testdir %s', test_path)
698 os.makedirs(test_path)
698 os.makedirs(test_path)
699
699
700
700
701 def create_test_database(test_path, config):
701 def create_test_database(test_path, config):
702 """
702 """
703 Makes a fresh database.
703 Makes a fresh database.
704 """
704 """
705 from rhodecode.lib.db_manage import DbManage
705 from rhodecode.lib.db_manage import DbManage
706 from rhodecode.lib.utils2 import get_encryption_key
706 from rhodecode.lib.utils2 import get_encryption_key
707
707
708 # PART ONE create db
708 # PART ONE create db
709 dbconf = config['sqlalchemy.db1.url']
709 dbconf = config['sqlalchemy.db1.url']
710 enc_key = get_encryption_key(config)
710 enc_key = get_encryption_key(config)
711
711
712 log.debug('making test db %s', dbconf)
712 log.debug('making test db %s', dbconf)
713
713
714 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
714 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
715 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
715 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
716 dbmanage.create_tables(override=True)
716 dbmanage.create_tables(override=True)
717 dbmanage.set_db_version()
717 dbmanage.set_db_version()
718 # for tests dynamically set new root paths based on generated content
718 # for tests dynamically set new root paths based on generated content
719 dbmanage.create_settings(dbmanage.config_prompt(test_path))
719 dbmanage.create_settings(dbmanage.config_prompt(test_path))
720 dbmanage.create_default_user()
720 dbmanage.create_default_user()
721 dbmanage.create_test_admin_and_users()
721 dbmanage.create_test_admin_and_users()
722 dbmanage.create_permissions()
722 dbmanage.create_permissions()
723 dbmanage.populate_default_permissions()
723 dbmanage.populate_default_permissions()
724 Session().commit()
724 Session().commit()
725
725
726
726
727 def create_test_repositories(test_path, config):
727 def create_test_repositories(test_path, config):
728 """
728 """
729 Creates test repositories in the temporary directory. Repositories are
729 Creates test repositories in the temporary directory. Repositories are
730 extracted from archives within the rc_testdata package.
730 extracted from archives within the rc_testdata package.
731 """
731 """
732 import rc_testdata
732 import rc_testdata
733 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
733 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
734
734
735 log.debug('making test vcs repositories')
735 log.debug('making test vcs repositories')
736
736
737 idx_path = config['search.location']
737 idx_path = config['search.location']
738 data_path = config['cache_dir']
738 data_path = config['cache_dir']
739
739
740 # clean index and data
740 # clean index and data
741 if idx_path and os.path.exists(idx_path):
741 if idx_path and os.path.exists(idx_path):
742 log.debug('remove %s', idx_path)
742 log.debug('remove %s', idx_path)
743 shutil.rmtree(idx_path)
743 shutil.rmtree(idx_path)
744
744
745 if data_path and os.path.exists(data_path):
745 if data_path and os.path.exists(data_path):
746 log.debug('remove %s', data_path)
746 log.debug('remove %s', data_path)
747 shutil.rmtree(data_path)
747 shutil.rmtree(data_path)
748
748
749 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
749 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
750 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
750 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
751
751
752 # Note: Subversion is in the process of being integrated with the system,
752 # Note: Subversion is in the process of being integrated with the system,
753 # until we have a properly packed version of the test svn repository, this
753 # until we have a properly packed version of the test svn repository, this
754 # tries to copy over the repo from a package "rc_testdata"
754 # tries to copy over the repo from a package "rc_testdata"
755 svn_repo_path = rc_testdata.get_svn_repo_archive()
755 svn_repo_path = rc_testdata.get_svn_repo_archive()
756 with tarfile.open(svn_repo_path) as tar:
756 with tarfile.open(svn_repo_path) as tar:
757 tar.extractall(jn(test_path, SVN_REPO))
757 tar.extractall(jn(test_path, SVN_REPO))
758
758
759
759
760 def password_changed(auth_user, session):
760 def password_changed(auth_user, session):
761 # Never report password change in case of default user or anonymous user.
761 # Never report password change in case of default user or anonymous user.
762 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
762 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
763 return False
763 return False
764
764
765 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
765 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
766 rhodecode_user = session.get('rhodecode_user', {})
766 rhodecode_user = session.get('rhodecode_user', {})
767 session_password_hash = rhodecode_user.get('password', '')
767 session_password_hash = rhodecode_user.get('password', '')
768 return password_hash != session_password_hash
768 return password_hash != session_password_hash
769
769
770
770
771 def read_opensource_licenses():
771 def read_opensource_licenses():
772 global _license_cache
772 global _license_cache
773
773
774 if not _license_cache:
774 if not _license_cache:
775 licenses = pkg_resources.resource_string(
775 licenses = pkg_resources.resource_string(
776 'rhodecode', 'config/licenses.json')
776 'rhodecode', 'config/licenses.json')
777 _license_cache = json.loads(licenses)
777 _license_cache = json.loads(licenses)
778
778
779 return _license_cache
779 return _license_cache
780
780
781
781
782 def generate_platform_uuid():
782 def generate_platform_uuid():
783 """
783 """
784 Generates platform UUID based on it's name
784 Generates platform UUID based on it's name
785 """
785 """
786 import platform
786 import platform
787
787
788 try:
788 try:
789 uuid_list = [platform.platform()]
789 uuid_list = [platform.platform()]
790 return sha256_safe(':'.join(uuid_list))
790 return sha256_safe(':'.join(uuid_list))
791 except Exception as e:
791 except Exception as e:
792 log.error('Failed to generate host uuid: %s', e)
792 log.error('Failed to generate host uuid: %s', e)
793 return 'UNDEFINED'
793 return 'UNDEFINED'
794
794
795
795
796 def send_test_email(recipients, email_body='TEST EMAIL'):
796 def send_test_email(recipients, email_body='TEST EMAIL'):
797 """
797 """
798 Simple code for generating test emails.
798 Simple code for generating test emails.
799 Usage::
799 Usage::
800
800
801 from rhodecode.lib import utils
801 from rhodecode.lib import utils
802 utils.send_test_email()
802 utils.send_test_email()
803 """
803 """
804 from rhodecode.lib.celerylib import tasks, run_task
804 from rhodecode.lib.celerylib import tasks, run_task
805
805
806 email_body = email_body_plaintext = email_body
806 email_body = email_body_plaintext = email_body
807 subject = f'SUBJECT FROM: {socket.gethostname()}'
807 subject = f'SUBJECT FROM: {socket.gethostname()}'
808 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
808 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
@@ -1,1053 +1,1053 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being lazy
88 Returns list of commit ids, in ascending order. Being lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
114 return out, err
115
115
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
120 link. Sometimes it may happened that git will issue basic
120 link. Sometimes it may happened that git will issue basic
121 auth request that can cause whole API to hang when used from python
121 auth request that can cause whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 when the return code is non 200
125 when the return code is non 200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 f"Cannot create repository at {self.path}, location already exist")
153 f"Cannot create repository at {self.path}, location already exist")
154
154
155 if bare and do_workspace_checkout:
155 if bare and do_workspace_checkout:
156 raise RepositoryError("Cannot update a bare repository")
156 raise RepositoryError("Cannot update a bare repository")
157 try:
157 try:
158
158
159 if src_url:
159 if src_url:
160 # check URL before any actions
160 # check URL before any actions
161 GitRepository.check_url(src_url, self.config)
161 GitRepository.check_url(src_url, self.config)
162
162
163 if create:
163 if create:
164 if bare:
164 if bare:
165 self._remote.init_bare()
165 self._remote.init_bare()
166 else:
166 else:
167 self._remote.init()
167 self._remote.init()
168
168
169 if src_url and bare:
169 if src_url and bare:
170 # bare repository only allows a fetch and checkout is not allowed
170 # bare repository only allows a fetch and checkout is not allowed
171 self.fetch(src_url, commit_ids=None)
171 self.fetch(src_url, commit_ids=None)
172 elif src_url:
172 elif src_url:
173 self.pull(src_url, commit_ids=None,
173 self.pull(src_url, commit_ids=None,
174 update_after=do_workspace_checkout)
174 update_after=do_workspace_checkout)
175
175
176 else:
176 else:
177 if not self._remote.assert_correct_path():
177 if not self._remote.assert_correct_path():
178 raise RepositoryError(
178 raise RepositoryError(
179 f'Path "{self.path}" does not contain a Git repository')
179 f'Path "{self.path}" does not contain a Git repository')
180
180
181 # TODO: johbo: check if we have to translate the OSError here
181 # TODO: johbo: check if we have to translate the OSError here
182 except OSError as err:
182 except OSError as err:
183 raise RepositoryError(err)
183 raise RepositoryError(err)
184
184
185 def _get_all_commit_ids(self):
185 def _get_all_commit_ids(self):
186 return self._remote.get_all_commit_ids()
186 return self._remote.get_all_commit_ids()
187
187
188 def _get_commit_ids(self, filters=None):
188 def _get_commit_ids(self, filters=None):
189 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
190 # fails if it is. And it's cheaper to ask than throw the subprocess
190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 # errors
191 # errors
192
192
193 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
194
194
195 if not head:
195 if not head:
196 return []
196 return []
197
197
198 rev_filter = ['--branches', '--tags']
198 rev_filter = ['--branches', '--tags']
199 extra_filter = []
199 extra_filter = []
200
200
201 if filters:
201 if filters:
202 if filters.get('since'):
202 if filters.get('since'):
203 extra_filter.append('--since=%s' % (filters['since']))
203 extra_filter.append('--since=%s' % (filters['since']))
204 if filters.get('until'):
204 if filters.get('until'):
205 extra_filter.append('--until=%s' % (filters['until']))
205 extra_filter.append('--until=%s' % (filters['until']))
206 if filters.get('branch_name'):
206 if filters.get('branch_name'):
207 rev_filter = []
207 rev_filter = []
208 extra_filter.append(filters['branch_name'])
208 extra_filter.append(filters['branch_name'])
209 rev_filter.extend(extra_filter)
209 rev_filter.extend(extra_filter)
210
210
211 # if filters.get('start') or filters.get('end'):
211 # if filters.get('start') or filters.get('end'):
212 # # skip is offset, max-count is limit
212 # # skip is offset, max-count is limit
213 # if filters.get('start'):
213 # if filters.get('start'):
214 # extra_filter += ' --skip=%s' % filters['start']
214 # extra_filter += ' --skip=%s' % filters['start']
215 # if filters.get('end'):
215 # if filters.get('end'):
216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
217
217
218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
219 try:
219 try:
220 output, __ = self.run_git_command(cmd)
220 output, __ = self.run_git_command(cmd)
221 except RepositoryError:
221 except RepositoryError:
222 # Can be raised for empty repositories
222 # Can be raised for empty repositories
223 return []
223 return []
224 return output.splitlines()
224 return output.splitlines()
225
225
226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
227
227
228 def is_null(value):
228 def is_null(value):
229 return len(value) == commit_id_or_idx.count('0')
229 return len(value) == commit_id_or_idx.count('0')
230
230
231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
232 return self.commit_ids[-1]
232 return self.commit_ids[-1]
233
233
234 commit_missing_err = "Commit {} does not exist for `{}`".format(
234 commit_missing_err = "Commit {} does not exist for `{}`".format(
235 *map(safe_str, [commit_id_or_idx, self.name]))
235 *map(safe_str, [commit_id_or_idx, self.name]))
236
236
237 is_bstr = isinstance(commit_id_or_idx, str)
237 is_bstr = isinstance(commit_id_or_idx, str)
238 is_branch = reference_obj and reference_obj.branch
238 is_branch = reference_obj and reference_obj.branch
239
239
240 lookup_ok = False
240 lookup_ok = False
241 if is_bstr:
241 if is_bstr:
242 # Need to call remote to translate id for tagging scenarios,
242 # Need to call remote to translate id for tagging scenarios,
243 # or branch that are numeric
243 # or branch that are numeric
244 try:
244 try:
245 remote_data = self._remote.get_object(commit_id_or_idx,
245 remote_data = self._remote.get_object(commit_id_or_idx,
246 maybe_unreachable=maybe_unreachable)
246 maybe_unreachable=maybe_unreachable)
247 commit_id_or_idx = remote_data["commit_id"]
247 commit_id_or_idx = remote_data["commit_id"]
248 lookup_ok = True
248 lookup_ok = True
249 except (CommitDoesNotExistError,):
249 except (CommitDoesNotExistError,):
250 lookup_ok = False
250 lookup_ok = False
251
251
252 if lookup_ok is False:
252 if lookup_ok is False:
253 is_numeric_idx = \
253 is_numeric_idx = \
254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
255 or isinstance(commit_id_or_idx, int)
255 or isinstance(commit_id_or_idx, int)
256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
257 try:
257 try:
258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
259 lookup_ok = True
259 lookup_ok = True
260 except Exception:
260 except Exception:
261 raise CommitDoesNotExistError(commit_missing_err)
261 raise CommitDoesNotExistError(commit_missing_err)
262
262
263 # we failed regular lookup, and by integer number lookup
263 # we failed regular lookup, and by integer number lookup
264 if lookup_ok is False:
264 if lookup_ok is False:
265 raise CommitDoesNotExistError(commit_missing_err)
265 raise CommitDoesNotExistError(commit_missing_err)
266
266
267 # Ensure we return full id
267 # Ensure we return full id
268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 raise CommitDoesNotExistError(
269 raise CommitDoesNotExistError(
270 "Given commit id %s not recognized" % commit_id_or_idx)
270 "Given commit id %s not recognized" % commit_id_or_idx)
271 return commit_id_or_idx
271 return commit_id_or_idx
272
272
273 def get_hook_location(self):
273 def get_hook_location(self):
274 """
274 """
275 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
276 """
276 """
277 loc = os.path.join(self.path, 'hooks')
277 loc = os.path.join(self.path, 'hooks')
278 if not self.bare:
278 if not self.bare:
279 loc = os.path.join(self.path, '.git', 'hooks')
279 loc = os.path.join(self.path, '.git', 'hooks')
280 return loc
280 return loc
281
281
282 @LazyProperty
282 @LazyProperty
283 def last_change(self):
283 def last_change(self):
284 """
284 """
285 Returns last change made on this repository as
285 Returns last change made on this repository as
286 `datetime.datetime` object.
286 `datetime.datetime` object.
287 """
287 """
288 try:
288 try:
289 return self.get_commit().date
289 return self.get_commit().date
290 except RepositoryError:
290 except RepositoryError:
291 tzoffset = makedate()[1]
291 tzoffset = makedate()[1]
292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293
293
294 def _get_fs_mtime(self):
294 def _get_fs_mtime(self):
295 idx_loc = '' if self.bare else '.git'
295 idx_loc = '' if self.bare else '.git'
296 # fallback to filesystem
296 # fallback to filesystem
297 in_path = os.path.join(self.path, idx_loc, "index")
297 in_path = os.path.join(self.path, idx_loc, "index")
298 he_path = os.path.join(self.path, idx_loc, "HEAD")
298 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 if os.path.exists(in_path):
299 if os.path.exists(in_path):
300 return os.stat(in_path).st_mtime
300 return os.stat(in_path).st_mtime
301 else:
301 else:
302 return os.stat(he_path).st_mtime
302 return os.stat(he_path).st_mtime
303
303
304 @LazyProperty
304 @LazyProperty
305 def description(self):
305 def description(self):
306 description = self._remote.get_description()
306 description = self._remote.get_description()
307 return safe_str(description or self.DEFAULT_DESCRIPTION)
307 return safe_str(description or self.DEFAULT_DESCRIPTION)
308
308
309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 if self.is_empty():
310 if self.is_empty():
311 return OrderedDict()
311 return OrderedDict()
312
312
313 result = []
313 result = []
314 for ref, sha in self._refs.items():
314 for ref, sha in self._refs.items():
315 if ref.startswith(prefix):
315 if ref.startswith(prefix):
316 ref_name = ref
316 ref_name = ref
317 if strip_prefix:
317 if strip_prefix:
318 ref_name = ref[len(prefix):]
318 ref_name = ref[len(prefix):]
319 result.append((safe_str(ref_name), sha))
319 result.append((safe_str(ref_name), sha))
320
320
321 def get_name(entry):
321 def get_name(entry):
322 return entry[0]
322 return entry[0]
323
323
324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325
325
326 def _get_branches(self):
326 def _get_branches(self):
327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328
328
329 @CachedProperty
329 @CachedProperty
330 def branches(self):
330 def branches(self):
331 return self._get_branches()
331 return self._get_branches()
332
332
333 @CachedProperty
333 @CachedProperty
334 def branches_closed(self):
334 def branches_closed(self):
335 return {}
335 return {}
336
336
337 @CachedProperty
337 @CachedProperty
338 def bookmarks(self):
338 def bookmarks(self):
339 return {}
339 return {}
340
340
341 @CachedProperty
341 @CachedProperty
342 def branches_all(self):
342 def branches_all(self):
343 all_branches = {}
343 all_branches = {}
344 all_branches.update(self.branches)
344 all_branches.update(self.branches)
345 all_branches.update(self.branches_closed)
345 all_branches.update(self.branches_closed)
346 return all_branches
346 return all_branches
347
347
348 @CachedProperty
348 @CachedProperty
349 def tags(self):
349 def tags(self):
350 return self._get_tags()
350 return self._get_tags()
351
351
352 def _get_tags(self):
352 def _get_tags(self):
353 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
353 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
354
354
355 def tag(self, name, user, commit_id=None, message=None, date=None,
355 def tag(self, name, user, commit_id=None, message=None, date=None,
356 **kwargs):
356 **kwargs):
357 # TODO: fix this method to apply annotated tags correct with message
357 # TODO: fix this method to apply annotated tags correct with message
358 """
358 """
359 Creates and returns a tag for the given ``commit_id``.
359 Creates and returns a tag for the given ``commit_id``.
360
360
361 :param name: name for new tag
361 :param name: name for new tag
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param commit_id: commit id for which new tag would be created
363 :param commit_id: commit id for which new tag would be created
364 :param message: message of the tag's commit
364 :param message: message of the tag's commit
365 :param date: date of tag's commit
365 :param date: date of tag's commit
366
366
367 :raises TagAlreadyExistError: if tag with same name already exists
367 :raises TagAlreadyExistError: if tag with same name already exists
368 """
368 """
369 if name in self.tags:
369 if name in self.tags:
370 raise TagAlreadyExistError("Tag %s already exists" % name)
370 raise TagAlreadyExistError("Tag %s already exists" % name)
371 commit = self.get_commit(commit_id=commit_id)
371 commit = self.get_commit(commit_id=commit_id)
372 message = message or f"Added tag {name} for commit {commit.raw_id}"
372 message = message or f"Added tag {name} for commit {commit.raw_id}"
373
373
374 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
374 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
375
375
376 self._invalidate_prop_cache('tags')
376 self._invalidate_prop_cache('tags')
377 self._invalidate_prop_cache('_refs')
377 self._invalidate_prop_cache('_refs')
378
378
379 return commit
379 return commit
380
380
381 def remove_tag(self, name, user, message=None, date=None):
381 def remove_tag(self, name, user, message=None, date=None):
382 """
382 """
383 Removes tag with the given ``name``.
383 Removes tag with the given ``name``.
384
384
385 :param name: name of the tag to be removed
385 :param name: name of the tag to be removed
386 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
386 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param message: message of the tag's removal commit
387 :param message: message of the tag's removal commit
388 :param date: date of tag's removal commit
388 :param date: date of tag's removal commit
389
389
390 :raises TagDoesNotExistError: if tag with given name does not exists
390 :raises TagDoesNotExistError: if tag with given name does not exists
391 """
391 """
392 if name not in self.tags:
392 if name not in self.tags:
393 raise TagDoesNotExistError("Tag %s does not exist" % name)
393 raise TagDoesNotExistError("Tag %s does not exist" % name)
394
394
395 self._remote.tag_remove(name)
395 self._remote.tag_remove(name)
396 self._invalidate_prop_cache('tags')
396 self._invalidate_prop_cache('tags')
397 self._invalidate_prop_cache('_refs')
397 self._invalidate_prop_cache('_refs')
398
398
399 def _get_refs(self):
399 def _get_refs(self):
400 return self._remote.get_refs()
400 return self._remote.get_refs()
401
401
402 @CachedProperty
402 @CachedProperty
403 def _refs(self):
403 def _refs(self):
404 return self._get_refs()
404 return self._get_refs()
405
405
406 @property
406 @property
407 def _ref_tree(self):
407 def _ref_tree(self):
408 node = tree = {}
408 node = tree = {}
409 for ref, sha in self._refs.items():
409 for ref, sha in self._refs.items():
410 path = ref.split('/')
410 path = ref.split('/')
411 for bit in path[:-1]:
411 for bit in path[:-1]:
412 node = node.setdefault(bit, {})
412 node = node.setdefault(bit, {})
413 node[path[-1]] = sha
413 node[path[-1]] = sha
414 node = tree
414 node = tree
415 return tree
415 return tree
416
416
417 def get_remote_ref(self, ref_name):
417 def get_remote_ref(self, ref_name):
418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
419 try:
419 try:
420 return self._refs[ref_key]
420 return self._refs[ref_key]
421 except Exception:
421 except Exception:
422 return
422 return
423
423
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
425 translate_tag=True, maybe_unreachable=False, reference_obj=None):
425 translate_tag=True, maybe_unreachable=False, reference_obj=None):
426 """
426 """
427 Returns `GitCommit` object representing commit from git repository
427 Returns `GitCommit` object representing commit from git repository
428 at the given `commit_id` or head (most recent commit) if None given.
428 at the given `commit_id` or head (most recent commit) if None given.
429 """
429 """
430
430
431 if self.is_empty():
431 if self.is_empty():
432 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
433
433
434 if commit_id is not None:
434 if commit_id is not None:
435 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
436 try:
436 try:
437 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
438 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
439 return GitCommit(self, commit_id, idx, pre_load=pre_load)
439 return GitCommit(self, commit_id, idx, pre_load=pre_load)
440 except KeyError:
440 except KeyError:
441 pass
441 pass
442
442
443 elif commit_idx is not None:
443 elif commit_idx is not None:
444 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
445 try:
445 try:
446 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
447 if commit_idx < 0:
447 if commit_idx < 0:
448 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
449 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
449 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 except IndexError:
450 except IndexError:
451 commit_id = commit_idx
451 commit_id = commit_idx
452 else:
452 else:
453 commit_id = "tip"
453 commit_id = "tip"
454
454
455 if translate_tag:
455 if translate_tag:
456 commit_id = self._lookup_commit(
456 commit_id = self._lookup_commit(
457 commit_id, maybe_unreachable=maybe_unreachable,
457 commit_id, maybe_unreachable=maybe_unreachable,
458 reference_obj=reference_obj)
458 reference_obj=reference_obj)
459
459
460 try:
460 try:
461 idx = self._commit_ids[commit_id]
461 idx = self._commit_ids[commit_id]
462 except KeyError:
462 except KeyError:
463 idx = -1
463 idx = -1
464
464
465 return GitCommit(self, commit_id, idx, pre_load=pre_load)
465 return GitCommit(self, commit_id, idx, pre_load=pre_load)
466
466
467 def get_commits(
467 def get_commits(
468 self, start_id=None, end_id=None, start_date=None, end_date=None,
468 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
469 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
470 """
470 """
471 Returns generator of `GitCommit` objects from start to end (both
471 Returns generator of `GitCommit` objects from start to end (both
472 are inclusive), in ascending date order.
472 are inclusive), in ascending date order.
473
473
474 :param start_id: None, str(commit_id)
474 :param start_id: None, str(commit_id)
475 :param end_id: None, str(commit_id)
475 :param end_id: None, str(commit_id)
476 :param start_date: if specified, commits with commit date less than
476 :param start_date: if specified, commits with commit date less than
477 ``start_date`` would be filtered out from returned set
477 ``start_date`` would be filtered out from returned set
478 :param end_date: if specified, commits with commit date greater than
478 :param end_date: if specified, commits with commit date greater than
479 ``end_date`` would be filtered out from returned set
479 ``end_date`` would be filtered out from returned set
480 :param branch_name: if specified, commits not reachable from given
480 :param branch_name: if specified, commits not reachable from given
481 branch would be filtered out from returned set
481 branch would be filtered out from returned set
482 :param show_hidden: Show hidden commits such as obsolete or hidden from
482 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 Mercurial evolve
483 Mercurial evolve
484 :raise BranchDoesNotExistError: If given `branch_name` does not
484 :raise BranchDoesNotExistError: If given `branch_name` does not
485 exist.
485 exist.
486 :raise CommitDoesNotExistError: If commits for given `start` or
486 :raise CommitDoesNotExistError: If commits for given `start` or
487 `end` could not be found.
487 `end` could not be found.
488
488
489 """
489 """
490 if self.is_empty():
490 if self.is_empty():
491 raise EmptyRepositoryError("There are no commits yet")
491 raise EmptyRepositoryError("There are no commits yet")
492
492
493 self._validate_branch_name(branch_name)
493 self._validate_branch_name(branch_name)
494
494
495 if start_id is not None:
495 if start_id is not None:
496 self._validate_commit_id(start_id)
496 self._validate_commit_id(start_id)
497 if end_id is not None:
497 if end_id is not None:
498 self._validate_commit_id(end_id)
498 self._validate_commit_id(end_id)
499
499
500 start_raw_id = self._lookup_commit(start_id)
500 start_raw_id = self._lookup_commit(start_id)
501 start_pos = self._commit_ids[start_raw_id] if start_id else None
501 start_pos = self._commit_ids[start_raw_id] if start_id else None
502 end_raw_id = self._lookup_commit(end_id)
502 end_raw_id = self._lookup_commit(end_id)
503 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
503 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
504
504
505 if None not in [start_id, end_id] and start_pos > end_pos:
505 if None not in [start_id, end_id] and start_pos > end_pos:
506 raise RepositoryError(
506 raise RepositoryError(
507 "Start commit '%s' cannot be after end commit '%s'" %
507 "Start commit '%s' cannot be after end commit '%s'" %
508 (start_id, end_id))
508 (start_id, end_id))
509
509
510 if end_pos is not None:
510 if end_pos is not None:
511 end_pos += 1
511 end_pos += 1
512
512
513 filter_ = []
513 filter_ = []
514 if branch_name:
514 if branch_name:
515 filter_.append({'branch_name': branch_name})
515 filter_.append({'branch_name': branch_name})
516 if start_date and not end_date:
516 if start_date and not end_date:
517 filter_.append({'since': start_date})
517 filter_.append({'since': start_date})
518 if end_date and not start_date:
518 if end_date and not start_date:
519 filter_.append({'until': end_date})
519 filter_.append({'until': end_date})
520 if start_date and end_date:
520 if start_date and end_date:
521 filter_.append({'since': start_date})
521 filter_.append({'since': start_date})
522 filter_.append({'until': end_date})
522 filter_.append({'until': end_date})
523
523
524 # if start_pos or end_pos:
524 # if start_pos or end_pos:
525 # filter_.append({'start': start_pos})
525 # filter_.append({'start': start_pos})
526 # filter_.append({'end': end_pos})
526 # filter_.append({'end': end_pos})
527
527
528 if filter_:
528 if filter_:
529 revfilters = {
529 revfilters = {
530 'branch_name': branch_name,
530 'branch_name': branch_name,
531 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
531 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
532 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
532 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
533 'start': start_pos,
533 'start': start_pos,
534 'end': end_pos,
534 'end': end_pos,
535 }
535 }
536 commit_ids = self._get_commit_ids(filters=revfilters)
536 commit_ids = self._get_commit_ids(filters=revfilters)
537
537
538 else:
538 else:
539 commit_ids = self.commit_ids
539 commit_ids = self.commit_ids
540
540
541 if start_pos or end_pos:
541 if start_pos or end_pos:
542 commit_ids = commit_ids[start_pos: end_pos]
542 commit_ids = commit_ids[start_pos: end_pos]
543
543
544 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
544 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
545 translate_tag=translate_tags)
545 translate_tag=translate_tags)
546
546
547 def get_diff(
547 def get_diff(
548 self, commit1, commit2, path='', ignore_whitespace=False,
548 self, commit1, commit2, path='', ignore_whitespace=False,
549 context=3, path1=None):
549 context=3, path1=None):
550 """
550 """
551 Returns (git like) *diff*, as plain text. Shows changes introduced by
551 Returns (git like) *diff*, as plain text. Shows changes introduced by
552 ``commit2`` since ``commit1``.
552 ``commit2`` since ``commit1``.
553
553
554 :param commit1: Entry point from which diff is shown. Can be
554 :param commit1: Entry point from which diff is shown. Can be
555 ``self.EMPTY_COMMIT`` - in this case, patch showing all
555 ``self.EMPTY_COMMIT`` - in this case, patch showing all
556 the changes since empty state of the repository until ``commit2``
556 the changes since empty state of the repository until ``commit2``
557 :param commit2: Until which commits changes should be shown.
557 :param commit2: Until which commits changes should be shown.
558 :param path:
558 :param path:
559 :param ignore_whitespace: If set to ``True``, would not show whitespace
559 :param ignore_whitespace: If set to ``True``, would not show whitespace
560 changes. Defaults to ``False``.
560 changes. Defaults to ``False``.
561 :param context: How many lines before/after changed lines should be
561 :param context: How many lines before/after changed lines should be
562 shown. Defaults to ``3``.
562 shown. Defaults to ``3``.
563 :param path1:
563 :param path1:
564 """
564 """
565 self._validate_diff_commits(commit1, commit2)
565 self._validate_diff_commits(commit1, commit2)
566 if path1 is not None and path1 != path:
566 if path1 is not None and path1 != path:
567 raise ValueError("Diff of two different paths not supported.")
567 raise ValueError("Diff of two different paths not supported.")
568
568
569 if path:
569 if path:
570 file_filter = path
570 file_filter = path
571 else:
571 else:
572 file_filter = None
572 file_filter = None
573
573
574 diff = self._remote.diff(
574 diff = self._remote.diff(
575 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
575 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
576 opt_ignorews=ignore_whitespace,
576 opt_ignorews=ignore_whitespace,
577 context=context)
577 context=context)
578
578
579 return GitDiff(diff)
579 return GitDiff(diff)
580
580
581 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
582 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
583 if commit.merge:
583 if commit.merge:
584 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
585
585
586 if not branch_name:
586 if not branch_name:
587 raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')
587 raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')
588
588
589 # parent is going to be the new head now
589 # parent is going to be the new head now
590 commit = commit.parents[0]
590 commit = commit.parents[0]
591 self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)
591 self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)
592
592
593 # clear cached properties
593 # clear cached properties
594 self._invalidate_prop_cache('commit_ids')
594 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('_refs')
595 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('branches')
596 self._invalidate_prop_cache('branches')
597
597
598 return len(self.commit_ids)
598 return len(self.commit_ids)
599
599
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 self, commit_id1, repo2, commit_id2)
602 self, commit_id1, repo2, commit_id2)
603
603
604 if commit_id1 == commit_id2:
604 if commit_id1 == commit_id2:
605 return commit_id1
605 return commit_id1
606
606
607 if self != repo2:
607 if self != repo2:
608 commits = self._remote.get_missing_revs(
608 commits = self._remote.get_missing_revs(
609 commit_id1, commit_id2, repo2.path)
609 commit_id1, commit_id2, repo2.path)
610 if commits:
610 if commits:
611 commit = repo2.get_commit(commits[-1])
611 commit = repo2.get_commit(commits[-1])
612 if commit.parents:
612 if commit.parents:
613 ancestor_id = commit.parents[0].raw_id
613 ancestor_id = commit.parents[0].raw_id
614 else:
614 else:
615 ancestor_id = None
615 ancestor_id = None
616 else:
616 else:
617 # no commits from other repo, ancestor_id is the commit_id2
617 # no commits from other repo, ancestor_id is the commit_id2
618 ancestor_id = commit_id2
618 ancestor_id = commit_id2
619 else:
619 else:
620 output, __ = self.run_git_command(
620 output, __ = self.run_git_command(
621 ['merge-base', commit_id1, commit_id2])
621 ['merge-base', commit_id1, commit_id2])
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623
623
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
625
625
626 return ancestor_id
626 return ancestor_id
627
627
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 repo1 = self
629 repo1 = self
630 ancestor_id = None
630 ancestor_id = None
631
631
632 if commit_id1 == commit_id2:
632 if commit_id1 == commit_id2:
633 commits = []
633 commits = []
634 elif repo1 != repo2:
634 elif repo1 != repo2:
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 repo2.path)
636 repo2.path)
637 commits = [
637 commits = [
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 for commit_id in reversed(missing_ids)]
639 for commit_id in reversed(missing_ids)]
640 else:
640 else:
641 output, __ = repo1.run_git_command(
641 output, __ = repo1.run_git_command(
642 ['log', '--reverse', '--pretty=format: %H', '-s',
642 ['log', '--reverse', '--pretty=format: %H', '-s',
643 f'{commit_id1}..{commit_id2}'])
643 f'{commit_id1}..{commit_id2}'])
644 commits = [
644 commits = [
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647
647
648 return commits
648 return commits
649
649
650 @LazyProperty
650 @LazyProperty
651 def in_memory_commit(self):
651 def in_memory_commit(self):
652 """
652 """
653 Returns ``GitInMemoryCommit`` object for this repository.
653 Returns ``GitInMemoryCommit`` object for this repository.
654 """
654 """
655 return GitInMemoryCommit(self)
655 return GitInMemoryCommit(self)
656
656
657 def pull(self, url, commit_ids=None, update_after=False):
657 def pull(self, url, commit_ids=None, update_after=False):
658 """
658 """
659 Pull changes from external location. Pull is different in GIT
659 Pull changes from external location. Pull is different in GIT
660 that fetch since it's doing a checkout
660 that fetch since it's doing a checkout
661
661
662 :param commit_ids: Optional. Can be set to a list of commit ids
662 :param commit_ids: Optional. Can be set to a list of commit ids
663 which shall be pulled from the other repository.
663 which shall be pulled from the other repository.
664 """
664 """
665 refs = None
665 refs = None
666 if commit_ids is not None:
666 if commit_ids is not None:
667 remote_refs = self._remote.get_remote_refs(url)
667 remote_refs = self._remote.get_remote_refs(url)
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 self._remote.pull(url, refs=refs, update_after=update_after)
669 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.invalidate_vcs_cache()
670 self._remote.invalidate_vcs_cache()
671
671
672 def fetch(self, url, commit_ids=None, **kwargs):
672 def fetch(self, url, commit_ids=None, **kwargs):
673 """
673 """
674 Fetch all git objects from external location.
674 Fetch all git objects from external location.
675 """
675 """
676 self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
676 self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
677 self._remote.invalidate_vcs_cache()
677 self._remote.invalidate_vcs_cache()
678
678
679 def push(self, url, **kwargs):
679 def push(self, url, **kwargs):
680 refs = None
680 refs = None
681 self._remote.sync_push(url, refs=refs, **kwargs)
681 self._remote.sync_push(url, refs=refs, **kwargs)
682
682
    def set_refs(self, ref_name, commit_id):
        """
        Create or update the git ref ``ref_name`` to point at ``commit_id``.
        """
        self._remote.set_refs(ref_name, commit_id)
        # the cached '_refs' property is now stale; drop it
        self._invalidate_prop_cache('_refs')
686
686
    def remove_ref(self, ref_name):
        """
        Delete the git ref ``ref_name`` from this repository.
        """
        self._remote.remove_ref(ref_name)
        # the cached '_refs' property is now stale; drop it
        self._invalidate_prop_cache('_refs')
690
690
691 def run_gc(self, prune=True):
691 def run_gc(self, prune=True):
692 cmd = ['gc', '--aggressive']
692 cmd = ['gc', '--aggressive']
693 if prune:
693 if prune:
694 cmd += ['--prune=now']
694 cmd += ['--prune=now']
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 return stderr
696 return stderr
697
697
    def _update_server_info(self, force=False):
        """
        Runs git's update-server-info command in this repo instance.

        :param force: forwarded to the vcsserver remote call; presumably
            forces regeneration of the auxiliary info files — confirm
            against the vcsserver implementation.
        """
        self._remote.update_server_info(force=force)
703
703
704 def _current_branch(self):
704 def _current_branch(self):
705 """
705 """
706 Return the name of the current branch.
706 Return the name of the current branch.
707
707
708 It only works for non bare repositories (i.e. repositories with a
708 It only works for non bare repositories (i.e. repositories with a
709 working copy)
709 working copy)
710 """
710 """
711 if self.bare:
711 if self.bare:
712 raise RepositoryError('Bare git repos do not have active branches')
712 raise RepositoryError('Bare git repos do not have active branches')
713
713
714 if self.is_empty():
714 if self.is_empty():
715 return None
715 return None
716
716
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 return stdout.strip()
718 return stdout.strip()
719
719
720 def _checkout(self, branch_name, create=False, force=False):
720 def _checkout(self, branch_name, create=False, force=False):
721 """
721 """
722 Checkout a branch in the working directory.
722 Checkout a branch in the working directory.
723
723
724 It tries to create the branch if create is True, failing if the branch
724 It tries to create the branch if create is True, failing if the branch
725 already exists.
725 already exists.
726
726
727 It only works for non bare repositories (i.e. repositories with a
727 It only works for non bare repositories (i.e. repositories with a
728 working copy)
728 working copy)
729 """
729 """
730 if self.bare:
730 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
732
733 cmd = ['checkout']
733 cmd = ['checkout']
734 if force:
734 if force:
735 cmd.append('-f')
735 cmd.append('-f')
736 if create:
736 if create:
737 cmd.append('-b')
737 cmd.append('-b')
738 cmd.append(branch_name)
738 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
739 self.run_git_command(cmd, fail_on_stderr=False)
740
740
    def _create_branch(self, branch_name, commit_id):
        """
        Creates a branch named ``branch_name`` pointing at ``commit_id``
        in this GIT repo (delegated to the vcsserver remote).
        """
        self._remote.create_branch(branch_name, commit_id)
746
746
747 def _identify(self):
747 def _identify(self):
748 """
748 """
749 Return the current state of the working directory.
749 Return the current state of the working directory.
750 """
750 """
751 if self.bare:
751 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
753
753
754 if self.is_empty():
754 if self.is_empty():
755 return None
755 return None
756
756
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
758 return stdout.strip()
759
759
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param clone_path: filesystem destination for the clone.
        :param branch_name: branch to check out in the clone.
        :param source_branch: optional additional branch fetched into the
            clone so it is available for later merge testing.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
779
779
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
781 """
782 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
783 """
783 """
784 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
785 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
787
787
788 if use_origin:
788 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
790 branch=branch_name)
791
791
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
793 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
795
795
796 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
797 branch_name = f'{branch_name}'
797 branch_name = f'{branch_name}'
798 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
800
800
801 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
802 """
802 """
803 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
804
804
805 The algorithm is defined at
805 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
807 """
808 if not self.bare:
808 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
810 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
812
813 heads = []
813 heads = []
814 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
815 for line in f:
815 for line in f:
816 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
817 continue
817 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
819 heads.append(line)
820
820
821 return heads
821 return heads
822
822
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """
        Return a fresh GitRepository instance for the given shadow repo path.

        :param enable_hooks: accepted for interface compatibility; not used
            by this git implementation.
        :param cache: forwarded as the vcsserver wire ``cache`` flag.
        """
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825
825
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
827 """
828 Pull a branch from a local repository.
828 Pull a branch from a local repository.
829 """
829 """
830 if self.bare:
830 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
834 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
837 cmd = ['pull']
837 cmd = ['pull']
838 if ff_only:
838 if ff_only:
839 cmd.append('--ff-only')
839 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
842
842
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param user_name: committer name recorded on the merge commit.
        :param user_email: committer email recorded on the merge commit.
        :param heads: the heads to merge.
        :raises UnresolvedFilesInRepo: when the merge left conflicted files.
        :raises RepositoryError: on any other merge failure.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            # nothing to merge
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', f'user.name="{user_name}"',
               '-c', f'user.email={user_email}',
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            # ask git which files are still in a conflicted (U) state
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistent with HG backend output
            unresolved = [f'U {f}' for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                # no conflicted files: re-raise the original merge error
                raise
890
890
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.

        :param source_branch: branch in this repository to push from.
        :param repository_path: path of the target repository.
        :param target_branch: branch in the target repository to push to.
        :param enable_hooks: when False, RC_SKIP_HOOKS=1 is set in the
            git environment so RhodeCode hooks are skipped.
        :param rc_scm_data: optional payload exported as RC_SCM_DATA.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   f'{source_branch}:{target_branch}']
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922
922
923 def _get_new_pr_branch(self, source_branch, target_branch):
923 def _get_new_pr_branch(self, source_branch, target_branch):
924 prefix = f'pr_{source_branch}-{target_branch}_'
924 prefix = f'pr_{source_branch}-{target_branch}_'
925 pr_branches = []
925 pr_branches = []
926 for branch in self.branches:
926 for branch in self.branches:
927 if branch.startswith(prefix):
927 if branch.startswith(prefix):
928 pr_branches.append(int(branch[len(prefix):]))
928 pr_branches.append(int(branch[len(prefix):]))
929
929
930 if not pr_branches:
930 if not pr_branches:
931 branch_id = 0
931 branch_id = 0
932 else:
932 else:
933 branch_id = max(pr_branches) + 1
933 branch_id = max(pr_branches) + 1
934
934
935 return '%s%d' % (prefix, branch_id)
935 return '%s%d' % (prefix, branch_id)
936
936
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Ensure a shadow repository exists for the given merge workspace
        and return its path; the clone is created only on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path
948
948
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Perform (or with ``dry_run`` just test) a merge of ``source_ref``
        from ``source_repo`` into ``target_ref`` of this repository, using
        a shadow repository as the workspace.

        :return: a MergeResponse describing possibility/success and, on
            failure, the MergeFailureReason plus explanatory metadata.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        # the caller's view of the target must match the current branch head,
        # otherwise the merge base is outdated
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # push the merge result into the real target repo, running
                # the RhodeCode hooks there
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            # merge impossible, or dry_run requested: never push
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
General Comments 0
You need to be logged in to leave comments. Login now