chore(mercurial): Fixed usage of str in generation of mercurial configs, and fixed largefiles call
super-admin
r5188:643e5e48 default
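The functional changes are small and visible in the diffs below: the fallback config built in MercurialRepository.__init__ now enables the largefiles extension with an empty value instead of '1' (in hgrc terms, "largefiles =" enables the bundled extension, whereas a non-empty value is read as a path to load it from), and the merge tests pass bytes rather than str to the test backend's add_file helper. A minimal sketch of what the new default amounts to, with a placeholder path that is not part of this changeset:

# Hedged sketch only; the path is a placeholder.
# With no explicit config, the repository seeds a default equivalent to:
#   [extensions]
#   largefiles =
# i.e. the extension is enabled with an empty value rather than '1'.
repo = MercurialRepository('/srv/repos/example-hg')

# The updated merge tests likewise switch to bytes, e.g.:
# vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')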
@@ -1,1013 +1,1017 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, will try to create the repository if
69 69 it does not exist rather than raising an exception
70 70 :param src_url=None: if given, will try to clone the repository from the given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with a config, we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being a lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = {commit_id: index
106 106 for index, commit_id in enumerate(commit_ids)}
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only active, non-closed branches by default
127 127
128 128 :param active: also return active branches
129 129 :param closed: also return closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(n, h,) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(n, h,) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = f"Added tag {name} for commit {commit.short_id}"
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (n, h) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git-like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
250 250 the changes from the empty state of the repository up to `commit2`
251 251 :param commit2: The commit up to which changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup=False)
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check the given url and try to verify if it's a valid
337 337 link. Sometimes it may happen that mercurial will issue a basic
338 338 auth request that can cause the whole API to hang when used from python
339 339 or other external calls.
340 340
341 341 On failures it'll raise urllib.error.HTTPError; an exception is also raised
342 342 when the return code is not 200
343 343 """
344 344 # check first if it's not a local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
355 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 356 """
357 357 Function will check for mercurial repository in given path. If there
358 358 is no repository in that path it will raise an exception unless
359 359 `create` parameter is set to True - in that case repository would
360 360 be created.
361 361
362 362 If `src_url` is given, it will try to clone the repository from the
363 363 location at the given clone_point. Additionally it'll update the
364 364 working copy according to the `do_workspace_checkout` flag.
365 365 """
366 366 if create and os.path.exists(self.path):
367 367 raise RepositoryError(
368 368 f"Cannot create repository at {self.path}, location already exist")
369 369
370 370 if src_url:
371 371 url = str(self._get_url(src_url))
372 372 MercurialRepository.check_url(url, self.config)
373 373
374 374 self._remote.clone(url, self.path, do_workspace_checkout)
375 375
376 376 # Don't try to create if we've already cloned repo
377 377 create = False
378 378
379 379 if create:
380 380 os.makedirs(self.path, mode=0o755)
381 381
382 382 self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_str(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_str(contact or self.DEFAULT_CONTACT)
400 400
401 401 @LazyProperty
402 402 def last_change(self):
403 403 """
404 404 Returns last change made on this repository as a
405 405 `datetime.datetime` object.
406 406 """
407 407 try:
408 408 return self.get_commit().date
409 409 except RepositoryError:
410 410 tzoffset = makedate()[1]
411 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If no scheme is given, falls back
425 425 to the filesystem
426 426 (``file:///``) scheme.
427 427 """
428 428 if url != 'default' and '://' not in url:
429 429 url = "file:" + urllib.request.pathname2url(url)
430 430 return url
431 431
432 432 def get_hook_location(self):
433 433 """
434 434 returns absolute path to location where hooks are stored
435 435 """
436 436 return os.path.join(self.path, '.hg', '.hgrc')
437 437
438 438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 440 """
441 441 Returns ``MercurialCommit`` object representing repository's
442 442 commit at the given `commit_id` or `commit_idx`.
443 443 """
444 444 if self.is_empty():
445 445 raise EmptyRepositoryError("There are no commits yet")
446 446
447 447 if commit_id is not None:
448 448 self._validate_commit_id(commit_id)
449 449 try:
450 450 # we have cached idx, use it without contacting the remote
451 451 idx = self._commit_ids[commit_id]
452 452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 453 except KeyError:
454 454 pass
455 455
456 456 elif commit_idx is not None:
457 457 self._validate_commit_idx(commit_idx)
458 458 try:
459 459 _commit_id = self.commit_ids[commit_idx]
460 460 if commit_idx < 0:
461 461 commit_idx = self.commit_ids.index(_commit_id)
462 462
463 463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 464 except IndexError:
465 465 commit_id = commit_idx
466 466 else:
467 467 commit_id = "tip"
468 468
469 469 # no cached version at this point, do an actual lookup instead
470 470 try:
471 471 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 472 except CommitDoesNotExistError:
473 473 msg = "Commit {} does not exist for `{}`".format(
474 474 *map(safe_str, [commit_id, self.name]))
475 475 raise CommitDoesNotExistError(msg)
476 476
477 477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478 478
479 479 def get_commits(
480 480 self, start_id=None, end_id=None, start_date=None, end_date=None,
481 481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
482 482 """
483 483 Returns generator of ``MercurialCommit`` objects from start to end
484 484 (both are inclusive)
485 485
486 486 :param start_id: None, str(commit_id)
487 487 :param end_id: None, str(commit_id)
488 488 :param start_date: if specified, commits with commit date less than
489 489 ``start_date`` would be filtered out from returned set
490 490 :param end_date: if specified, commits with commit date greater than
491 491 ``end_date`` would be filtered out from returned set
492 492 :param branch_name: if specified, commits not reachable from given
493 493 branch would be filtered out from returned set
494 494 :param show_hidden: Show hidden commits such as obsolete or hidden from
495 495 Mercurial evolve
496 496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
497 497 exist.
498 498 :raise CommitDoesNotExistError: If commit for given ``start`` or
499 499 ``end`` could not be found.
500 500 """
501 501 # actually we should check now if it's not an empty repo
502 502 if self.is_empty():
503 503 raise EmptyRepositoryError("There are no commits yet")
504 504 self._validate_branch_name(branch_name)
505 505
506 506 branch_ancestors = False
507 507 if start_id is not None:
508 508 self._validate_commit_id(start_id)
509 509 c_start = self.get_commit(commit_id=start_id)
510 510 start_pos = self._commit_ids[c_start.raw_id]
511 511 else:
512 512 start_pos = None
513 513
514 514 if end_id is not None:
515 515 self._validate_commit_id(end_id)
516 516 c_end = self.get_commit(commit_id=end_id)
517 517 end_pos = max(0, self._commit_ids[c_end.raw_id])
518 518 else:
519 519 end_pos = None
520 520
521 521 if None not in [start_id, end_id] and start_pos > end_pos:
522 522 raise RepositoryError(
523 523 "Start commit '%s' cannot be after end commit '%s'" %
524 524 (start_id, end_id))
525 525
526 526 if end_pos is not None:
527 527 end_pos += 1
528 528
529 529 commit_filter = []
530 530
531 531 if branch_name and not branch_ancestors:
532 532 commit_filter.append(f'branch("{branch_name}")')
533 533 elif branch_name and branch_ancestors:
534 534 commit_filter.append(f'ancestors(branch("{branch_name}"))')
535 535
536 536 if start_date and not end_date:
537 537 commit_filter.append(f'date(">{start_date}")')
538 538 if end_date and not start_date:
539 539 commit_filter.append(f'date("<{end_date}")')
540 540 if start_date and end_date:
541 541 commit_filter.append(
542 542 f'date(">{start_date}") and date("<{end_date}")')
543 543
544 544 if not show_hidden:
545 545 commit_filter.append('not obsolete()')
546 546 commit_filter.append('not hidden()')
547 547
548 548 # TODO: johbo: Figure out a simpler way for this solution
549 549 collection_generator = CollectionGenerator
550 550 if commit_filter:
551 551 commit_filter = ' and '.join(map(safe_str, commit_filter))
552 552 revisions = self._remote.rev_range([commit_filter])
553 553 collection_generator = MercurialIndexBasedCollectionGenerator
554 554 else:
555 555 revisions = self.commit_ids
556 556
557 557 if start_pos or end_pos:
558 558 revisions = revisions[start_pos:end_pos]
559 559
560 560 return collection_generator(self, revisions, pre_load=pre_load)
561 561
562 562 def pull(self, url, commit_ids=None):
563 563 """
564 564 Pull changes from external location.
565 565
566 566 :param commit_ids: Optional. Can be set to a list of commit ids
567 567 which shall be pulled from the other repository.
568 568 """
569 569 url = self._get_url(url)
570 570 self._remote.pull(url, commit_ids=commit_ids)
571 571 self._remote.invalidate_vcs_cache()
572 572
573 573 def fetch(self, url, commit_ids=None):
574 574 """
575 575 Backward compatibility with GIT fetch==pull
576 576 """
577 577 return self.pull(url, commit_ids=commit_ids)
578 578
579 579 def push(self, url):
580 580 url = self._get_url(url)
581 581 self._remote.sync_push(url)
582 582
583 583 def _local_clone(self, clone_path):
584 584 """
585 585 Create a local clone of the current repo.
586 586 """
587 587 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 588 hooks=False)
589 589
590 590 def _update(self, revision, clean=False):
591 591 """
592 592 Update the working copy to the specified revision.
593 593 """
594 594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 595 self._remote.update(revision, clean=clean)
596 596
597 597 def _identify(self):
598 598 """
599 599 Return the current state of the working directory.
600 600 """
601 601 return self._remote.identify().strip().rstrip('+')
602 602
603 603 def _heads(self, branch=None):
604 604 """
605 605 Return the commit ids of the repository heads.
606 606 """
607 607 return self._remote.heads(branch=branch).strip().split(' ')
608 608
609 609 def _ancestor(self, revision1, revision2):
610 610 """
611 611 Return the common ancestor of the two revisions.
612 612 """
613 613 return self._remote.ancestor(revision1, revision2)
614 614
615 615 def _local_push(
616 616 self, revision, repository_path, push_branches=False,
617 617 enable_hooks=False):
618 618 """
619 619 Push the given revision to the specified repository.
620 620
621 621 :param push_branches: allow creating branches in the target repo.
622 622 """
623 623 self._remote.push(
624 624 [revision], repository_path, hooks=enable_hooks,
625 625 push_branches=push_branches)
626 626
627 627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
628 628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
629 629 """
630 630 Merge the given source_revision into the checked out revision.
631 631
632 632 Returns the commit id of the merge and a boolean indicating if the
633 633 commit needs to be pushed.
634 634 """
635
635 636 source_ref_commit_id = source_ref.commit_id
636 637 target_ref_commit_id = target_ref.commit_id
637 638
638 639 # update our workdir to target ref, for proper merge
639 640 self._update(target_ref_commit_id, clean=True)
640 641
641 642 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
642 643 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
643 644
644 645 if close_commit_id:
645 646 # NOTE(marcink): if we get the close commit, this is our new source
646 647 # which will include the close commit itself.
647 648 source_ref_commit_id = close_commit_id
648 649
649 650 if ancestor == source_ref_commit_id:
650 651 # Nothing to do, the changes were already integrated
651 652 return target_ref_commit_id, False
652 653
653 654 elif ancestor == target_ref_commit_id and is_the_same_branch:
654 655 # In this case we should force a commit message
655 656 return source_ref_commit_id, True
656 657
657 658 unresolved = None
658 659 if use_rebase:
659 660 try:
660 661 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
661 662 self.bookmark(bookmark_name, revision=source_ref.commit_id)
662 663 self._remote.rebase(
663 664 source=source_ref_commit_id, dest=target_ref_commit_id)
664 665 self._remote.invalidate_vcs_cache()
665 666 self._update(bookmark_name, clean=True)
666 667 return self._identify(), True
667 668 except RepositoryError as e:
668 669 # The rebase-abort may raise another exception which 'hides'
669 670 # the original one, therefore we log it here.
670 671 log.exception('Error while rebasing shadow repo during merge.')
671 672 if 'unresolved conflicts' in safe_str(e):
672 673 unresolved = self._remote.get_unresolved_files()
673 674 log.debug('unresolved files: %s', unresolved)
674 675
675 676 # Cleanup any rebase leftovers
676 677 self._remote.invalidate_vcs_cache()
677 678 self._remote.rebase(abort=True)
678 679 self._remote.invalidate_vcs_cache()
679 680 self._remote.update(clean=True)
680 681 if unresolved:
681 682 raise UnresolvedFilesInRepo(unresolved)
682 683 else:
683 684 raise
684 685 else:
685 686 try:
686 687 self._remote.merge(source_ref_commit_id)
687 688 self._remote.invalidate_vcs_cache()
688 689 self._remote.commit(
689 690 message=safe_str(merge_message),
690 691 username=safe_str(f'{user_name} <{user_email}>'))
691 692 self._remote.invalidate_vcs_cache()
692 693 return self._identify(), True
693 694 except RepositoryError as e:
694 695 # The merge-abort may raise another exception which 'hides'
695 696 # the original one, therefore we log it here.
696 697 log.exception('Error while merging shadow repo during merge.')
697 698 if 'unresolved merge conflicts' in safe_str(e):
698 699 unresolved = self._remote.get_unresolved_files()
699 700 log.debug('unresolved files: %s', unresolved)
700 701
701 702 # Cleanup any merge leftovers
702 703 self._remote.update(clean=True)
703 704 if unresolved:
704 705 raise UnresolvedFilesInRepo(unresolved)
705 706 else:
706 707 raise
707 708
708 709 def _local_close(self, target_ref, user_name, user_email,
709 710 source_ref, close_message=''):
710 711 """
711 712 Close the branch of the given source_revision
712 713
713 714 Returns the commit id of the close and a boolean indicating if the
714 715 commit needs to be pushed.
715 716 """
716 717 self._update(source_ref.commit_id)
717 718 message = close_message or f"Closing branch: `{source_ref.name}`"
718 719 try:
719 720 self._remote.commit(
720 721 message=safe_str(message),
721 722 username=safe_str(f'{user_name} <{user_email}>'),
722 723 close_branch=True)
723 724 self._remote.invalidate_vcs_cache()
724 725 return self._identify(), True
725 726 except RepositoryError:
726 727 # Cleanup any commit leftovers
727 728 self._remote.update(clean=True)
728 729 raise
729 730
730 731 def _is_the_same_branch(self, target_ref, source_ref):
731 732 return (
732 733 self._get_branch_name(target_ref) ==
733 734 self._get_branch_name(source_ref))
734 735
735 736 def _get_branch_name(self, ref):
736 737 if ref.type == 'branch':
737 738 return ref.name
738 739 return self._remote.ctx_branch(ref.commit_id)
739 740
740 741 def _maybe_prepare_merge_workspace(
741 742 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 743 shadow_repository_path = self._get_shadow_repository_path(
743 744 self.path, repo_id, workspace_id)
744 745 if not os.path.exists(shadow_repository_path):
745 746 self._local_clone(shadow_repository_path)
746 747 log.debug(
747 748 'Prepared shadow repository in %s', shadow_repository_path)
748 749
749 750 return shadow_repository_path
750 751
751 752 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 753 source_repo, source_ref, merge_message,
753 754 merger_name, merger_email, dry_run=False,
754 755 use_rebase=False, close_branch=False):
755 756
756 757 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 758 'rebase' if use_rebase else 'merge', dry_run)
759
758 760 if target_ref.commit_id not in self._heads():
759 761 return MergeResponse(
760 762 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
761 763 metadata={'target_ref': target_ref})
762 764
763 765 try:
764 766 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
765 767 heads_all = self._heads(target_ref.name)
766 768 max_heads = 10
767 769 if len(heads_all) > max_heads:
768 770 heads = '\n,'.join(
769 771 heads_all[:max_heads] +
770 772 [f'and {len(heads_all)-max_heads} more.'])
771 773 else:
772 774 heads = '\n,'.join(heads_all)
773 775 metadata = {
774 776 'target_ref': target_ref,
775 777 'source_ref': source_ref,
776 778 'heads': heads
777 779 }
778 780 return MergeResponse(
779 781 False, False, None,
780 782 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
781 783 metadata=metadata)
782 784 except CommitDoesNotExistError:
783 785 log.exception('Failure when looking up branch heads on hg target')
784 786 return MergeResponse(
785 787 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
786 788 metadata={'target_ref': target_ref})
787 789
788 790 shadow_repository_path = self._maybe_prepare_merge_workspace(
789 791 repo_id, workspace_id, target_ref, source_ref)
790 792 shadow_repo = self.get_shadow_instance(shadow_repository_path)
791 793
792 794 log.debug('Pulling in target reference %s', target_ref)
793 795 self._validate_pull_reference(target_ref)
794 796 shadow_repo._local_pull(self.path, target_ref)
795 797
796 798 try:
797 799 log.debug('Pulling in source reference %s', source_ref)
798 800 source_repo._validate_pull_reference(source_ref)
799 801 shadow_repo._local_pull(source_repo.path, source_ref)
800 802 except CommitDoesNotExistError:
801 803 log.exception('Failure when doing local pull on hg shadow repo')
802 804 return MergeResponse(
803 805 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
804 806 metadata={'source_ref': source_ref})
805 807
806 808 merge_ref = None
807 809 merge_commit_id = None
808 810 close_commit_id = None
809 811 merge_failure_reason = MergeFailureReason.NONE
810 812 metadata = {}
811 813
812 814 # enforce that close branch should be used only in case we source from
813 815 # an actual Branch
814 816 close_branch = close_branch and source_ref.type == 'branch'
815 817
817 819 # don't allow closing the branch if source and target are the same
817 819 close_branch = close_branch and source_ref.name != target_ref.name
818 820
819 821 needs_push_on_close = False
820 822 if close_branch and not use_rebase and not dry_run:
821 823 try:
822 824 close_commit_id, needs_push_on_close = shadow_repo._local_close(
823 825 target_ref, merger_name, merger_email, source_ref)
824 826 merge_possible = True
825 827 except RepositoryError:
826 828 log.exception('Failure when doing close branch on '
827 829 'shadow repo: %s', shadow_repo)
828 830 merge_possible = False
829 831 merge_failure_reason = MergeFailureReason.MERGE_FAILED
830 832 else:
831 833 merge_possible = True
832 834
833 835 needs_push = False
834 836 if merge_possible:
835 837
836 838 try:
837 839 merge_commit_id, needs_push = shadow_repo._local_merge(
838 840 target_ref, merge_message, merger_name, merger_email,
839 841 source_ref, use_rebase=use_rebase,
840 842 close_commit_id=close_commit_id, dry_run=dry_run)
841 843 merge_possible = True
842 844
843 845 # read the state of the close action, as it
844 846 # may have required a push
845 847 needs_push = needs_push or needs_push_on_close
846 848
847 849 # Set a bookmark pointing to the merge commit. This bookmark
848 850 # may be used to easily identify the last successful merge
849 851 # commit in the shadow repository.
850 852 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
851 853 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
852 854 except SubrepoMergeError:
853 855 log.exception(
854 856 'Subrepo merge error during local merge on hg shadow repo.')
855 857 merge_possible = False
856 858 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
857 859 needs_push = False
858 860 except RepositoryError as e:
859 861 log.exception('Failure when doing local merge on hg shadow repo')
862 metadata['unresolved_files'] = 'no unresolved files found'
863
860 864 if isinstance(e, UnresolvedFilesInRepo):
861 865 all_conflicts = list(e.args[0])
862 866 max_conflicts = 20
863 867 if len(all_conflicts) > max_conflicts:
864 868 conflicts = all_conflicts[:max_conflicts] \
865 869 + [f'and {len(all_conflicts)-max_conflicts} more.']
866 870 else:
867 871 conflicts = all_conflicts
868 872 metadata['unresolved_files'] = \
869 873 '\n* conflict: ' + \
870 874 ('\n * conflict: '.join(conflicts))
871 875
872 876 merge_possible = False
873 877 merge_failure_reason = MergeFailureReason.MERGE_FAILED
874 878 needs_push = False
875 879
876 880 if merge_possible and not dry_run:
877 881 if needs_push:
878 882 # In case the target is a bookmark, update it, so after pushing
879 883 # the bookmark is also updated in the target.
880 884 if target_ref.type == 'book':
881 885 shadow_repo.bookmark(
882 886 target_ref.name, revision=merge_commit_id)
883 887 try:
884 888 shadow_repo_with_hooks = self.get_shadow_instance(
885 889 shadow_repository_path,
886 890 enable_hooks=True)
887 891 # This is the actual merge action, we push from shadow
888 892 # into origin.
889 893 # Note: the push_branches option will push any new branch
890 894 # defined in the source repository to the target. This may
891 895 # be dangerous as branches are permanent in Mercurial.
892 896 # This feature was requested in issue #441.
893 897 shadow_repo_with_hooks._local_push(
894 898 merge_commit_id, self.path, push_branches=True,
895 899 enable_hooks=True)
896 900
897 901 # maybe we also need to push the close_commit_id
898 902 if close_commit_id:
899 903 shadow_repo_with_hooks._local_push(
900 904 close_commit_id, self.path, push_branches=True,
901 905 enable_hooks=True)
902 906 merge_succeeded = True
903 907 except RepositoryError:
904 908 log.exception(
905 909 'Failure when doing local push from the shadow '
906 910 'repository to the target repository at %s.', self.path)
907 911 merge_succeeded = False
908 912 merge_failure_reason = MergeFailureReason.PUSH_FAILED
909 913 metadata['target'] = 'hg shadow repo'
910 914 metadata['merge_commit'] = merge_commit_id
911 915 else:
912 916 merge_succeeded = True
913 917 else:
914 918 merge_succeeded = False
915 919
916 920 return MergeResponse(
917 921 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
918 922 metadata=metadata)
919 923
920 924 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 925 config = self.config.copy()
922 926 if not enable_hooks:
923 927 config.clear_section('hooks')
924 928 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925 929
926 930 def _validate_pull_reference(self, reference):
927 931 if not (reference.name in self.bookmarks or
928 932 reference.name in self.branches or
929 933 self.get_commit(reference.commit_id)):
930 934 raise CommitDoesNotExistError(
931 935 'Unknown branch, bookmark or commit id')
932 936
933 937 def _local_pull(self, repository_path, reference):
934 938 """
935 939 Fetch a branch, bookmark or commit from a local repository.
936 940 """
937 941 repository_path = os.path.abspath(repository_path)
938 942 if repository_path == self.path:
939 943 raise ValueError('Cannot pull from the same repository')
940 944
941 945 reference_type_to_option_name = {
942 946 'book': 'bookmark',
943 947 'branch': 'branch',
944 948 }
945 949 option_name = reference_type_to_option_name.get(
946 950 reference.type, 'revision')
947 951
948 952 if option_name == 'revision':
949 953 ref = reference.commit_id
950 954 else:
951 955 ref = reference.name
952 956
953 957 options = {option_name: [ref]}
954 958 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 959 self._remote.invalidate_vcs_cache()
956 960
957 961 def bookmark(self, bookmark, revision=None):
958 962 if isinstance(bookmark, str):
959 963 bookmark = safe_str(bookmark)
960 964 self._remote.bookmark(bookmark, revision=revision)
961 965 self._remote.invalidate_vcs_cache()
962 966
963 967 def get_path_permissions(self, username):
964 968 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965 969
966 970 def read_patterns(suffix):
967 971 svalue = None
968 972 for section, option in [
969 973 ('narrowacl', username + suffix),
970 974 ('narrowacl', 'default' + suffix),
971 975 ('narrowhgacl', username + suffix),
972 976 ('narrowhgacl', 'default' + suffix)
973 977 ]:
974 978 try:
975 979 svalue = hgacl.get(section, option)
976 980 break # stop at the first value we find
977 981 except configparser.NoOptionError:
978 982 pass
979 983 if not svalue:
980 984 return None
981 985 result = ['/']
982 986 for pattern in svalue.split():
983 987 result.append(pattern)
984 988 if '*' not in pattern and '?' not in pattern:
985 989 result.append(pattern + '/*')
986 990 return result
987 991
988 992 if os.path.exists(hgacl_file):
989 993 try:
990 994 hgacl = configparser.RawConfigParser()
991 995 hgacl.read(hgacl_file)
992 996
993 997 includes = read_patterns('.includes')
994 998 excludes = read_patterns('.excludes')
995 999 return BasePathPermissionChecker.create_from_patterns(
996 1000 includes, excludes)
997 1001 except BaseException as e:
998 1002 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 1003 hgacl_file, self.name, e)
1000 1004 raise exceptions.RepositoryRequirementError(msg)
1001 1005 else:
1002 1006 return None
1003 1007
1004 1008
1005 1009 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1006 1010
1007 1011 def _commit_factory(self, commit_id):
1008 1012 if isinstance(commit_id, int):
1009 1013 return self.repo.get_commit(
1010 1014 commit_idx=commit_id, pre_load=self.pre_load)
1011 1015 else:
1012 1016 return self.repo.get_commit(
1013 1017 commit_id=commit_id, pre_load=self.pre_load)
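Before the updated test module below, a short usage sketch of the collection generator defined above; the repository path and the loop are illustrative only, not part of this changeset:

# Hedged sketch; the path is a placeholder. Passing a branch name makes
# get_commits() build a revset filter and return a
# MercurialIndexBasedCollectionGenerator, whose _commit_factory resolves
# integer revset results via commit_idx and hash strings via commit_id.
repo = MercurialRepository('/srv/repos/example-hg')
for commit in repo.get_commits(branch_name='default', show_hidden=False):
    print(commit.raw_id, commit.message)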
@@ -1,1181 +1,1181 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import os
21 21
22 22 import mock
23 23 import pytest
24 24
25 25 from rhodecode.lib.str_utils import safe_bytes
26 26 from rhodecode.lib.utils import make_db_config
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 31 from rhodecode.lib.vcs.exceptions import (
32 32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35 35
36 36
37 37 pytestmark = pytest.mark.backends("hg")
38 38
39 39
40 40 def repo_path_generator():
41 41 """
42 42 Return a different path to be used for cloning repos.
43 43 """
44 44 i = 0
45 45 while True:
46 46 i += 1
47 47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48 48
49 49 REPO_PATH_GENERATOR = repo_path_generator()
50 50
51 51
52 52 @pytest.fixture(scope='class', autouse=True)
53 53 def repo(request, baseapp):
54 54 repo = MercurialRepository(TEST_HG_REPO)
55 55 if request.cls:
56 56 request.cls.repo = repo
57 57 return repo
58 58
59 59
60 60 class TestMercurialRepository(object):
61 61
62 62 # pylint: disable=protected-access
63 63
64 64 def get_clone_repo(self):
65 65 """
66 66 Return a clone of the base repo.
67 67 """
68 68 clone_path = next(REPO_PATH_GENERATOR)
69 69 repo_clone = MercurialRepository(
70 70 clone_path, create=True, src_url=self.repo.path)
71 71
72 72 return repo_clone
73 73
74 74 def get_empty_repo(self):
75 75 """
76 76 Return an empty repo.
77 77 """
78 78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79 79
80 80 def test_wrong_repo_path(self):
81 81 wrong_repo_path = '/tmp/errorrepo_hg'
82 82 with pytest.raises(RepositoryError):
83 83 MercurialRepository(wrong_repo_path)
84 84
85 85 def test_unicode_path_repo(self):
86 86 with pytest.raises(VCSError):
87 87 MercurialRepository('iShouldFail')
88 88
89 89 def test_unicode_commit_id(self):
90 90 with pytest.raises(CommitDoesNotExistError):
91 91 self.repo.get_commit('unicode-commit-id')
92 92 with pytest.raises(CommitDoesNotExistError):
93 93 self.repo.get_commit('unícøde-spéçial-chäråcter-commit-id')
94 94
95 95 def test_unicode_bookmark(self):
96 96 self.repo.bookmark('unicode-bookmark')
97 97 self.repo.bookmark('unícøde-spéçial-chäråcter-bookmark')
98 98
99 99 def test_unicode_branch(self):
100 100 with pytest.raises(KeyError):
101 101 assert self.repo.branches['unicode-branch']
102 102 with pytest.raises(KeyError):
103 103 assert self.repo.branches['unícøde-spéçial-chäråcter-branch']
104 104
105 105 def test_repo_clone(self):
106 106 if os.path.exists(TEST_HG_REPO_CLONE):
107 107 self.fail(
108 108 'Cannot test mercurial clone repo as location %s already '
109 109 'exists. You should manually remove it first.'
110 110 % TEST_HG_REPO_CLONE)
111 111
112 112 repo = MercurialRepository(TEST_HG_REPO)
113 113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 114 src_url=TEST_HG_REPO)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116 # Checking hashes of commits should be enough
117 117 for commit in repo.get_commits():
118 118 raw_id = commit.raw_id
119 119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120 120
121 121 def test_repo_clone_with_update(self):
122 122 repo = MercurialRepository(TEST_HG_REPO)
123 123 repo_clone = MercurialRepository(
124 124 TEST_HG_REPO_CLONE + '_w_update',
125 125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 127
128 128 # check if current workdir was updated
129 129 assert os.path.isfile(
130 130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131 131
132 132 def test_repo_clone_without_update(self):
133 133 repo = MercurialRepository(TEST_HG_REPO)
134 134 repo_clone = MercurialRepository(
135 135 TEST_HG_REPO_CLONE + '_wo_update',
136 136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 138 assert not os.path.isfile(
139 139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140 140
141 141 def test_commit_ids(self):
142 142 # there are 21 commits at bitbucket now
143 143 # so we can assume they would be available from now on
144 144 subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
145 145 '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b',
146 146 '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e',
147 147 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
148 148 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79',
149 149 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21',
150 150 '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb',
151 151 '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842',
152 152 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7',
153 153 'eada5a770da98ab0dd7325e29d00e0714f228d09'
154 154 }
155 155 assert subset.issubset(set(self.repo.commit_ids))
156 156
157 157 # check if we have the proper order of commits
158 158 org = [
159 159 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
160 160 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
161 161 '6cba7170863a2411822803fa77a0a264f1310b35',
162 162 '56349e29c2af3ac913b28bde9a2c6154436e615b',
163 163 '2dda4e345facb0ccff1a191052dd1606dba6781d',
164 164 '6fff84722075f1607a30f436523403845f84cd9e',
165 165 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
166 166 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
167 167 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
168 168 'be90031137367893f1c406e0a8683010fd115b79',
169 169 'db8e58be770518cbb2b1cdfa69146e47cd481481',
170 170 '84478366594b424af694a6c784cb991a16b87c21',
171 171 '17f8e105dddb9f339600389c6dc7175d395a535c',
172 172 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
173 173 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
174 174 '786facd2c61deb9cf91e9534735124fb8fc11842',
175 175 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
176 176 'aa6a0de05b7612707db567078e130a6cd114a9a7',
177 177 'eada5a770da98ab0dd7325e29d00e0714f228d09',
178 178 '2c1885c735575ca478bf9e17b0029dca68824458',
179 179 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
180 180 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
181 181 '4fb8326d78e5120da2c7468dcf7098997be385da',
182 182 '62b4a097164940bd66030c4db51687f3ec035eed',
183 183 '536c1a19428381cfea92ac44985304f6a8049569',
184 184 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
185 185 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
186 186 'f8940bcb890a98c4702319fbe36db75ea309b475',
187 187 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
188 188 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
189 189 'ee87846a61c12153b51543bf860e1026c6d3dcba',
190 190 ]
191 191 assert org == self.repo.commit_ids[:31]
192 192
193 193 def test_iter_slice(self):
194 194 sliced = list(self.repo[:10])
195 195 itered = list(self.repo)[:10]
196 196 assert sliced == itered
197 197
198 198 def test_slicing(self):
199 199 # 4 1 5 10 95
200 200 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
201 201 (10, 20, 10), (5, 100, 95)]:
202 202 indexes = list(self.repo[sfrom:sto])
203 203 assert len(indexes) == size
204 204 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
205 205 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
206 206
207 207 def test_branches(self):
208 208 # TODO: Need more tests here
209 209
210 210 # active branches
211 211 assert 'default' in self.repo.branches
212 212 assert 'stable' in self.repo.branches
213 213
214 214 # closed
215 215 assert 'git' in self.repo._get_branches(closed=True)
216 216 assert 'web' in self.repo._get_branches(closed=True)
217 217
218 218 for name, id in self.repo.branches.items():
219 219 assert isinstance(self.repo.get_commit(id), MercurialCommit)
220 220
221 221 def test_tip_in_tags(self):
222 222 # tip is always a tag
223 223 assert 'tip' in self.repo.tags
224 224
225 225 def test_tip_commit_in_tags(self):
226 226 tip = self.repo.get_commit()
227 227 assert self.repo.tags['tip'] == tip.raw_id
228 228
229 229 def test_initial_commit(self):
230 230 init_commit = self.repo.get_commit(commit_idx=0)
231 231 init_author = init_commit.author
232 232
233 233 assert init_commit.message == 'initial import'
234 234 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
235 235 assert init_author == init_commit.committer
236 236 assert sorted(init_commit._file_paths) == sorted([
237 237 'vcs/__init__.py',
238 238 'vcs/backends/BaseRepository.py',
239 239 'vcs/backends/__init__.py',
240 240 ])
241 241 assert sorted(init_commit._dir_paths) == sorted(
242 242 ['', 'vcs', 'vcs/backends'])
243 243
244 244 assert init_commit._dir_paths + init_commit._file_paths == \
245 245 init_commit._paths
246 246
247 247 with pytest.raises(NodeDoesNotExistError):
248 248 init_commit.get_node(path='foobar')
249 249
250 250 node = init_commit.get_node('vcs/')
251 251 assert hasattr(node, 'kind')
252 252 assert node.kind == NodeKind.DIR
253 253
254 254 node = init_commit.get_node('vcs')
255 255 assert hasattr(node, 'kind')
256 256 assert node.kind == NodeKind.DIR
257 257
258 258 node = init_commit.get_node('vcs/__init__.py')
259 259 assert hasattr(node, 'kind')
260 260 assert node.kind == NodeKind.FILE
261 261
262 262 def test_not_existing_commit(self):
263 263 # rawid
264 264 with pytest.raises(RepositoryError):
265 265 self.repo.get_commit('abcd' * 10)
266 266 # shortid
267 267 with pytest.raises(RepositoryError):
268 268 self.repo.get_commit('erro' * 4)
269 269 # numeric
270 270 with pytest.raises(RepositoryError):
271 271 self.repo.get_commit(commit_idx=self.repo.count() + 1)
272 272
273 273 # Small chance we ever get to this one
274 274 idx = pow(2, 30)
275 275 with pytest.raises(RepositoryError):
276 276 self.repo.get_commit(commit_idx=idx)
277 277
278 278 def test_commit10(self):
279 279 commit10 = self.repo.get_commit(commit_idx=10)
280 280 README = """===
281 281 VCS
282 282 ===
283 283
284 284 Various Version Control System management abstraction layer for Python.
285 285
286 286 Introduction
287 287 ------------
288 288
289 289 TODO: To be written...
290 290
291 291 """
292 292 node = commit10.get_node('README.rst')
293 293 assert node.kind == NodeKind.FILE
294 294 assert node.str_content == README
295 295
296 296 def test_local_clone(self):
297 297 clone_path = next(REPO_PATH_GENERATOR)
298 298 self.repo._local_clone(clone_path)
299 299 repo_clone = MercurialRepository(clone_path)
300 300
301 301 assert self.repo.commit_ids == repo_clone.commit_ids
302 302
303 303 def test_local_clone_fails_if_target_exists(self):
304 304 with pytest.raises(RepositoryError):
305 305 self.repo._local_clone(self.repo.path)
306 306
307 307 def test_update(self):
308 308 repo_clone = self.get_clone_repo()
309 309 branches = repo_clone.branches
310 310
311 311 repo_clone._update('default')
312 312 assert branches['default'] == repo_clone._identify()
313 313 repo_clone._update('stable')
314 314 assert branches['stable'] == repo_clone._identify()
315 315
316 316 def test_local_pull_branch(self):
317 317 target_repo = self.get_empty_repo()
318 318 source_repo = self.get_clone_repo()
319 319
320 320 default = Reference(
321 321 'branch', 'default', source_repo.branches['default'])
322 322 target_repo._local_pull(source_repo.path, default)
323 323 target_repo = MercurialRepository(target_repo.path)
324 324 assert (target_repo.branches['default'] ==
325 325 source_repo.branches['default'])
326 326
327 327 stable = Reference('branch', 'stable', source_repo.branches['stable'])
328 328 target_repo._local_pull(source_repo.path, stable)
329 329 target_repo = MercurialRepository(target_repo.path)
330 330 assert target_repo.branches['stable'] == source_repo.branches['stable']
331 331
332 332 def test_local_pull_bookmark(self):
333 333 target_repo = self.get_empty_repo()
334 334 source_repo = self.get_clone_repo()
335 335
336 336 commits = list(source_repo.get_commits(branch_name='default'))
337 337 foo1_id = commits[-5].raw_id
338 338 foo1 = Reference('book', 'foo1', foo1_id)
339 339 source_repo._update(foo1_id)
340 340 source_repo.bookmark('foo1')
341 341
342 342 foo2_id = commits[-3].raw_id
343 343 foo2 = Reference('book', 'foo2', foo2_id)
344 344 source_repo._update(foo2_id)
345 345 source_repo.bookmark('foo2')
346 346
347 347 target_repo._local_pull(source_repo.path, foo1)
348 348 target_repo = MercurialRepository(target_repo.path)
349 349 assert target_repo.branches['default'] == commits[-5].raw_id
350 350
351 351 target_repo._local_pull(source_repo.path, foo2)
352 352 target_repo = MercurialRepository(target_repo.path)
353 353 assert target_repo.branches['default'] == commits[-3].raw_id
354 354
355 355 def test_local_pull_commit(self):
356 356 target_repo = self.get_empty_repo()
357 357 source_repo = self.get_clone_repo()
358 358
359 359 commits = list(source_repo.get_commits(branch_name='default'))
360 360 commit_id = commits[-5].raw_id
361 361 commit = Reference('rev', commit_id, commit_id)
362 362 target_repo._local_pull(source_repo.path, commit)
363 363 target_repo = MercurialRepository(target_repo.path)
364 364 assert target_repo.branches['default'] == commit_id
365 365
366 366 commit_id = commits[-3].raw_id
367 367 commit = Reference('rev', commit_id, commit_id)
368 368 target_repo._local_pull(source_repo.path, commit)
369 369 target_repo = MercurialRepository(target_repo.path)
370 370 assert target_repo.branches['default'] == commit_id
371 371
372 372 def test_local_pull_from_same_repo(self):
373 373 reference = Reference('branch', 'default', None)
374 374 with pytest.raises(ValueError):
375 375 self.repo._local_pull(self.repo.path, reference)
376 376
377 377 def test_validate_pull_reference_raises_on_missing_reference(
378 378 self, vcsbackend_hg):
379 379 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
380 380 reference = Reference(
381 381 'book', 'invalid_reference', 'a' * 40)
382 382
383 383 with pytest.raises(CommitDoesNotExistError):
384 384 target_repo._validate_pull_reference(reference)
385 385
386 386 def test_heads(self):
387 387 assert set(self.repo._heads()) == set(self.repo.branches.values())
388 388
389 389 def test_ancestor(self):
390 390 commits = [
391 391 c.raw_id for c in self.repo.get_commits(branch_name='default')]
392 392 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
393 393 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
394 394
395 395 def test_local_push(self):
396 396 target_repo = self.get_empty_repo()
397 397
398 398 revisions = list(self.repo.get_commits(branch_name='default'))
399 399 revision = revisions[-5].raw_id
400 400 self.repo._local_push(revision, target_repo.path)
401 401
402 402 target_repo = MercurialRepository(target_repo.path)
403 403
404 404 assert target_repo.branches['default'] == revision
405 405
406 406 def test_hooks_can_be_enabled_for_local_push(self):
407 407 revision = 'deadbeef'
408 408 repo_path = 'test_group/test_repo'
409 409 with mock.patch.object(self.repo, '_remote') as remote_mock:
410 410 self.repo._local_push(revision, repo_path, enable_hooks=True)
411 411 remote_mock.push.assert_called_once_with(
412 412 [revision], repo_path, hooks=True, push_branches=False)
413 413
414 414 def test_local_merge(self, vcsbackend_hg):
415 415 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
416 416 source_repo = vcsbackend_hg.clone_repo(target_repo)
417 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
417 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
418 418 target_repo = MercurialRepository(target_repo.path)
419 419 target_rev = target_repo.branches['default']
420 420 target_ref = Reference(
421 421 type='branch', name='default', commit_id=target_rev)
422 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
422 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
423 423 source_repo = MercurialRepository(source_repo.path)
424 424 source_rev = source_repo.branches['default']
425 425 source_ref = Reference(
426 426 type='branch', name='default', commit_id=source_rev)
427 427
428 428 target_repo._local_pull(source_repo.path, source_ref)
429 429
430 430 merge_message = 'Merge message\n\nDescription:...'
431 431 user_name = 'Albert Einstein'
432 432 user_email = 'albert@einstein.com'
433 433 merge_commit_id, needs_push = target_repo._local_merge(
434 434 target_ref, merge_message, user_name, user_email, source_ref)
435 435 assert needs_push
436 436
437 437 target_repo = MercurialRepository(target_repo.path)
438 438 assert target_repo.commit_ids[-3] == target_rev
439 439 assert target_repo.commit_ids[-2] == source_rev
440 440 last_commit = target_repo.get_commit(merge_commit_id)
441 441 assert last_commit.message.strip() == merge_message
442 442 assert last_commit.author == '%s <%s>' % (user_name, user_email)
443 443
444 444 assert not os.path.exists(
445 445 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
446 446
447 447 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
448 448 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
449 449 source_repo = vcsbackend_hg.clone_repo(target_repo)
450 450 target_rev = target_repo.branches['default']
451 451 target_ref = Reference(
452 452 type='branch', name='default', commit_id=target_rev)
453 453 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
454 454 source_repo = MercurialRepository(source_repo.path)
455 455 source_rev = source_repo.branches['default']
456 456 source_ref = Reference(
457 457 type='branch', name='default', commit_id=source_rev)
458 458
459 459 target_repo._local_pull(source_repo.path, source_ref)
460 460
461 461 merge_message = 'Merge message\n\nDescription:...'
462 462 user_name = 'Albert Einstein'
463 463 user_email = 'albert@einstein.com'
464 464 merge_commit_id, needs_push = target_repo._local_merge(
465 465 target_ref, merge_message, user_name, user_email, source_ref)
466 466 assert merge_commit_id == source_rev
467 467 assert needs_push
468 468
469 469 target_repo = MercurialRepository(target_repo.path)
470 470 assert target_repo.commit_ids[-2] == target_rev
471 471 assert target_repo.commit_ids[-1] == source_rev
472 472
473 473 assert not os.path.exists(
474 474 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
475 475
476 476 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
477 477 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
478 478 target_rev = target_repo.branches['default']
479 479 target_ref = Reference(
480 480 type='branch', name='default', commit_id=target_rev)
481 481
482 482 merge_message = 'Merge message\n\nDescription:...'
483 483 user_name = 'Albert Einstein'
484 484 user_email = 'albert@einstein.com'
485 485 merge_commit_id, needs_push = target_repo._local_merge(
486 486 target_ref, merge_message, user_name, user_email, target_ref)
487 487 assert merge_commit_id == target_rev
488 488 assert not needs_push
489 489
490 490 target_repo = MercurialRepository(target_repo.path)
491 491 assert target_repo.commit_ids[-1] == target_rev
492 492
493 493 assert not os.path.exists(
494 494 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
495 495
496 496 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
497 497 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
498 498 source_repo = vcsbackend_hg.clone_repo(target_repo)
499 499 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
500 500 target_repo = MercurialRepository(target_repo.path)
501 501 target_rev = target_repo.branches['default']
502 502 target_ref = Reference(
503 503 type='branch', name='default', commit_id=target_rev)
504 504 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
505 505 source_repo = MercurialRepository(source_repo.path)
506 506 source_rev = source_repo.branches['default']
507 507 source_ref = Reference(
508 508 type='branch', name='default', commit_id=source_rev)
509 509
510 510 target_repo._local_pull(source_repo.path, source_ref)
511 511 with pytest.raises(RepositoryError):
512 512 target_repo._local_merge(
513 513 target_ref, 'merge_message', 'user name', 'user@name.com',
514 514 source_ref)
515 515
516 516 # Check we are not left in an intermediate merge state
517 517 assert not os.path.exists(
518 518 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
519 519
520 520 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
521 521 commits = [
522 522 {'message': 'a'},
523 523 {'message': 'b', 'branch': 'b'},
524 524 ]
525 525 repo = backend_hg.create_repo(commits)
526 526 commit_ids = backend_hg.commit_ids
527 527 target_ref = Reference(
528 528 type='branch', name='default', commit_id=commit_ids['a'])
529 529 source_ref = Reference(
530 530 type='branch', name='b', commit_id=commit_ids['b'])
531 531 merge_message = 'Merge message\n\nDescription:...'
532 532 user_name = 'Albert Einstein'
533 533 user_email = 'albert@einstein.com'
534 534 vcs_repo = repo.scm_instance()
535 535 merge_commit_id, needs_push = vcs_repo._local_merge(
536 536 target_ref, merge_message, user_name, user_email, source_ref)
537 537 assert merge_commit_id != source_ref.commit_id
538 538 assert needs_push is True
539 539 commit = vcs_repo.get_commit(merge_commit_id)
540 540 assert commit.merge is True
541 541 assert commit.message == merge_message
542 542
543 543 def test_maybe_prepare_merge_workspace(self):
544 544 workspace = self.repo._maybe_prepare_merge_workspace(
545 545 1, 'pr2', 'unused', 'unused2')
546 546
547 547 assert os.path.isdir(workspace)
548 548 workspace_repo = MercurialRepository(workspace)
549 549 assert workspace_repo.branches == self.repo.branches
550 550
551 551 # Calling it a second time should also succeed
552 552 workspace = self.repo._maybe_prepare_merge_workspace(
553 553 1, 'pr2', 'unused', 'unused2')
554 554 assert os.path.isdir(workspace)
555 555
556 556 def test_cleanup_merge_workspace(self):
557 557 workspace = self.repo._maybe_prepare_merge_workspace(
558 558 1, 'pr3', 'unused', 'unused2')
559 559
560 560 assert os.path.isdir(workspace)
561 561 self.repo.cleanup_merge_workspace(1, 'pr3')
562 562
563 563 assert not os.path.exists(workspace)
564 564
565 565 def test_cleanup_merge_workspace_invalid_workspace_id(self):
566 566         # No assert: even for a nonexistent workspace this function
567 567         # should still succeed.
568 568 self.repo.cleanup_merge_workspace(1, 'pr4')
569 569
570 570 def test_merge_target_is_bookmark(self, vcsbackend_hg):
571 571 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
572 572 source_repo = vcsbackend_hg.clone_repo(target_repo)
573 573 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
574 574 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
575 575 imc = source_repo.in_memory_commit
576 576 imc.add(FileNode(b'file_x', content=source_repo.name))
577 577 imc.commit(
578 578 message='Automatic commit from repo merge test',
579 579 author='Automatic <automatic@rhodecode.com>')
580 580 target_commit = target_repo.get_commit()
581 581 source_commit = source_repo.get_commit()
582 582 default_branch = target_repo.DEFAULT_BRANCH_NAME
583 583 bookmark_name = 'bookmark'
584 584 target_repo._update(default_branch)
585 585 target_repo.bookmark(bookmark_name)
586 586 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
587 587 source_ref = Reference('branch', default_branch, source_commit.raw_id)
588 588 workspace_id = 'test-merge'
589 589 repo_id = repo_id_generator(target_repo.path)
590 590 merge_response = target_repo.merge(
591 591 repo_id, workspace_id, target_ref, source_repo, source_ref,
592 592 'test user', 'test@rhodecode.com', 'merge message 1',
593 593 dry_run=False)
594 594 expected_merge_response = MergeResponse(
595 595 True, True, merge_response.merge_ref,
596 596 MergeFailureReason.NONE)
597 597 assert merge_response == expected_merge_response
598 598
599 599 target_repo = backends.get_backend(vcsbackend_hg.alias)(
600 600 target_repo.path)
601 601 target_commits = list(target_repo.get_commits())
602 602 commit_ids = [c.raw_id for c in target_commits[:-1]]
603 603 assert source_ref.commit_id in commit_ids
604 604 assert target_ref.commit_id in commit_ids
605 605
606 606 merge_commit = target_commits[-1]
607 607 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
608 608 assert merge_commit.message.strip() == 'merge message 1'
609 609 assert merge_commit.author == 'test user <test@rhodecode.com>'
610 610
611 611 # Check the bookmark was updated in the target repo
612 612 assert (
613 613 target_repo.bookmarks[bookmark_name] ==
614 614 merge_response.merge_ref.commit_id)
615 615
616 616 def test_merge_source_is_bookmark(self, vcsbackend_hg):
617 617 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
618 618 source_repo = vcsbackend_hg.clone_repo(target_repo)
619 619 imc = source_repo.in_memory_commit
620 620 imc.add(FileNode(b'file_x', content=source_repo.name))
621 621 imc.commit(
622 622 message='Automatic commit from repo merge test',
623 623 author='Automatic <automatic@rhodecode.com>')
624 624 target_commit = target_repo.get_commit()
625 625 source_commit = source_repo.get_commit()
626 626 default_branch = target_repo.DEFAULT_BRANCH_NAME
627 627 bookmark_name = 'bookmark'
628 628 target_ref = Reference('branch', default_branch, target_commit.raw_id)
629 629 source_repo._update(default_branch)
630 630 source_repo.bookmark(bookmark_name)
631 631 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
632 632 workspace_id = 'test-merge'
633 633 repo_id = repo_id_generator(target_repo.path)
634 634 merge_response = target_repo.merge(
635 635 repo_id, workspace_id, target_ref, source_repo, source_ref,
636 636 'test user', 'test@rhodecode.com', 'merge message 1',
637 637 dry_run=False)
638 638 expected_merge_response = MergeResponse(
639 639 True, True, merge_response.merge_ref,
640 640 MergeFailureReason.NONE)
641 641 assert merge_response == expected_merge_response
642 642
643 643 target_repo = backends.get_backend(vcsbackend_hg.alias)(
644 644 target_repo.path)
645 645 target_commits = list(target_repo.get_commits())
646 646 commit_ids = [c.raw_id for c in target_commits]
647 647 assert source_ref.commit_id == commit_ids[-1]
648 648 assert target_ref.commit_id == commit_ids[-2]
649 649
650 650 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
651 651 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
652 652 source_repo = vcsbackend_hg.clone_repo(target_repo)
653 653 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
654 654 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
655 655
656 656 # add an extra head to the target repo
657 657 imc = target_repo.in_memory_commit
658 658 imc.add(FileNode(b'file_x', content='foo'))
659 659 commits = list(target_repo.get_commits())
660 660 imc.commit(
661 661 message='Automatic commit from repo merge test',
662 662 author='Automatic <automatic@rhodecode.com>', parents=commits[0:1])
663 663
664 664 target_commit = target_repo.get_commit()
665 665 source_commit = source_repo.get_commit()
666 666 default_branch = target_repo.DEFAULT_BRANCH_NAME
667 667 target_repo._update(default_branch)
668 668
669 669 target_ref = Reference('branch', default_branch, target_commit.raw_id)
670 670 source_ref = Reference('branch', default_branch, source_commit.raw_id)
671 671 workspace_id = 'test-merge'
672 672
673 673 assert len(target_repo._heads(branch='default')) == 2
674 674 heads = target_repo._heads(branch='default')
675 675 expected_merge_response = MergeResponse(
676 676 False, False, None,
677 677 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
678 678 metadata={'heads': heads})
679 679 repo_id = repo_id_generator(target_repo.path)
680 680 merge_response = target_repo.merge(
681 681 repo_id, workspace_id, target_ref, source_repo, source_ref,
682 682 'test user', 'test@rhodecode.com', 'merge message 1',
683 683 dry_run=False)
684 684 assert merge_response == expected_merge_response
685 685
686 686 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
687 687 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
688 688 source_repo = vcsbackend_hg.clone_repo(target_repo)
689 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
690 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
689 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
690 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
691 691
692 692 imc = source_repo.in_memory_commit
693 693 imc.add(FileNode(b'file_x', content=safe_bytes(source_repo.name)))
694 694 imc.commit(
695 695 message='Automatic commit from repo merge test',
696 696 author='Automatic <automatic@rhodecode.com>')
697 697
698 698 target_commit = target_repo.get_commit()
699 699 source_commit = source_repo.get_commit()
700 700
701 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
701 vcsbackend_hg.add_file(source_repo, b'LICENSE', b'LICENSE Info')
702 702
703 703 default_branch = target_repo.DEFAULT_BRANCH_NAME
704 704 bookmark_name = 'bookmark'
705 705 source_repo._update(default_branch)
706 706 source_repo.bookmark(bookmark_name)
707 707
708 708 target_ref = Reference('branch', default_branch, target_commit.raw_id)
709 709 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
710 710 repo_id = repo_id_generator(target_repo.path)
711 711 workspace_id = 'test-merge'
712 712
713 713 merge_response = target_repo.merge(
714 714 repo_id, workspace_id, target_ref, source_repo, source_ref,
715 715 'test user', 'test@rhodecode.com', 'merge message 1',
716 716 dry_run=False, use_rebase=True)
717 717
718 718 expected_merge_response = MergeResponse(
719 719 True, True, merge_response.merge_ref,
720 720 MergeFailureReason.NONE)
721 721 assert merge_response == expected_merge_response
722 722
723 723 target_repo = backends.get_backend(vcsbackend_hg.alias)(
724 724 target_repo.path)
725 725 last_commit = target_repo.get_commit()
726 726 assert last_commit.message == source_commit.message
727 727 assert last_commit.author == source_commit.author
728 728 # This checks that we effectively did a rebase
729 729 assert last_commit.raw_id != source_commit.raw_id
730 730
731 731 # Check the target has only 4 commits: 2 were already in target and
732 732 # only two should have been added
733 733 assert len(target_repo.commit_ids) == 2 + 2
734 734
735 735
736 736 class TestGetShadowInstance(object):
737 737
738 738 @pytest.fixture()
739 739 def repo(self, vcsbackend_hg, monkeypatch):
740 740 repo = vcsbackend_hg.repo
741 741 monkeypatch.setattr(repo, 'config', mock.Mock())
742 742 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
743 743 return repo
744 744
745 745 def test_passes_config(self, repo):
746 746 shadow = repo.get_shadow_instance(repo.path)
747 747 assert shadow.config == repo.config.copy()
748 748
749 749 def test_disables_hooks(self, repo):
750 750 shadow = repo.get_shadow_instance(repo.path)
751 751 shadow.config.clear_section.assert_called_once_with('hooks')
752 752
753 753 def test_allows_to_keep_hooks(self, repo):
754 754 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
755 755 assert not shadow.config.clear_section.called
756 756
757 757
758 758 class TestMercurialCommit(object):
759 759
760 760 def _test_equality(self, commit):
761 761 idx = commit.idx
762 762 assert commit == self.repo.get_commit(commit_idx=idx)
763 763
764 764 def test_equality(self):
765 765 indexes = [0, 10, 20]
766 766 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
767 767 for commit in commits:
768 768 self._test_equality(commit)
769 769
770 770 def test_default_commit(self):
771 771 tip = self.repo.get_commit('tip')
772 772 assert tip == self.repo.get_commit()
773 773 assert tip == self.repo.get_commit(commit_id=None)
774 774 assert tip == self.repo.get_commit(commit_idx=None)
775 775 assert tip == list(self.repo[-1:])[0]
776 776
777 777 def test_root_node(self):
778 778 tip = self.repo.get_commit('tip')
779 779 assert tip.root is tip.get_node('')
780 780
781 781 def test_lazy_fetch(self):
782 782 """
783 783         Test that a commit's nodes expand and are cached as we walk through
784 784         the commit. This test is somewhat hard to write, as the order of the
785 785         checks is key here. Written by running command after command in a shell.
786 786 """
787 787 commit = self.repo.get_commit(commit_idx=45)
788 788 assert len(commit.nodes) == 0
789 789 root = commit.root
790 790 assert len(commit.nodes) == 1
791 791 assert len(root.nodes) == 8
792 792 # accessing root.nodes updates commit.nodes
793 793 assert len(commit.nodes) == 9
794 794
795 795 docs = root.get_node('docs')
796 796         # we haven't accessed anything new yet, as the docs dir was already cached
797 797 assert len(commit.nodes) == 9
798 798 assert len(docs.nodes) == 8
799 799 # accessing docs.nodes updates commit.nodes
800 800 assert len(commit.nodes) == 17
801 801
802 802 assert docs is commit.get_node('docs')
803 803 assert docs is root.nodes[0]
804 804 assert docs is root.dirs[0]
805 805 assert docs is commit.get_node('docs')
806 806
807 807 def test_nodes_with_commit(self):
808 808 commit = self.repo.get_commit(commit_idx=45)
809 809 root = commit.root
810 810 docs = root.get_node('docs')
811 811 assert docs is commit.get_node('docs')
812 812 api = docs.get_node('api')
813 813 assert api is commit.get_node('docs/api')
814 814 index = api.get_node('index.rst')
815 815 assert index is commit.get_node('docs/api/index.rst')
816 816 assert index is commit.get_node(
817 817 'docs').get_node('api').get_node('index.rst')
818 818
819 819 def test_branch_and_tags(self):
820 820 commit0 = self.repo.get_commit(commit_idx=0)
821 821 assert commit0.branch == 'default'
822 822 assert commit0.tags == []
823 823
824 824 commit10 = self.repo.get_commit(commit_idx=10)
825 825 assert commit10.branch == 'default'
826 826 assert commit10.tags == []
827 827
828 828 commit44 = self.repo.get_commit(commit_idx=44)
829 829 assert commit44.branch == 'web'
830 830
831 831 tip = self.repo.get_commit('tip')
832 832 assert 'tip' in tip.tags
833 833
834 834 def test_bookmarks(self):
835 835 commit0 = self.repo.get_commit(commit_idx=0)
836 836 assert commit0.bookmarks == []
837 837
838 838 def _test_file_size(self, idx, path, size):
839 839 node = self.repo.get_commit(commit_idx=idx).get_node(path)
840 840 assert node.is_file()
841 841 assert node.size == size
842 842
843 843 def test_file_size(self):
844 844 to_check = (
845 845 (10, 'setup.py', 1068),
846 846 (20, 'setup.py', 1106),
847 847 (60, 'setup.py', 1074),
848 848
849 849 (10, 'vcs/backends/base.py', 2921),
850 850 (20, 'vcs/backends/base.py', 3936),
851 851 (60, 'vcs/backends/base.py', 6189),
852 852 )
853 853 for idx, path, size in to_check:
854 854 self._test_file_size(idx, path, size)
855 855
856 856 def test_file_history_from_commits(self):
857 857 node = self.repo[10].get_node('setup.py')
858 858 commit_ids = [commit.raw_id for commit in node.history]
859 859 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
860 860
861 861 node = self.repo[20].get_node('setup.py')
862 862 node_ids = [commit.raw_id for commit in node.history]
863 863 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
864 864 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
865 865
866 866         # special case: we check history from a commit that changed this
867 867         # particular file, so we verify that this commit is included as well
868 868 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
869 869 .get_node('setup.py')
870 870 node_ids = [commit.raw_id for commit in node.history]
871 871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873 873
874 874 def test_file_history(self):
875 875         # we can only check whether those commits are present in the history,
876 876         # as we cannot update this test every time the file is changed
877 877 files = {
878 878 'setup.py': [7, 18, 45, 46, 47, 69, 77],
879 879 'vcs/nodes.py': [
880 880 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
881 881 'vcs/backends/hg.py': [
882 882 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
883 883 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
884 884 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
885 885 }
886 886 for path, indexes in files.items():
887 887 tip = self.repo.get_commit(commit_idx=indexes[-1])
888 888 node = tip.get_node(path)
889 889 node_indexes = [commit.idx for commit in node.history]
890 890 assert set(indexes).issubset(set(node_indexes)), (
891 891                 "We assumed that %s is a subset of the commits in which file %s "
892 892                 "has been changed, but the history of that node returned: %s"
893 893 % (indexes, path, node_indexes))
894 894
895 895 def test_file_annotate(self):
896 896 files = {
897 897 'vcs/backends/__init__.py': {
898 898 89: {
899 899 'lines_no': 31,
900 900 'commits': [
901 901 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
902 902 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
903 903 32, 32, 32, 32, 37, 32, 37, 37, 32,
904 904 32, 32
905 905 ]
906 906 },
907 907 20: {
908 908 'lines_no': 1,
909 909 'commits': [4]
910 910 },
911 911 55: {
912 912 'lines_no': 31,
913 913 'commits': [
914 914 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
915 915 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
916 916 32, 32, 32, 32, 37, 32, 37, 37, 32,
917 917 32, 32
918 918 ]
919 919 }
920 920 },
921 921 'vcs/exceptions.py': {
922 922 89: {
923 923 'lines_no': 18,
924 924 'commits': [
925 925 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
926 926 16, 16, 17, 16, 16, 18, 18, 18
927 927 ]
928 928 },
929 929 20: {
930 930 'lines_no': 18,
931 931 'commits': [
932 932 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 933 16, 16, 17, 16, 16, 18, 18, 18
934 934 ]
935 935 },
936 936 55: {
937 937 'lines_no': 18,
938 938 'commits': [
939 939 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 940 17, 16, 16, 18, 18, 18
941 941 ]
942 942 }
943 943 },
944 944 'MANIFEST.in': {
945 945 89: {
946 946 'lines_no': 5,
947 947 'commits': [7, 7, 7, 71, 71]
948 948 },
949 949 20: {
950 950 'lines_no': 3,
951 951 'commits': [7, 7, 7]
952 952 },
953 953 55: {
954 954 'lines_no': 3,
955 955 'commits': [7, 7, 7]
956 956 }
957 957 }
958 958 }
959 959
960 960 for fname, commit_dict in files.items():
961 961 for idx, __ in commit_dict.items():
962 962 commit = self.repo.get_commit(commit_idx=idx)
963 963 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
964 964 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
965 965 assert l1_1 == l1_2
966 966                 l1 = [
967 967 x[2]().idx for x in commit.get_file_annotate(fname)]
968 968 l2 = files[fname][idx]['commits']
969 969 assert l1 == l2, (
970 970                     "The lists of commits for %s@commit_idx %s "
971 971                     "from the annotation list should match each other, "
972 972 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
973 973
974 974 def test_commit_state(self):
975 975 """
976 976         Tests which files have been added/changed/removed at a particular commit
977 977 """
978 978
979 979 # commit_id 46ad32a4f974:
980 980 # hg st --rev 46ad32a4f974
981 981 # changed: 13
982 982 # added: 20
983 983 # removed: 1
984 984 changed = set([
985 985 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
986 986 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
987 987 'vcs/__init__.py', 'vcs/backends/__init__.py',
988 988 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
989 989 'vcs/utils/__init__.py'])
990 990
991 991 added = set([
992 992 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
993 993 'docs/api/index.rst', 'docs/api/nodes.rst',
994 994 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
995 995 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
996 996 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
997 997 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
998 998 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
999 999 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1000 1000 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1001 1001 'vcs/web/simplevcs/views.py'])
1002 1002
1003 1003 removed = set(['docs/api.rst'])
1004 1004
1005 1005 commit64 = self.repo.get_commit('46ad32a4f974')
1006 1006 assert set((node.path for node in commit64.added)) == added
1007 1007 assert set((node.path for node in commit64.changed)) == changed
1008 1008 assert set((node.path for node in commit64.removed)) == removed
1009 1009
1010 1010 # commit_id b090f22d27d6:
1011 1011 # hg st --rev b090f22d27d6
1012 1012         # changed: 1
1013 1013         # added: 0
1014 1014         # removed: 0
1015 1015 commit88 = self.repo.get_commit('b090f22d27d6')
1016 1016 assert set((node.path for node in commit88.added)) == set()
1017 1017 assert set((node.path for node in commit88.changed)) == \
1018 1018 set(['.hgignore'])
1019 1019 assert set((node.path for node in commit88.removed)) == set()
1020 1020
1021 1021 #
1022 1022 # 85:
1023 1023 # added: 2 [
1024 1024 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1025 1025 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1026 1026 # removed: 1 ['vcs/utils/web.py']
1027 1027 commit85 = self.repo.get_commit(commit_idx=85)
1028 1028 assert set((node.path for node in commit85.added)) == set([
1029 1029 'vcs/utils/diffs.py',
1030 1030 'vcs/web/simplevcs/views/diffs.py'])
1031 1031 assert set((node.path for node in commit85.changed)) == set([
1032 1032 'vcs/web/simplevcs/models.py',
1033 1033 'vcs/web/simplevcs/utils.py',
1034 1034 'vcs/web/simplevcs/views/__init__.py',
1035 1035 'vcs/web/simplevcs/views/repository.py',
1036 1036 ])
1037 1037 assert set((node.path for node in commit85.removed)) == \
1038 1038 set(['vcs/utils/web.py'])
1039 1039
1040 1040 def test_files_state(self):
1041 1041 """
1042 1042 Tests state of FileNodes.
1043 1043 """
1044 1044 commit = self.repo.get_commit(commit_idx=85)
1045 1045 node = commit.get_node('vcs/utils/diffs.py')
1046 1046         assert node.state == NodeState.ADDED
1047 1047 assert node.added
1048 1048 assert not node.changed
1049 1049 assert not node.not_changed
1050 1050 assert not node.removed
1051 1051
1052 1052 commit = self.repo.get_commit(commit_idx=88)
1053 1053 node = commit.get_node('.hgignore')
1054 1054         assert node.state == NodeState.CHANGED
1055 1055 assert not node.added
1056 1056 assert node.changed
1057 1057 assert not node.not_changed
1058 1058 assert not node.removed
1059 1059
1060 1060 commit = self.repo.get_commit(commit_idx=85)
1061 1061 node = commit.get_node('setup.py')
1062 1062         assert node.state == NodeState.NOT_CHANGED
1063 1063 assert not node.added
1064 1064 assert not node.changed
1065 1065 assert node.not_changed
1066 1066 assert not node.removed
1067 1067
1068 1068         # If a node has REMOVED state then trying to fetch it raises
1069 1069         # NodeDoesNotExistError
1070 1070 commit = self.repo.get_commit(commit_idx=2)
1071 1071 path = 'vcs/backends/BaseRepository.py'
1072 1072 with pytest.raises(NodeDoesNotExistError):
1073 1073 commit.get_node(path)
1074 1074         # but it is listed in the commit's ``removed`` attribute
1075 1075 assert path in [rf.path for rf in commit.removed]
1076 1076
1077 1077 def test_commit_message_is_unicode(self):
1078 1078 for cm in self.repo:
1079 1079 assert type(cm.message) == str
1080 1080
1081 1081 def test_commit_author_is_unicode(self):
1082 1082 for cm in self.repo:
1083 1083 assert type(cm.author) == str
1084 1084
1085 1085 def test_repo_files_content_type(self):
1086 1086 test_commit = self.repo.get_commit(commit_idx=100)
1087 1087 for node in test_commit.get_node('/'):
1088 1088 if node.is_file():
1089 1089 assert type(node.content) == bytes
1090 1090 assert type(node.str_content) == str
1091 1091
1092 1092 def test_wrong_path(self):
1093 1093 # There is 'setup.py' in the root dir but not there:
1094 1094 path = 'foo/bar/setup.py'
1095 1095 with pytest.raises(VCSError):
1096 1096 self.repo.get_commit().get_node(path)
1097 1097
1098 1098 def test_author_email(self):
1099 1099 assert 'marcin@python-blog.com' == \
1100 1100 self.repo.get_commit('b986218ba1c9').author_email
1101 1101 assert 'lukasz.balcerzak@python-center.pl' == \
1102 1102 self.repo.get_commit('3803844fdbd3').author_email
1103 1103 assert '' == self.repo.get_commit('84478366594b').author_email
1104 1104
1105 1105 def test_author_username(self):
1106 1106 assert 'Marcin Kuzminski' == \
1107 1107 self.repo.get_commit('b986218ba1c9').author_name
1108 1108 assert 'Lukasz Balcerzak' == \
1109 1109 self.repo.get_commit('3803844fdbd3').author_name
1110 1110 assert 'marcink' == \
1111 1111 self.repo.get_commit('84478366594b').author_name
1112 1112
1113 1113
1114 1114 class TestLargeFileRepo(object):
1115 1115
1116 1116 def test_large_file(self, backend_hg):
1117 1117 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1118 1118
1119 1119 tip = repo.scm_instance().get_commit()
1120 1120 node = tip.get_node('.hglf/thisfileislarge')
1121 1121
1122 1122 lf_node = node.get_largefile_node()
1123 1123
1124 1124 assert lf_node.is_largefile() is True
1125 1125 assert lf_node.size == 1024000
1126 1126 assert lf_node.name == '.hglf/thisfileislarge'
1127 1127
1128 1128
1129 1129 class TestGetBranchName(object):
1130 1130 def test_returns_ref_name_when_type_is_branch(self):
1131 1131 ref = self._create_ref('branch', 'fake-name')
1132 1132 result = self.repo._get_branch_name(ref)
1133 1133 assert result == ref.name
1134 1134
1135 1135 @pytest.mark.parametrize("type_", ("book", "tag"))
1136 1136 def test_queries_remote_when_type_is_not_branch(self, type_):
1137 1137 ref = self._create_ref(type_, 'wrong-fake-name')
1138 1138 with mock.patch.object(self.repo, "_remote") as remote_mock:
1139 1139 remote_mock.ctx_branch.return_value = "fake-name"
1140 1140 result = self.repo._get_branch_name(ref)
1141 1141 assert result == "fake-name"
1142 1142 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1143 1143
1144 1144 def _create_ref(self, type_, name):
1145 1145 ref = mock.Mock()
1146 1146 ref.type = type_
1147 1147         ref.name = name
1148 1148 ref.commit_id = "deadbeef"
1149 1149 return ref
1150 1150
1151 1151
1152 1152 class TestIsTheSameBranch(object):
1153 1153 def test_returns_true_when_branches_are_equal(self):
1154 1154 source_ref = mock.Mock(name="source-ref")
1155 1155 target_ref = mock.Mock(name="target-ref")
1156 1156 branch_name_patcher = mock.patch.object(
1157 1157 self.repo, "_get_branch_name", return_value="default")
1158 1158 with branch_name_patcher as branch_name_mock:
1159 1159 result = self.repo._is_the_same_branch(source_ref, target_ref)
1160 1160
1161 1161 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1162 1162 assert branch_name_mock.call_args_list == expected_calls
1163 1163 assert result is True
1164 1164
1165 1165 def test_returns_false_when_branches_are_not_equal(self):
1166 1166 source_ref = mock.Mock(name="source-ref")
1167 1167 source_ref.name = "source-branch"
1168 1168 target_ref = mock.Mock(name="target-ref")
1169 1169         target_ref.name = "target-branch"
1170 1170
1171 1171 def side_effect(ref):
1172 1172 return ref.name
1173 1173
1174 1174 branch_name_patcher = mock.patch.object(
1175 1175 self.repo, "_get_branch_name", side_effect=side_effect)
1176 1176 with branch_name_patcher as branch_name_mock:
1177 1177 result = self.repo._is_the_same_branch(source_ref, target_ref)
1178 1178
1179 1179 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1180 1180 assert branch_name_mock.call_args_list == expected_calls
1181 1181 assert result is False