##// END OF EJS Templates
py3: drop b'' from debug message "moving bookmarks"
Yuya Nishihara -
r36854:a00c38b3 default
parent child Browse files
Show More
@@ -1,406 +1,407 b''
1 1 test-abort-checkin.t
2 2 test-add.t
3 3 test-addremove-similar.t
4 4 test-addremove.t
5 5 test-amend-subrepo.t
6 6 test-amend.t
7 7 test-ancestor.py
8 8 test-annotate.py
9 9 test-annotate.t
10 10 test-archive-symlinks.t
11 11 test-atomictempfile.py
12 12 test-audit-path.t
13 13 test-audit-subrepo.t
14 14 test-automv.t
15 15 test-backout.t
16 16 test-backwards-remove.t
17 17 test-basic.t
18 18 test-bheads.t
19 19 test-bisect.t
20 20 test-bisect2.t
21 21 test-bisect3.t
22 22 test-blackbox.t
23 23 test-bookmarks-current.t
24 24 test-bookmarks-merge.t
25 25 test-bookmarks-rebase.t
26 26 test-bookmarks-strip.t
27 27 test-bookmarks.t
28 test-branch-change.t
28 29 test-branch-option.t
29 30 test-branch-tag-confict.t
30 31 test-branches.t
31 32 test-bundle-phases.t
32 33 test-bundle-type.t
33 34 test-bundle-vs-outgoing.t
34 35 test-bundle2-multiple-changegroups.t
35 36 test-cappedreader.py
36 37 test-casecollision.t
37 38 test-cat.t
38 39 test-censor.t
39 40 test-changelog-exec.t
40 41 test-check-commit.t
41 42 test-check-execute.t
42 43 test-check-module-imports.t
43 44 test-check-pyflakes.t
44 45 test-check-pylint.t
45 46 test-check-shbang.t
46 47 test-children.t
47 48 test-clone-pull-corruption.t
48 49 test-clone-r.t
49 50 test-clone-update-order.t
50 51 test-command-template.t
51 52 test-commit-amend.t
52 53 test-commit-multiple.t
53 54 test-commit-unresolved.t
54 55 test-commit.t
55 56 test-committer.t
56 57 test-completion.t
57 58 test-config-env.py
58 59 test-config.t
59 60 test-conflict.t
60 61 test-confused-revert.t
61 62 test-contrib-check-code.t
62 63 test-contrib-check-commit.t
63 64 test-convert-authormap.t
64 65 test-convert-clonebranches.t
65 66 test-convert-datesort.t
66 67 test-convert-filemap.t
67 68 test-convert-hg-sink.t
68 69 test-convert-hg-source.t
69 70 test-convert-hg-startrev.t
70 71 test-copy-move-merge.t
71 72 test-copy.t
72 73 test-copytrace-heuristics.t
73 74 test-debugbuilddag.t
74 75 test-debugbundle.t
75 76 test-debugextensions.t
76 77 test-debugindexdot.t
77 78 test-debugrename.t
78 79 test-default-push.t
79 80 test-diff-binary-file.t
80 81 test-diff-change.t
81 82 test-diff-copy-depth.t
82 83 test-diff-hashes.t
83 84 test-diff-issue2761.t
84 85 test-diff-newlines.t
85 86 test-diff-reverse.t
86 87 test-diff-subdir.t
87 88 test-diffdir.t
88 89 test-directaccess.t
89 90 test-dirstate-backup.t
90 91 test-dirstate-nonnormalset.t
91 92 test-doctest.py
92 93 test-double-merge.t
93 94 test-drawdag.t
94 95 test-duplicateoptions.py
95 96 test-empty-dir.t
96 97 test-empty-file.t
97 98 test-empty-group.t
98 99 test-empty.t
99 100 test-encode.t
100 101 test-encoding-func.py
101 102 test-encoding.t
102 103 test-eol-add.t
103 104 test-eol-clone.t
104 105 test-eol-hook.t
105 106 test-eol-tag.t
106 107 test-eol-update.t
107 108 test-excessive-merge.t
108 109 test-exchange-obsmarkers-case-A1.t
109 110 test-exchange-obsmarkers-case-A2.t
110 111 test-exchange-obsmarkers-case-A3.t
111 112 test-exchange-obsmarkers-case-A4.t
112 113 test-exchange-obsmarkers-case-A5.t
113 114 test-exchange-obsmarkers-case-A6.t
114 115 test-exchange-obsmarkers-case-A7.t
115 116 test-exchange-obsmarkers-case-B1.t
116 117 test-exchange-obsmarkers-case-B2.t
117 118 test-exchange-obsmarkers-case-B3.t
118 119 test-exchange-obsmarkers-case-B4.t
119 120 test-exchange-obsmarkers-case-B5.t
120 121 test-exchange-obsmarkers-case-B6.t
121 122 test-exchange-obsmarkers-case-B7.t
122 123 test-exchange-obsmarkers-case-C1.t
123 124 test-exchange-obsmarkers-case-C2.t
124 125 test-exchange-obsmarkers-case-C3.t
125 126 test-exchange-obsmarkers-case-C4.t
126 127 test-exchange-obsmarkers-case-D1.t
127 128 test-exchange-obsmarkers-case-D2.t
128 129 test-exchange-obsmarkers-case-D3.t
129 130 test-exchange-obsmarkers-case-D4.t
130 131 test-execute-bit.t
131 132 test-extdiff.t
132 133 test-extra-filelog-entry.t
133 134 test-filebranch.t
134 135 test-fileset-generated.t
135 136 test-flags.t
136 137 test-generaldelta.t
137 138 test-getbundle.t
138 139 test-git-export.t
139 140 test-glog-topological.t
140 141 test-gpg.t
141 142 test-graft.t
142 143 test-hghave.t
143 144 test-hgignore.t
144 145 test-hgk.t
145 146 test-hgweb-bundle.t
146 147 test-hgweb-descend-empties.t
147 148 test-hgweb-removed.t
148 149 test-histedit-arguments.t
149 150 test-histedit-base.t
150 151 test-histedit-bookmark-motion.t
151 152 test-histedit-commute.t
152 153 test-histedit-drop.t
153 154 test-histedit-edit.t
154 155 test-histedit-fold-non-commute.t
155 156 test-histedit-fold.t
156 157 test-histedit-no-change.t
157 158 test-histedit-non-commute-abort.t
158 159 test-histedit-non-commute.t
159 160 test-histedit-obsolete.t
160 161 test-histedit-outgoing.t
161 162 test-histedit-templates.t
162 163 test-http-branchmap.t
163 164 test-http-bundle1.t
164 165 test-http-clone-r.t
165 166 test-identify.t
166 167 test-import-unknown.t
167 168 test-imports-checker.t
168 169 test-inherit-mode.t
169 170 test-issue1089.t
170 171 test-issue1102.t
171 172 test-issue1175.t
172 173 test-issue1306.t
173 174 test-issue1438.t
174 175 test-issue1502.t
175 176 test-issue1802.t
176 177 test-issue1877.t
177 178 test-issue1993.t
178 179 test-issue2137.t
179 180 test-issue3084.t
180 181 test-issue4074.t
181 182 test-issue522.t
182 183 test-issue586.t
183 184 test-issue612.t
184 185 test-issue619.t
185 186 test-issue672.t
186 187 test-issue842.t
187 188 test-journal-exists.t
188 189 test-largefiles-cache.t
189 190 test-largefiles-misc.t
190 191 test-largefiles-small-disk.t
191 192 test-largefiles-update.t
192 193 test-lfs-largefiles.t
193 194 test-locate.t
194 195 test-lock-badness.t
195 196 test-log.t
196 197 test-logexchange.t
197 198 test-lrucachedict.py
198 199 test-mactext.t
199 200 test-manifest-merging.t
200 201 test-manifest.py
201 202 test-manifest.t
202 203 test-match.py
203 204 test-mdiff.py
204 205 test-merge-changedelete.t
205 206 test-merge-closedheads.t
206 207 test-merge-commit.t
207 208 test-merge-criss-cross.t
208 209 test-merge-default.t
209 210 test-merge-force.t
210 211 test-merge-halt.t
211 212 test-merge-internal-tools-pattern.t
212 213 test-merge-local.t
213 214 test-merge-remove.t
214 215 test-merge-revert.t
215 216 test-merge-revert2.t
216 217 test-merge-subrepos.t
217 218 test-merge-symlinks.t
218 219 test-merge-types.t
219 220 test-merge1.t
220 221 test-merge10.t
221 222 test-merge2.t
222 223 test-merge4.t
223 224 test-merge5.t
224 225 test-merge6.t
225 226 test-merge7.t
226 227 test-merge8.t
227 228 test-merge9.t
228 229 test-mq-git.t
229 230 test-mq-header-date.t
230 231 test-mq-header-from.t
231 232 test-mq-pull-from-bundle.t
232 233 test-mq-qdiff.t
233 234 test-mq-qfold.t
234 235 test-mq-qgoto.t
235 236 test-mq-qimport-fail-cleanup.t
236 237 test-mq-qpush-exact.t
237 238 test-mq-qqueue.t
238 239 test-mq-qrefresh-interactive.t
239 240 test-mq-qrefresh-replace-log-message.t
240 241 test-mq-qrefresh.t
241 242 test-mq-qrename.t
242 243 test-mq-qsave.t
243 244 test-mq-safety.t
244 245 test-mq-subrepo.t
245 246 test-mq-symlinks.t
246 247 test-mv-cp-st-diff.t
247 248 test-narrow-archive.t
248 249 test-narrow-clone-no-ellipsis.t
249 250 test-narrow-clone-nonlinear.t
250 251 test-narrow-clone.t
251 252 test-narrow-commit.t
252 253 test-narrow-copies.t
253 254 test-narrow-debugcommands.t
254 255 test-narrow-debugrebuilddirstate.t
255 256 test-narrow-exchange-merges.t
256 257 test-narrow-exchange.t
257 258 test-narrow-expanddirstate.t
258 259 test-narrow-merge.t
259 260 test-narrow-patch.t
260 261 test-narrow-patterns.t
261 262 test-narrow-pull.t
262 263 test-narrow-rebase.t
263 264 test-narrow-shallow-merges.t
264 265 test-narrow-shallow.t
265 266 test-narrow-strip.t
266 267 test-narrow-update.t
267 268 test-nested-repo.t
268 269 test-newbranch.t
269 270 test-obshistory.t
270 271 test-obsmarker-template.t
271 272 test-obsmarkers-effectflag.t
272 273 test-obsolete-bundle-strip.t
273 274 test-obsolete-changeset-exchange.t
274 275 test-obsolete-checkheads.t
275 276 test-obsolete-distributed.t
276 277 test-obsolete-tag-cache.t
277 278 test-parents.t
278 279 test-pathconflicts-merge.t
279 280 test-pathconflicts-update.t
280 281 test-pending.t
281 282 test-permissions.t
282 283 test-phases.t
283 284 test-pull-branch.t
284 285 test-pull-http.t
285 286 test-pull-permission.t
286 287 test-pull-pull-corruption.t
287 288 test-pull-r.t
288 289 test-pull-update.t
289 290 test-purge.t
290 291 test-push-checkheads-partial-C1.t
291 292 test-push-checkheads-partial-C2.t
292 293 test-push-checkheads-partial-C3.t
293 294 test-push-checkheads-partial-C4.t
294 295 test-push-checkheads-pruned-B1.t
295 296 test-push-checkheads-pruned-B2.t
296 297 test-push-checkheads-pruned-B3.t
297 298 test-push-checkheads-pruned-B4.t
298 299 test-push-checkheads-pruned-B5.t
299 300 test-push-checkheads-pruned-B6.t
300 301 test-push-checkheads-pruned-B7.t
301 302 test-push-checkheads-pruned-B8.t
302 303 test-push-checkheads-superceed-A1.t
303 304 test-push-checkheads-superceed-A2.t
304 305 test-push-checkheads-superceed-A3.t
305 306 test-push-checkheads-superceed-A4.t
306 307 test-push-checkheads-superceed-A5.t
307 308 test-push-checkheads-superceed-A6.t
308 309 test-push-checkheads-superceed-A7.t
309 310 test-push-checkheads-superceed-A8.t
310 311 test-push-checkheads-unpushed-D1.t
311 312 test-push-checkheads-unpushed-D2.t
312 313 test-push-checkheads-unpushed-D3.t
313 314 test-push-checkheads-unpushed-D4.t
314 315 test-push-checkheads-unpushed-D5.t
315 316 test-push-checkheads-unpushed-D6.t
316 317 test-push-checkheads-unpushed-D7.t
317 318 test-push-http.t
318 319 test-push-warn.t
319 320 test-pushvars.t
320 321 test-rebase-abort.t
321 322 test-rebase-base-flag.t
322 323 test-rebase-bookmarks.t
323 324 test-rebase-brute-force.t
324 325 test-rebase-cache.t
325 326 test-rebase-check-restore.t
326 327 test-rebase-collapse.t
327 328 test-rebase-dest.t
328 329 test-rebase-detach.t
329 330 test-rebase-emptycommit.t
330 331 test-rebase-inmemory.t
331 332 test-rebase-interruptions.t
332 333 test-rebase-issue-noparam-single-rev.t
333 334 test-rebase-legacy.t
334 335 test-rebase-mq-skip.t
335 336 test-rebase-named-branches.t
336 337 test-rebase-newancestor.t
337 338 test-rebase-obsolete.t
338 339 test-rebase-parameters.t
339 340 test-rebase-partial.t
340 341 test-rebase-pull.t
341 342 test-rebase-rename.t
342 343 test-rebase-scenario-global.t
343 344 test-rebase-templates.t
344 345 test-rebase-transaction.t
345 346 test-record.t
346 347 test-relink.t
347 348 test-remove.t
348 349 test-rename-after-merge.t
349 350 test-rename-dir-merge.t
350 351 test-rename-merge1.t
351 352 test-rename.t
352 353 test-repair-strip.t
353 354 test-repo-compengines.t
354 355 test-resolve.t
355 356 test-revert-flags.t
356 357 test-revert-unknown.t
357 358 test-revlog-ancestry.py
358 359 test-revlog-group-emptyiter.t
359 360 test-revlog-mmapindex.t
360 361 test-revlog-packentry.t
361 362 test-revset-dirstate-parents.t
362 363 test-revset-outgoing.t
363 364 test-rollback.t
364 365 test-run-tests.py
365 366 test-schemes.t
366 367 test-serve.t
367 368 test-share.t
368 369 test-show-stack.t
369 370 test-show-work.t
370 371 test-show.t
371 372 test-simple-update.t
372 373 test-single-head.t
373 374 test-sparse-clear.t
374 375 test-sparse-merges.t
375 376 test-sparse-requirement.t
376 377 test-sparse-verbose-json.t
377 378 test-ssh-clone-r.t
378 379 test-ssh-proto.t
379 380 test-sshserver.py
380 381 test-status-rev.t
381 382 test-status-terse.t
382 383 test-strip-cross.t
383 384 test-strip.t
384 385 test-subrepo-deep-nested-change.t
385 386 test-subrepo.t
386 387 test-symlinks.t
387 388 test-tag.t
388 389 test-tags.t
389 390 test-treemanifest.t
390 391 test-unamend.t
391 392 test-uncommit.t
392 393 test-unified-test.t
393 394 test-unrelated-pull.t
394 395 test-up-local-change.t
395 396 test-update-branches.t
396 397 test-update-dest.t
397 398 test-update-issue1456.t
398 399 test-update-names.t
399 400 test-update-reverse.t
400 401 test-upgrade-repo.t
401 402 test-url-rev.t
402 403 test-username-newline.t
403 404 test-verify.t
404 405 test-websub.t
405 406 test-win32text.t
406 407 test-xdg.t
@@ -1,1425 +1,1426 b''
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import glob
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import socket
16 16 import subprocess
17 17 import weakref
18 18
19 19 from .i18n import _
20 20 from .node import (
21 21 hex,
22 22 nullid,
23 23 short,
24 24 wdirid,
25 25 wdirrev,
26 26 )
27 27
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 match as matchmod,
32 32 obsolete,
33 33 obsutil,
34 34 pathutil,
35 35 phases,
36 36 pycompat,
37 37 revsetlang,
38 38 similar,
39 39 url,
40 40 util,
41 41 vfs,
42 42 )
43 43
44 44 if pycompat.iswindows:
45 45 from . import scmwindows as scmplatform
46 46 else:
47 47 from . import scmposix as scmplatform
48 48
49 49 termsize = scmplatform.termsize
50 50
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Store the seven categories positionally; the properties below
        # expose them under readable names.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
103 103
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs in sorted subpath order, preferring
    the subrepo state from ctx1 when a path exists in both contexts.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # Subpaths only present in ctx2 get special treatment below.
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
128 128
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            # Only live secret changesets count as "ignored"; extinct
            # ones are filtered out of the message.
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
145 145
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.

    Unhandled exception classes propagate to the caller. Handled ones
    print a message to ui and fall through to the final ``return -1``,
    except InterventionRequired (returns 1) and SystemExit (returns the
    exit code).
    """
    try:
        try:
            return func()
        except: # re-raises
            # Emit the traceback (when --traceback is enabled) before the
            # handlers below reduce the exception to a short message.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.warn(_("abort: %s: %s\n")
                % (inst.desc or util.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.warn(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or util.forcebytestr(inst.filename),
                 encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.warn(msg)
        if inst.args:
            ui.warn(''.join(inst.args))
        if inst.hint:
            ui.warn('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.warn(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # The payload may be unicode (py2), bytes, or anything else a
        # remote handed back; normalize before printing.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.warn(" %r\n" % (msg,))
        elif not msg:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(msg))
    except error.CensoredNodeError as inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        # Not an abort: the user must resolve something and re-run.
        ui.warn("%s\n" % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.warn(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
        # Last word of the message is conventionally the module name.
        m = util.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            # Looks like an HTTPError (has .code) — TODO confirm.
            ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output piped to `head`) is not an error.
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    util.forcebytestr(inst.filename)))
            else:
                ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                util.forcebytestr(inst.filename)))
        else:
            ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))

    return -1
262 262
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label (bookmark/branch/tag).

    Rejects reserved names, names containing control/separator characters,
    pure integers, and names with leading/trailing whitespace.
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        # An integer name would be ambiguous with revision numbers.
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
279 279
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newline characters would corrupt the manifest/dirstate encodings.
    for forbidden in ('\r', '\n'):
        if forbidden in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
284 284
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config

    Always rejects filenames with newlines; Windows-specific problems
    then either abort or warn per ui.portablefilenames.
    '''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, util.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
296 296
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) boolean pair derived from
    ui.portablefilenames; raises ConfigError for unrecognized values.
    '''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = util.parsebool(val)
    # On Windows non-portable names are always fatal.
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
309 309
class casecollisionauditor(object):
    """Detect filenames that differ only by case from tracked files.

    Calling the instance with a filename warns (or aborts, per the
    ``abort`` flag) on a potential case-folding collision — important on
    case-insensitive filesystems.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # NUL never appears in filenames, so joining on it lets us
        # lowercase every tracked name in a single encoding.lower() call.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # Audit one filename; side effect: f is remembered as seen.
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
333 333
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.

    Returns None when nothing is filtered at all; returns a key of None
    when nothing at or below maxrev is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            # ';' delimits revs so distinct sequences hash differently.
            s.update('%d;' % rev)
        key = s.digest()
    return key
357 357
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs

    When followsym is true, symlinked directories are followed while
    guarding against revisits via the seen_dirs stat list.
    '''
    def errhandler(err):
        # Only propagate errors for the root path itself; errors deeper
        # in the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat; return False if an equivalent
            # directory (same inode/device) was already visited.
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect symlink cycles safely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk through the symlink ourselves; os.walk
                        # will not follow it.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
401 401
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; substitute the
    # wdir sentinel id.
    node = ctx.node()
    return wdirid if node is None else node
408 408
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory context has no revision number; substitute
    # the wdir sentinel revision.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
416 416
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
422 422
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug mode shows the full hex node; otherwise the short form.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
430 430
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve revspec to a single changectx, or repo[default] when empty.

    Raises error.Abort if the revset evaluates to an empty set; when it
    yields several revisions, the last one wins.
    """
    # `revspec != 0` keeps revision number 0 (which is falsy) from being
    # mistaken for "no revision given".
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]
439 439
def _pairspec(revspec):
    # True when the top-level revset expression is a range (e.g. 'a:b',
    # 'a:', ':b', ':'), in which case revpair() must yield a real pair.
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
443 443
def revpair(repo, revs):
    """Resolve revset specs to a (firstnode, secondnode) pair.

    Returns (p1, None) for empty revs. When the specs collapse to a
    single revision that was not spelled as a range, the second element
    is None so callers can compare against the working directory.
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # Use min/max on ordered sets to avoid materializing the whole set.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
473 473
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        # Bare revision numbers are wrapped into an explicit rev() call
        # so they go through the same revset machinery.
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
501 501
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        # In debug mode always show both slots, padding with null.
        return [parents[0], repo['null']]
    # A linear parent immediately preceding the rev carries no extra
    # information, so report nothing.
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents
517 517
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        # Only expand bare patterns; 'glob:', 're:' etc. prefixed
        # patterns are passed through untouched.
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                # glob can choke on patterns with bracket expressions;
                # fall back to the literal pattern.
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # Keep patterns that matched nothing so the matcher can warn.
        ret.append(kindpat)
    return ret
536 536
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # Normalize the degenerate single-empty-string case to "no patterns".
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # Default bad-file callback: report relative path and reason.
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # A match-everything matcher implies no effective patterns.
    if m.always():
        pats = []
    return m, pats
561 561
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # Thin wrapper over matchandpats() that drops the expanded patterns.
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
566 566
def matchall(repo):
    '''Return a matcher that will efficiently match everything.

    The matcher is rooted at the repository root and relative to the
    repository's current working directory.'''
    return matchmod.always(repo.root, repo.getcwd())
570 570
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.

    ``badfn``, if provided, is forwarded to the matcher to report bad
    matches instead of the default behavior.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
574 574
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A plain path is canonicalized directly; a real pattern must match
    exactly one file in revision ``rev`` or ParseError(msg) is raised.
    """
    if not matchmod.patkind(pat):
        # Not a pattern at all: just normalize the path.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if matcher(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
588 588
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path of the backup file to use for ``filepath``.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (walk ancestors deepest-first; removing one is enough to be able
        # to create the directory chain below)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # A directory sitting where the backup file itself should go also
    # has to make way.
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
624 624
625 625 class _containsnode(object):
626 626 """proxy __contains__(node) to container.__contains__ which accepts revs"""
627 627
628 628 def __init__(self, repo, revcontainer):
629 629 self._torev = repo.changelog.rev
630 630 self._revcontains = revcontainer.__contains__
631 631
632 632 def __contains__(self, node):
633 633 return self._revcontains(self._torev(node))
634 634
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            # explicit move provided by the caller takes precedence
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (util.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                # a None target in applychanges() deletes the bookmark
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
727 728
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    recording renames detected by content similarity.

    Subrepositories are visited when requested through ``opts`` or matched
    by ``matcher``.  Returns 1 if any path was rejected or a subrepository
    reported a failure, 0 otherwise.  Nothing is recorded when ``dry_run``
    is true.
    '''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # Recurse into subrepositories first.
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # Collect paths the matcher rejects; they are reported at the end.
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    # Tell the user what is about to be added/removed (exact matches are
    # only echoed in verbose mode).
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
783 784
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Schedules unknown files for addition, missing ones for removal, and
    records renames detected by similarity.  Returns 1 if any file was
    rejected by the matcher, 0 otherwise.
    '''
    # Define the list before building the matcher: the badfn closure below
    # references it, so it must exist by the time the callback can fire
    # (the original relied on late binding and would raise NameError if the
    # matcher invoked badfn during construction).
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
812 813
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns five lists of repo-root-relative paths:
    (added, unknown, deleted, removed, forgotten).
    '''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    # NOTE: branch order matters below. ``st`` is the walk's stat result
    # (falsy when the file is absent from disk); dirstate codes are
    # '?' untracked, 'a' added, 'r' removed, plus tracked states.
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and a legitimate path: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # anything not marked removed that is gone from disk is
            # "deleted" — this deliberately catches 'a' entries too
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet present on disk: it was forgotten
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
841 842
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping of detected renames.'''
    if similarity <= 0:
        return {}
    renames = {}
    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        # Stay quiet about pairs the user named explicitly, unless verbose.
        exactpair = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not exactpair:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
856 857
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    workingctx = repo[None]
    with repo.wlock():
        # forget missing files first, then add the unknown ones, and
        # finally record the copy metadata for detected renames
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for newname, oldname in renames.iteritems():
            workingctx.copy(oldname, newname)
866 867
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.

    ``cwd`` is only used to render user-facing paths in the warning message.
    """
    # follow a recorded copy back to its original source, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source itself is only scheduled for add: no copy data can be
            # recorded, so just make sure dst is tracked
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
885 886
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings read from the file.'''
    requirements = set(opener.read("requires").splitlines())
    unknown = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # An empty or non-alphanumeric-leading entry means the file itself
        # is damaged rather than merely listing a newer feature.
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unknown.append(requirement)
    if unknown:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(unknown)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
904 905
def writerequires(opener, requirements):
    """Write ``requirements`` to .hg/requires, one sorted entry per line."""
    lines = ["%s\n" % req for req in sorted(requirements)]
    with opener('requires', 'w') as fp:
        for line in lines:
            fp.write(line)
909 910
class filecachesubentry(object):
    """Tracks one on-disk path for the filecache machinery.

    Remembers the stat info of the path and can tell whether the file
    has changed since it was last stat'ed."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is None:
            # we don't know yet, assume it is for now
            return True
        return self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None when the path does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
964 965
class filecacheentry(object):
    """A group of filecachesubentry objects, one per watched path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
981 982
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative path fragments; resolved per-instance through join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        # A hit in obj.__dict__ means the value was computed (or assigned)
        # earlier in this instance's lifetime and is served as-is.
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # invalidate the per-instance cached value (next access recomputes)
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1060 1061
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        # each record is '<revspec>[ <value>]'; missing value means ''
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child / close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    # report a failing shell command only after cleanup has run
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1115 1116
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run ``cmd`` via ui.system with ``lock`` made inheritable through the
    environment variable ``envvar``.  Returns the command's exit code."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    env = {} if environ is None else environ
    with lock.inherit() as locker:
        # advertise the lock token to the child process
        env[envvar] = locker
        return repo.ui.system(cmd, environ=env, *args, **kwargs)
1125 1126
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    # the lock token is advertised to the child via HG_WLOCK_LOCKER
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)
1134 1135
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1141 1142
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised

    Returns the boolean value of the (experimental) format.generaldelta
    config knob."""
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta')
1147 1148
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # stash the first line verbatim, minus its trailing newline
            result[self.firstlinekey] = lines[0][:-1]
            del lines[0]
        try:
            # lines containing only a newline are skipped on purpose
            parsed = dict(line[:-1].split('=', 1) for line in lines
                          if line.strip())
        except ValueError as e:
            raise error.CorruptedState(str(e))
        if self.firstlinekey in parsed:
            raise error.CorruptedState(_("%r can't be used as a key")
                                       % self.firstlinekey)
        result.update(parsed)
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError("key name '%s' is reserved"
                                             % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1216 1217
# transaction-name prefixes for which obsoleted changesets are reported
# once the transaction closes (matched in registersummarycallback)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# transaction-name prefixes for which the range of newly added changesets
# is reported (matched in registersummarycallback)
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1237 1238
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Which reports get registered depends on ``txnname`` (prefix-matched
    against the *source lists above) and on repository configuration.
    """
    def txmatch(sources):
        # 'sources' are transaction-name prefixes the report applies to
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # give each callback a unique category name, numbered in
        # registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken now; the callback compares against it at txn close
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1321 1322
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a short textual summary of ``nodes``.

    At most ``maxnumnodes`` short hashes are listed; a longer list is
    abbreviated as '... and N others' unless the ui is verbose."""
    showall = repo.ui.verbose or len(nodes) <= maxnumnodes
    if showall:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1327 1328
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    # strip/repair transactions legitimately create transient extra heads
    if desc in ('strip', 'repair'):
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1342 1343
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.

    The default implementation returns the sink unchanged; extensions are
    expected to replace this function to interpose their own wrapper.
    """
    return sink
1348 1349
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access must be enabled, and only the standard view filters
    # can be widened
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    hashlikes = set()
    for spec in specs:
        try:
            parsed = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        hashlikes.update(revsetlang.gethashlikesymbols(parsed))

    if not hashlikes:
        return repo

    revs = _getrevsfromsymbols(repo, hashlikes)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disbale these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1391 1392
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols

    ``symbols`` are hash-like strings (revision numbers or node-hash
    prefixes); only those resolving to revisions hidden by the current
    filter are returned.
    """
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # Valid revision numbers are 0..tiprev-1 (tiprev is a count).
            # The previous 'n <= tiprev' off-by-one let the nonexistent
            # rev ``tiprev`` itself be added to the result set; numbers
            # outside the valid range now fall through to hash matching.
            if n < tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            # hidden means present unfiltered but absent from the view
            if rev not in cl:
                revs.add(rev)

    return revs
# General Comments 0
# You need to be logged in to leave comments. Login now