scmutil: make shortest() respect disambiguation revset...
Martin von Zweigbergk
r38879:6f7c9527 default
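
This commit makes shortesthexnodeidprefix() honor the experimental
revisions.disambiguatewithin config: when it is set, a short hash only needs
to be unambiguous among the revisions matched by that revset. A minimal
sketch of the resulting behavior, assuming an existing repository in the
current directory (the repository path, the 'not hidden()' revset value, and
the 'example' source tag are illustrative assumptions, not part of this
change):

    # Illustrative only: drives the code path added in the hunk below.
    from mercurial import hg, scmutil, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')  # assumption: a repo exists at cwd
    # Limit prefix disambiguation to this (example) revset.
    repo.ui.setconfig(b'experimental', b'revisions.disambiguatewithin',
                      b'not hidden()', b'example')
    node = repo[b'.'].node()
    # The returned prefix is only guaranteed unique within the configured
    # revset; the patched code still calls disambiguate() so the prefix
    # cannot be confused with a plain revision number.
    print(scmutil.shortesthexnodeidprefix(repo, node, minlength=1))
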
@@ -1,1718 +1,1733 @@
# scmutil.py - Mercurial core utility functions
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import glob
import hashlib
import os
import re
import socket
import subprocess
import weakref

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    short,
    wdirid,
    wdirrev,
)

from . import (
    encoding,
    error,
    match as matchmod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    pycompat,
    revsetlang,
    similar,
    url,
    util,
    vfs,
)

from .utils import (
    procutil,
    stringutil,
)

if pycompat.iswindows:
    from . import scmwindows as scmplatform
else:
    from . import scmposix as scmplatform

termsize = scmplatform.termsize

class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))

def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)

def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))

def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.RevlogError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1

def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)

def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))

def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %s" % (msg, procutil.shellquote(f))
            if abort:
                raise error.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)

def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lval = val.lower()
    bval = stringutil.parsebool(val)
    abort = pycompat.iswindows or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn

class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)

def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    key = None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    if node is None:
        return wdirid
    return node

def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    if rev is None:
        return wdirrev
    return rev

def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    repo = ctx.repo()
    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))

def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    return '%d:%s' % (rev, hexfunc(node))

def resolvehexnodeidprefix(repo, prefix):
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = []
                for rev in revs:
                    node = repo.changelog.node(rev)
                    if hex(node).startswith(prefix):
                        matches.append(node)
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node

def shortesthexnodeidprefix(repo, node, minlength=1):
    """Find the shortest unambiguous prefix that matches hexnode."""
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = repo.unfiltered().changelog

    def isrev(prefix):
        try:
            i = int(prefix)
            # if we are a pure int, then starting with zero will not be
            # confused as a rev; or, obviously, if the int is larger
            # than the value of the tip rev
            if prefix[0:1] == b'0' or i > len(cl):
                return False
            return True
        except ValueError:
            return False

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not isrev(prefix):
                return prefix

+    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
+    if revset:
+        revs = repo.anyrevs([revset], user=True)
+        if cl.rev(node) in revs:
+            hexnode = hex(node)
+            for length in range(minlength, len(hexnode) + 1):
+                matches = []
+                prefix = hexnode[:length]
+                for rev in revs:
+                    otherhexnode = repo[rev].hex()
+                    if prefix == otherhexnode[:length]:
+                        matches.append(otherhexnode)
+                if len(matches) == 1:
+                    return disambiguate(prefix)
+
    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()

def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
        return True
    except error.RepoLookupError:
        return False

def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
815 """
830 """
816 assert fixphase or targetphase is None
831 assert fixphase or targetphase is None
817 if not replacements and not moves:
832 if not replacements and not moves:
818 return
833 return
819
834
820 # translate mapping's other forms
835 # translate mapping's other forms
821 if not util.safehasattr(replacements, 'items'):
836 if not util.safehasattr(replacements, 'items'):
822 replacements = {n: () for n in replacements}
837 replacements = {n: () for n in replacements}
823
838
824 # Calculate bookmark movements
839 # Calculate bookmark movements
825 if moves is None:
840 if moves is None:
826 moves = {}
841 moves = {}
827 # Unfiltered repo is needed since nodes in replacements might be hidden.
842 # Unfiltered repo is needed since nodes in replacements might be hidden.
828 unfi = repo.unfiltered()
843 unfi = repo.unfiltered()
829 for oldnode, newnodes in replacements.items():
844 for oldnode, newnodes in replacements.items():
830 if oldnode in moves:
845 if oldnode in moves:
831 continue
846 continue
832 if len(newnodes) > 1:
847 if len(newnodes) > 1:
833 # usually a split, take the one with biggest rev number
848 # usually a split, take the one with biggest rev number
834 newnode = next(unfi.set('max(%ln)', newnodes)).node()
849 newnode = next(unfi.set('max(%ln)', newnodes)).node()
835 elif len(newnodes) == 0:
850 elif len(newnodes) == 0:
836 # move bookmark backwards
851 # move bookmark backwards
837 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
852 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
838 list(replacements)))
853 list(replacements)))
839 if roots:
854 if roots:
840 newnode = roots[0].node()
855 newnode = roots[0].node()
841 else:
856 else:
842 newnode = nullid
857 newnode = nullid
843 else:
858 else:
844 newnode = newnodes[0]
859 newnode = newnodes[0]
845 moves[oldnode] = newnode
860 moves[oldnode] = newnode
846
861
847 allnewnodes = [n for ns in replacements.values() for n in ns]
862 allnewnodes = [n for ns in replacements.values() for n in ns]
848 toretract = {}
863 toretract = {}
849 toadvance = {}
864 toadvance = {}
850 if fixphase:
865 if fixphase:
851 precursors = {}
866 precursors = {}
852 for oldnode, newnodes in replacements.items():
867 for oldnode, newnodes in replacements.items():
853 for newnode in newnodes:
868 for newnode in newnodes:
854 precursors.setdefault(newnode, []).append(oldnode)
869 precursors.setdefault(newnode, []).append(oldnode)
855
870
856 allnewnodes.sort(key=lambda n: unfi[n].rev())
871 allnewnodes.sort(key=lambda n: unfi[n].rev())
857 newphases = {}
872 newphases = {}
858 def phase(ctx):
873 def phase(ctx):
859 return newphases.get(ctx.node(), ctx.phase())
874 return newphases.get(ctx.node(), ctx.phase())
860 for newnode in allnewnodes:
875 for newnode in allnewnodes:
861 ctx = unfi[newnode]
876 ctx = unfi[newnode]
862 parentphase = max(phase(p) for p in ctx.parents())
877 parentphase = max(phase(p) for p in ctx.parents())
863 if targetphase is None:
878 if targetphase is None:
864 oldphase = max(unfi[oldnode].phase()
879 oldphase = max(unfi[oldnode].phase()
865 for oldnode in precursors[newnode])
880 for oldnode in precursors[newnode])
866 newphase = max(oldphase, parentphase)
881 newphase = max(oldphase, parentphase)
867 else:
882 else:
868 newphase = max(targetphase, parentphase)
883 newphase = max(targetphase, parentphase)
869 newphases[newnode] = newphase
884 newphases[newnode] = newphase
870 if newphase > ctx.phase():
885 if newphase > ctx.phase():
871 toretract.setdefault(newphase, []).append(newnode)
886 toretract.setdefault(newphase, []).append(newnode)
872 elif newphase < ctx.phase():
887 elif newphase < ctx.phase():
873 toadvance.setdefault(newphase, []).append(newnode)
888 toadvance.setdefault(newphase, []).append(newnode)
874
889
875 with repo.transaction('cleanup') as tr:
890 with repo.transaction('cleanup') as tr:
876 # Move bookmarks
891 # Move bookmarks
877 bmarks = repo._bookmarks
892 bmarks = repo._bookmarks
878 bmarkchanges = []
893 bmarkchanges = []
879 for oldnode, newnode in moves.items():
894 for oldnode, newnode in moves.items():
880 oldbmarks = repo.nodebookmarks(oldnode)
895 oldbmarks = repo.nodebookmarks(oldnode)
881 if not oldbmarks:
896 if not oldbmarks:
882 continue
897 continue
883 from . import bookmarks # avoid import cycle
898 from . import bookmarks # avoid import cycle
884 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
899 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
885 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
900 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
886 hex(oldnode), hex(newnode)))
901 hex(oldnode), hex(newnode)))
887 # Delete divergent bookmarks that are parents of related newnodes
902 # Delete divergent bookmarks that are parents of related newnodes
888 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
903 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
889 allnewnodes, newnode, oldnode)
904 allnewnodes, newnode, oldnode)
890 deletenodes = _containsnode(repo, deleterevs)
905 deletenodes = _containsnode(repo, deleterevs)
891 for name in oldbmarks:
906 for name in oldbmarks:
892 bmarkchanges.append((name, newnode))
907 bmarkchanges.append((name, newnode))
893 for b in bookmarks.divergent2delete(repo, deletenodes, name):
908 for b in bookmarks.divergent2delete(repo, deletenodes, name):
894 bmarkchanges.append((b, None))
909 bmarkchanges.append((b, None))
895
910
896 if bmarkchanges:
911 if bmarkchanges:
897 bmarks.applychanges(repo, tr, bmarkchanges)
912 bmarks.applychanges(repo, tr, bmarkchanges)
898
913
899 for phase, nodes in toretract.items():
914 for phase, nodes in toretract.items():
900 phases.retractboundary(repo, tr, phase, nodes)
915 phases.retractboundary(repo, tr, phase, nodes)
901 for phase, nodes in toadvance.items():
916 for phase, nodes in toadvance.items():
902 phases.advanceboundary(repo, tr, phase, nodes)
917 phases.advanceboundary(repo, tr, phase, nodes)
903
918
904 # Obsolete or strip nodes
919 # Obsolete or strip nodes
905 if obsolete.isenabled(repo, obsolete.createmarkersopt):
920 if obsolete.isenabled(repo, obsolete.createmarkersopt):
906 # If a node is already obsoleted, and we want to obsolete it
921 # If a node is already obsoleted, and we want to obsolete it
907 # without a successor, skip that obsolete request since it's
922 # without a successor, skip that obsolete request since it's
908 # unnecessary. That's the "if s or not isobs(n)" check below.
923 # unnecessary. That's the "if s or not isobs(n)" check below.
909 # Also sort the nodes in topological order; that might be useful for
924 # Also sort the nodes in topological order; that might be useful for
910 # some obsstore logic.
925 # some obsstore logic.
911 # NOTE: the filtering and sorting might belong to createmarkers.
926 # NOTE: the filtering and sorting might belong to createmarkers.
912 isobs = unfi.obsstore.successors.__contains__
927 isobs = unfi.obsstore.successors.__contains__
913 torev = unfi.changelog.rev
928 torev = unfi.changelog.rev
914 sortfunc = lambda ns: torev(ns[0])
929 sortfunc = lambda ns: torev(ns[0])
915 rels = [(unfi[n], tuple(unfi[m] for m in s))
930 rels = [(unfi[n], tuple(unfi[m] for m in s))
916 for n, s in sorted(replacements.items(), key=sortfunc)
931 for n, s in sorted(replacements.items(), key=sortfunc)
917 if s or not isobs(n)]
932 if s or not isobs(n)]
918 if rels:
933 if rels:
919 obsolete.createmarkers(repo, rels, operation=operation,
934 obsolete.createmarkers(repo, rels, operation=operation,
920 metadata=metadata)
935 metadata=metadata)
921 else:
936 else:
922 from . import repair # avoid import cycle
937 from . import repair # avoid import cycle
923 tostrip = list(replacements)
938 tostrip = list(replacements)
924 if tostrip:
939 if tostrip:
925 repair.delayedstrip(repo.ui, repo, tostrip, operation,
940 repair.delayedstrip(repo.ui, repo, tostrip, operation,
926 backup=backup)
941 backup=backup)
927
942
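# Usage sketch (hypothetical caller, not part of this module): the shape of
# the `replacements` mapping decides how bookmarks on an old node move. One
# successor is followed directly, an empty tuple prunes the node and moves
# bookmarks back to the closest surviving ancestor, and several successors
# (a split) make bookmarks follow the successor with the highest revision.
def _cleanupnodesexample(repo, oldnode, newnode):
    cleanupnodes(repo, {oldnode: (newnode,)}, operation='amend',
                 fixphase=True)
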
928 def addremove(repo, matcher, prefix, opts=None):
943 def addremove(repo, matcher, prefix, opts=None):
929 if opts is None:
944 if opts is None:
930 opts = {}
945 opts = {}
931 m = matcher
946 m = matcher
932 dry_run = opts.get('dry_run')
947 dry_run = opts.get('dry_run')
933 try:
948 try:
934 similarity = float(opts.get('similarity') or 0)
949 similarity = float(opts.get('similarity') or 0)
935 except ValueError:
950 except ValueError:
936 raise error.Abort(_('similarity must be a number'))
951 raise error.Abort(_('similarity must be a number'))
937 if similarity < 0 or similarity > 100:
952 if similarity < 0 or similarity > 100:
938 raise error.Abort(_('similarity must be between 0 and 100'))
953 raise error.Abort(_('similarity must be between 0 and 100'))
939 similarity /= 100.0
954 similarity /= 100.0
940
955
941 ret = 0
956 ret = 0
942 join = lambda f: os.path.join(prefix, f)
957 join = lambda f: os.path.join(prefix, f)
943
958
944 wctx = repo[None]
959 wctx = repo[None]
945 for subpath in sorted(wctx.substate):
960 for subpath in sorted(wctx.substate):
946 submatch = matchmod.subdirmatcher(subpath, m)
961 submatch = matchmod.subdirmatcher(subpath, m)
947 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
962 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
948 sub = wctx.sub(subpath)
963 sub = wctx.sub(subpath)
949 try:
964 try:
950 if sub.addremove(submatch, prefix, opts):
965 if sub.addremove(submatch, prefix, opts):
951 ret = 1
966 ret = 1
952 except error.LookupError:
967 except error.LookupError:
953 repo.ui.status(_("skipping missing subrepository: %s\n")
968 repo.ui.status(_("skipping missing subrepository: %s\n")
954 % join(subpath))
969 % join(subpath))
955
970
956 rejected = []
971 rejected = []
957 def badfn(f, msg):
972 def badfn(f, msg):
958 if f in m.files():
973 if f in m.files():
959 m.bad(f, msg)
974 m.bad(f, msg)
960 rejected.append(f)
975 rejected.append(f)
961
976
962 badmatch = matchmod.badmatch(m, badfn)
977 badmatch = matchmod.badmatch(m, badfn)
963 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
978 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
964 badmatch)
979 badmatch)
965
980
966 unknownset = set(unknown + forgotten)
981 unknownset = set(unknown + forgotten)
967 toprint = unknownset.copy()
982 toprint = unknownset.copy()
968 toprint.update(deleted)
983 toprint.update(deleted)
969 for abs in sorted(toprint):
984 for abs in sorted(toprint):
970 if repo.ui.verbose or not m.exact(abs):
985 if repo.ui.verbose or not m.exact(abs):
971 if abs in unknownset:
986 if abs in unknownset:
972 status = _('adding %s\n') % m.uipath(abs)
987 status = _('adding %s\n') % m.uipath(abs)
973 else:
988 else:
974 status = _('removing %s\n') % m.uipath(abs)
989 status = _('removing %s\n') % m.uipath(abs)
975 repo.ui.status(status)
990 repo.ui.status(status)
976
991
977 renames = _findrenames(repo, m, added + unknown, removed + deleted,
992 renames = _findrenames(repo, m, added + unknown, removed + deleted,
978 similarity)
993 similarity)
979
994
980 if not dry_run:
995 if not dry_run:
981 _markchanges(repo, unknown + forgotten, deleted, renames)
996 _markchanges(repo, unknown + forgotten, deleted, renames)
982
997
983 for f in rejected:
998 for f in rejected:
984 if f in m.files():
999 if f in m.files():
985 return 1
1000 return 1
986 return ret
1001 return ret
987
1002
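# Usage sketch (illustrative values, not part of this module): opts mirror
# the command-line flags of `hg addremove`; similarity is a percentage, as
# on the command line.
def _addremoveexample(repo):
    return addremove(repo, matchall(repo), prefix='',
                     opts={'dry_run': False, 'similarity': 50})
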
988 def marktouched(repo, files, similarity=0.0):
1003 def marktouched(repo, files, similarity=0.0):
989 '''Assert that files have somehow been operated upon. The files are
1004 '''Assert that files have somehow been operated upon. The files are
990 relative to the repo root.'''
1005 relative to the repo root.'''
991 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1006 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
992 rejected = []
1007 rejected = []
993
1008
994 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1009 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
995
1010
996 if repo.ui.verbose:
1011 if repo.ui.verbose:
997 unknownset = set(unknown + forgotten)
1012 unknownset = set(unknown + forgotten)
998 toprint = unknownset.copy()
1013 toprint = unknownset.copy()
999 toprint.update(deleted)
1014 toprint.update(deleted)
1000 for abs in sorted(toprint):
1015 for abs in sorted(toprint):
1001 if abs in unknownset:
1016 if abs in unknownset:
1002 status = _('adding %s\n') % abs
1017 status = _('adding %s\n') % abs
1003 else:
1018 else:
1004 status = _('removing %s\n') % abs
1019 status = _('removing %s\n') % abs
1005 repo.ui.status(status)
1020 repo.ui.status(status)
1006
1021
1007 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1022 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1008 similarity)
1023 similarity)
1009
1024
1010 _markchanges(repo, unknown + forgotten, deleted, renames)
1025 _markchanges(repo, unknown + forgotten, deleted, renames)
1011
1026
1012 for f in rejected:
1027 for f in rejected:
1013 if f in m.files():
1028 if f in m.files():
1014 return 1
1029 return 1
1015 return 0
1030 return 0
1016
1031
1017 def _interestingfiles(repo, matcher):
1032 def _interestingfiles(repo, matcher):
1018 '''Walk dirstate with matcher, looking for files that addremove would care
1033 '''Walk dirstate with matcher, looking for files that addremove would care
1019 about.
1034 about.
1020
1035
1021 This is different from dirstate.status because it doesn't care about
1036 This is different from dirstate.status because it doesn't care about
1022 whether files are modified or clean.'''
1037 whether files are modified or clean.'''
1023 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1038 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1024 audit_path = pathutil.pathauditor(repo.root, cached=True)
1039 audit_path = pathutil.pathauditor(repo.root, cached=True)
1025
1040
1026 ctx = repo[None]
1041 ctx = repo[None]
1027 dirstate = repo.dirstate
1042 dirstate = repo.dirstate
1028 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1043 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1029 unknown=True, ignored=False, full=False)
1044 unknown=True, ignored=False, full=False)
1030 for abs, st in walkresults.iteritems():
1045 for abs, st in walkresults.iteritems():
1031 dstate = dirstate[abs]
1046 dstate = dirstate[abs]
1032 if dstate == '?' and audit_path.check(abs):
1047 if dstate == '?' and audit_path.check(abs):
1033 unknown.append(abs)
1048 unknown.append(abs)
1034 elif dstate != 'r' and not st:
1049 elif dstate != 'r' and not st:
1035 deleted.append(abs)
1050 deleted.append(abs)
1036 elif dstate == 'r' and st:
1051 elif dstate == 'r' and st:
1037 forgotten.append(abs)
1052 forgotten.append(abs)
1038 # for finding renames
1053 # for finding renames
1039 elif dstate == 'r' and not st:
1054 elif dstate == 'r' and not st:
1040 removed.append(abs)
1055 removed.append(abs)
1041 elif dstate == 'a':
1056 elif dstate == 'a':
1042 added.append(abs)
1057 added.append(abs)
1043
1058
1044 return added, unknown, deleted, removed, forgotten
1059 return added, unknown, deleted, removed, forgotten
1045
1060
1046 def _findrenames(repo, matcher, added, removed, similarity):
1061 def _findrenames(repo, matcher, added, removed, similarity):
1047 '''Find renames from removed files to added ones.'''
1062 '''Find renames from removed files to added ones.'''
1048 renames = {}
1063 renames = {}
1049 if similarity > 0:
1064 if similarity > 0:
1050 for old, new, score in similar.findrenames(repo, added, removed,
1065 for old, new, score in similar.findrenames(repo, added, removed,
1051 similarity):
1066 similarity):
1052 if (repo.ui.verbose or not matcher.exact(old)
1067 if (repo.ui.verbose or not matcher.exact(old)
1053 or not matcher.exact(new)):
1068 or not matcher.exact(new)):
1054 repo.ui.status(_('recording removal of %s as rename to %s '
1069 repo.ui.status(_('recording removal of %s as rename to %s '
1055 '(%d%% similar)\n') %
1070 '(%d%% similar)\n') %
1056 (matcher.rel(old), matcher.rel(new),
1071 (matcher.rel(old), matcher.rel(new),
1057 score * 100))
1072 score * 100))
1058 renames[new] = old
1073 renames[new] = old
1059 return renames
1074 return renames
1060
1075
1061 def _markchanges(repo, unknown, deleted, renames):
1076 def _markchanges(repo, unknown, deleted, renames):
1062 '''Marks the files in unknown as added, the files in deleted as removed,
1077 '''Marks the files in unknown as added, the files in deleted as removed,
1063 and the files in renames as copied.'''
1078 and the files in renames as copied.'''
1064 wctx = repo[None]
1079 wctx = repo[None]
1065 with repo.wlock():
1080 with repo.wlock():
1066 wctx.forget(deleted)
1081 wctx.forget(deleted)
1067 wctx.add(unknown)
1082 wctx.add(unknown)
1068 for new, old in renames.iteritems():
1083 for new, old in renames.iteritems():
1069 wctx.copy(old, new)
1084 wctx.copy(old, new)
1070
1085
1071 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1086 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1072 """Update the dirstate to reflect the intent of copying src to dst. For
1087 """Update the dirstate to reflect the intent of copying src to dst. For
1073 different reasons it might not end with dst being marked as copied from src.
1088 different reasons it might not end with dst being marked as copied from src.
1074 """
1089 """
1075 origsrc = repo.dirstate.copied(src) or src
1090 origsrc = repo.dirstate.copied(src) or src
1076 if dst == origsrc: # copying back a copy?
1091 if dst == origsrc: # copying back a copy?
1077 if repo.dirstate[dst] not in 'mn' and not dryrun:
1092 if repo.dirstate[dst] not in 'mn' and not dryrun:
1078 repo.dirstate.normallookup(dst)
1093 repo.dirstate.normallookup(dst)
1079 else:
1094 else:
1080 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1095 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1081 if not ui.quiet:
1096 if not ui.quiet:
1082 ui.warn(_("%s has not been committed yet, so no copy "
1097 ui.warn(_("%s has not been committed yet, so no copy "
1083 "data will be stored for %s.\n")
1098 "data will be stored for %s.\n")
1084 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1099 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1085 if repo.dirstate[dst] in '?r' and not dryrun:
1100 if repo.dirstate[dst] in '?r' and not dryrun:
1086 wctx.add([dst])
1101 wctx.add([dst])
1087 elif not dryrun:
1102 elif not dryrun:
1088 wctx.copy(origsrc, dst)
1103 wctx.copy(origsrc, dst)
1089
1104
1090 def readrequires(opener, supported):
1105 def readrequires(opener, supported):
1091 '''Reads and parses .hg/requires and checks if all entries found
1106 '''Reads and parses .hg/requires and checks if all entries found
1092 are in the list of supported features.'''
1107 are in the list of supported features.'''
1093 requirements = set(opener.read("requires").splitlines())
1108 requirements = set(opener.read("requires").splitlines())
1094 missings = []
1109 missings = []
1095 for r in requirements:
1110 for r in requirements:
1096 if r not in supported:
1111 if r not in supported:
1097 if not r or not r[0:1].isalnum():
1112 if not r or not r[0:1].isalnum():
1098 raise error.RequirementError(_(".hg/requires file is corrupt"))
1113 raise error.RequirementError(_(".hg/requires file is corrupt"))
1099 missings.append(r)
1114 missings.append(r)
1100 missings.sort()
1115 missings.sort()
1101 if missings:
1116 if missings:
1102 raise error.RequirementError(
1117 raise error.RequirementError(
1103 _("repository requires features unknown to this Mercurial: %s")
1118 _("repository requires features unknown to this Mercurial: %s")
1104 % " ".join(missings),
1119 % " ".join(missings),
1105 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1120 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
1106 " for more information"))
1121 " for more information"))
1107 return requirements
1122 return requirements
1108
1123
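# Sketch of the on-disk format parsed above: .hg/requires holds one feature
# name per line, e.g.
#
#   revlogv1
#   generaldelta
#
# A hypothetical caller checks it against the locally supported features:
def _readrequiresexample(repo):
    return readrequires(repo.vfs, {'revlogv1', 'generaldelta', 'store'})
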
1109 def writerequires(opener, requirements):
1124 def writerequires(opener, requirements):
1110 with opener('requires', 'w') as fp:
1125 with opener('requires', 'w') as fp:
1111 for r in sorted(requirements):
1126 for r in sorted(requirements):
1112 fp.write("%s\n" % r)
1127 fp.write("%s\n" % r)
1113
1128
1114 class filecachesubentry(object):
1129 class filecachesubentry(object):
1115 def __init__(self, path, stat):
1130 def __init__(self, path, stat):
1116 self.path = path
1131 self.path = path
1117 self.cachestat = None
1132 self.cachestat = None
1118 self._cacheable = None
1133 self._cacheable = None
1119
1134
1120 if stat:
1135 if stat:
1121 self.cachestat = filecachesubentry.stat(self.path)
1136 self.cachestat = filecachesubentry.stat(self.path)
1122
1137
1123 if self.cachestat:
1138 if self.cachestat:
1124 self._cacheable = self.cachestat.cacheable()
1139 self._cacheable = self.cachestat.cacheable()
1125 else:
1140 else:
1126 # None means we don't know yet
1141 # None means we don't know yet
1127 self._cacheable = None
1142 self._cacheable = None
1128
1143
1129 def refresh(self):
1144 def refresh(self):
1130 if self.cacheable():
1145 if self.cacheable():
1131 self.cachestat = filecachesubentry.stat(self.path)
1146 self.cachestat = filecachesubentry.stat(self.path)
1132
1147
1133 def cacheable(self):
1148 def cacheable(self):
1134 if self._cacheable is not None:
1149 if self._cacheable is not None:
1135 return self._cacheable
1150 return self._cacheable
1136
1151
1137 # we don't know yet, assume it is for now
1152 # we don't know yet, assume it is for now
1138 return True
1153 return True
1139
1154
1140 def changed(self):
1155 def changed(self):
1141 # no point in going further if we can't cache it
1156 # no point in going further if we can't cache it
1142 if not self.cacheable():
1157 if not self.cacheable():
1143 return True
1158 return True
1144
1159
1145 newstat = filecachesubentry.stat(self.path)
1160 newstat = filecachesubentry.stat(self.path)
1146
1161
1147 # we may not know if it's cacheable yet, check again now
1162 # we may not know if it's cacheable yet, check again now
1148 if newstat and self._cacheable is None:
1163 if newstat and self._cacheable is None:
1149 self._cacheable = newstat.cacheable()
1164 self._cacheable = newstat.cacheable()
1150
1165
1151 # check again
1166 # check again
1152 if not self._cacheable:
1167 if not self._cacheable:
1153 return True
1168 return True
1154
1169
1155 if self.cachestat != newstat:
1170 if self.cachestat != newstat:
1156 self.cachestat = newstat
1171 self.cachestat = newstat
1157 return True
1172 return True
1158 else:
1173 else:
1159 return False
1174 return False
1160
1175
1161 @staticmethod
1176 @staticmethod
1162 def stat(path):
1177 def stat(path):
1163 try:
1178 try:
1164 return util.cachestat(path)
1179 return util.cachestat(path)
1165 except OSError as e:
1180 except OSError as e:
1166 if e.errno != errno.ENOENT:
1181 if e.errno != errno.ENOENT:
1167 raise
1182 raise
1168
1183
1169 class filecacheentry(object):
1184 class filecacheentry(object):
1170 def __init__(self, paths, stat=True):
1185 def __init__(self, paths, stat=True):
1171 self._entries = []
1186 self._entries = []
1172 for path in paths:
1187 for path in paths:
1173 self._entries.append(filecachesubentry(path, stat))
1188 self._entries.append(filecachesubentry(path, stat))
1174
1189
1175 def changed(self):
1190 def changed(self):
1176 '''true if any entry has changed'''
1191 '''true if any entry has changed'''
1177 for entry in self._entries:
1192 for entry in self._entries:
1178 if entry.changed():
1193 if entry.changed():
1179 return True
1194 return True
1180 return False
1195 return False
1181
1196
1182 def refresh(self):
1197 def refresh(self):
1183 for entry in self._entries:
1198 for entry in self._entries:
1184 entry.refresh()
1199 entry.refresh()
1185
1200
1186 class filecache(object):
1201 class filecache(object):
1187 """A property like decorator that tracks files under .hg/ for updates.
1202 """A property like decorator that tracks files under .hg/ for updates.
1188
1203
1189 On first access, the files defined as arguments are stat()ed and the
1204 On first access, the files defined as arguments are stat()ed and the
1190 results cached. The decorated function is called. The results are stashed
1205 results cached. The decorated function is called. The results are stashed
1191 away in a ``_filecache`` dict on the object whose method is decorated.
1206 away in a ``_filecache`` dict on the object whose method is decorated.
1192
1207
1193 On subsequent access, the cached result is returned.
1208 On subsequent access, the cached result is returned.
1194
1209
1195 On external property set operations, stat() calls are performed and the new
1210 On external property set operations, stat() calls are performed and the new
1196 value is cached.
1211 value is cached.
1197
1212
1198 On property delete operations, cached data is removed.
1213 On property delete operations, cached data is removed.
1199
1214
1200 When using the property API, cached data is always returned, if available:
1215 When using the property API, cached data is always returned, if available:
1201 no stat() is performed to check if the file has changed and if the function
1216 no stat() is performed to check if the file has changed and if the function
1202 needs to be called to reflect file changes.
1217 needs to be called to reflect file changes.
1203
1218
1204 Others can muck about with the state of the ``_filecache`` dict, e.g. they
1219 Others can muck about with the state of the ``_filecache`` dict, e.g. they
1205 can populate an entry before the property's getter is called. In this case,
1220 can populate an entry before the property's getter is called. In this case,
1206 entries in ``_filecache`` will be used during property operations,
1221 entries in ``_filecache`` will be used during property operations,
1207 if available. If the underlying file changes, it is up to external callers
1222 if available. If the underlying file changes, it is up to external callers
1208 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1223 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1209 method result as well as possibly calling ``del obj._filecache[attr]`` to
1224 method result as well as possibly calling ``del obj._filecache[attr]`` to
1210 remove the ``filecacheentry``.
1225 remove the ``filecacheentry``.
1211 """
1226 """
1212
1227
1213 def __init__(self, *paths):
1228 def __init__(self, *paths):
1214 self.paths = paths
1229 self.paths = paths
1215
1230
1216 def join(self, obj, fname):
1231 def join(self, obj, fname):
1217 """Used to compute the runtime path of a cached file.
1232 """Used to compute the runtime path of a cached file.
1218
1233
1219 Users should subclass filecache and provide their own version of this
1234 Users should subclass filecache and provide their own version of this
1220 function to call the appropriate join function on 'obj' (an instance
1235 function to call the appropriate join function on 'obj' (an instance
1221 of the class whose member function was decorated).
1236 of the class whose member function was decorated).
1222 """
1237 """
1223 raise NotImplementedError
1238 raise NotImplementedError
1224
1239
1225 def __call__(self, func):
1240 def __call__(self, func):
1226 self.func = func
1241 self.func = func
1227 self.sname = func.__name__
1242 self.sname = func.__name__
1228 self.name = pycompat.sysbytes(self.sname)
1243 self.name = pycompat.sysbytes(self.sname)
1229 return self
1244 return self
1230
1245
1231 def __get__(self, obj, type=None):
1246 def __get__(self, obj, type=None):
1232 # if accessed on the class, return the descriptor itself.
1247 # if accessed on the class, return the descriptor itself.
1233 if obj is None:
1248 if obj is None:
1234 return self
1249 return self
1235 # do we need to check if the file changed?
1250 # do we need to check if the file changed?
1236 if self.sname in obj.__dict__:
1251 if self.sname in obj.__dict__:
1237 assert self.name in obj._filecache, self.name
1252 assert self.name in obj._filecache, self.name
1238 return obj.__dict__[self.sname]
1253 return obj.__dict__[self.sname]
1239
1254
1240 entry = obj._filecache.get(self.name)
1255 entry = obj._filecache.get(self.name)
1241
1256
1242 if entry:
1257 if entry:
1243 if entry.changed():
1258 if entry.changed():
1244 entry.obj = self.func(obj)
1259 entry.obj = self.func(obj)
1245 else:
1260 else:
1246 paths = [self.join(obj, path) for path in self.paths]
1261 paths = [self.join(obj, path) for path in self.paths]
1247
1262
1248 # We stat -before- creating the object so our cache doesn't lie if
1263 # We stat -before- creating the object so our cache doesn't lie if
1249 # a writer modified it between the time we read and the time we stat
1264 # a writer modified it between the time we read and the time we stat
1250 entry = filecacheentry(paths, True)
1265 entry = filecacheentry(paths, True)
1251 entry.obj = self.func(obj)
1266 entry.obj = self.func(obj)
1252
1267
1253 obj._filecache[self.name] = entry
1268 obj._filecache[self.name] = entry
1254
1269
1255 obj.__dict__[self.sname] = entry.obj
1270 obj.__dict__[self.sname] = entry.obj
1256 return entry.obj
1271 return entry.obj
1257
1272
1258 def __set__(self, obj, value):
1273 def __set__(self, obj, value):
1259 if self.name not in obj._filecache:
1274 if self.name not in obj._filecache:
1260 # we add an entry for the missing value because X in __dict__
1275 # we add an entry for the missing value because X in __dict__
1261 # implies X in _filecache
1276 # implies X in _filecache
1262 paths = [self.join(obj, path) for path in self.paths]
1277 paths = [self.join(obj, path) for path in self.paths]
1263 ce = filecacheentry(paths, False)
1278 ce = filecacheentry(paths, False)
1264 obj._filecache[self.name] = ce
1279 obj._filecache[self.name] = ce
1265 else:
1280 else:
1266 ce = obj._filecache[self.name]
1281 ce = obj._filecache[self.name]
1267
1282
1268 ce.obj = value # update cached copy
1283 ce.obj = value # update cached copy
1269 obj.__dict__[self.sname] = value # update copy returned by obj.x
1284 obj.__dict__[self.sname] = value # update copy returned by obj.x
1270
1285
1271 def __delete__(self, obj):
1286 def __delete__(self, obj):
1272 try:
1287 try:
1273 del obj.__dict__[self.sname]
1288 del obj.__dict__[self.sname]
1274 except KeyError:
1289 except KeyError:
1275 raise AttributeError(self.sname)
1290 raise AttributeError(self.sname)
1276
1291
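# Minimal usage sketch (hypothetical subclass and consumer, not part of this
# module; assumes a Mercurial vfs object providing join() and tryread()):
class _vfsfilecache(filecache):
    """resolve the tracked paths relative to obj.vfs (i.e. under .hg/)"""
    def join(self, obj, fname):
        return obj.vfs.join(fname)

class _cacheduser(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}  # required by the descriptor protocol above

    @_vfsfilecache('bookmarks')
    def bookmarks(self):
        # recomputed only when the stat of .hg/bookmarks changes
        return self.vfs.tryread('bookmarks')
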
1277 def extdatasource(repo, source):
1292 def extdatasource(repo, source):
1278 """Gather a map of rev -> value dict from the specified source
1293 """Gather a map of rev -> value dict from the specified source
1279
1294
1280 A source spec is treated as a URL, with a special case shell: type
1295 A source spec is treated as a URL, with a special case shell: type
1281 for parsing the output from a shell command.
1296 for parsing the output from a shell command.
1282
1297
1283 The data is parsed as a series of newline-separated records where
1298 The data is parsed as a series of newline-separated records where
1284 each record is a revision specifier optionally followed by a space
1299 each record is a revision specifier optionally followed by a space
1285 and a freeform string value. If the revision is known locally, it
1300 and a freeform string value. If the revision is known locally, it
1286 is converted to a rev, otherwise the record is skipped.
1301 is converted to a rev, otherwise the record is skipped.
1287
1302
1288 Note that both key and value are treated as UTF-8 and converted to
1303 Note that both key and value are treated as UTF-8 and converted to
1289 the local encoding. This allows uniformity between local and
1304 the local encoding. This allows uniformity between local and
1290 remote data sources.
1305 remote data sources.
1291 """
1306 """
1292
1307
1293 spec = repo.ui.config("extdata", source)
1308 spec = repo.ui.config("extdata", source)
1294 if not spec:
1309 if not spec:
1295 raise error.Abort(_("unknown extdata source '%s'") % source)
1310 raise error.Abort(_("unknown extdata source '%s'") % source)
1296
1311
1297 data = {}
1312 data = {}
1298 src = proc = None
1313 src = proc = None
1299 try:
1314 try:
1300 if spec.startswith("shell:"):
1315 if spec.startswith("shell:"):
1301 # external commands should be run relative to the repo root
1316 # external commands should be run relative to the repo root
1302 cmd = spec[6:]
1317 cmd = spec[6:]
1303 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1318 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1304 close_fds=procutil.closefds,
1319 close_fds=procutil.closefds,
1305 stdout=subprocess.PIPE, cwd=repo.root)
1320 stdout=subprocess.PIPE, cwd=repo.root)
1306 src = proc.stdout
1321 src = proc.stdout
1307 else:
1322 else:
1308 # treat as a URL or file
1323 # treat as a URL or file
1309 src = url.open(repo.ui, spec)
1324 src = url.open(repo.ui, spec)
1310 for l in src:
1325 for l in src:
1311 if " " in l:
1326 if " " in l:
1312 k, v = l.strip().split(" ", 1)
1327 k, v = l.strip().split(" ", 1)
1313 else:
1328 else:
1314 k, v = l.strip(), ""
1329 k, v = l.strip(), ""
1315
1330
1316 k = encoding.tolocal(k)
1331 k = encoding.tolocal(k)
1317 try:
1332 try:
1318 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1333 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1319 except (error.LookupError, error.RepoLookupError):
1334 except (error.LookupError, error.RepoLookupError):
1320 pass # we ignore data for nodes that don't exist locally
1335 pass # we ignore data for nodes that don't exist locally
1321 finally:
1336 finally:
1322 if proc:
1337 if proc:
1323 proc.communicate()
1338 proc.communicate()
1324 if src:
1339 if src:
1325 src.close()
1340 src.close()
1326 if proc and proc.returncode != 0:
1341 if proc and proc.returncode != 0:
1327 raise error.Abort(_("extdata command '%s' failed: %s")
1342 raise error.Abort(_("extdata command '%s' failed: %s")
1328 % (cmd, procutil.explainexit(proc.returncode)))
1343 % (cmd, procutil.explainexit(proc.returncode)))
1329
1344
1330 return data
1345 return data
1331
1346
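# Configuration sketch (illustrative source name and command):
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugmap
#
# where every output line is "<revspec> <value>", for example
# "9da65e3cf370 bug 12345". A caller would then simply do:
def _extdatasourceexample(repo):
    return extdatasource(repo, 'bugzilla')  # -> {rev: 'bug 12345', ...}
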
1332 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1347 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1333 if lock is None:
1348 if lock is None:
1334 raise error.LockInheritanceContractViolation(
1349 raise error.LockInheritanceContractViolation(
1335 'lock can only be inherited while held')
1350 'lock can only be inherited while held')
1336 if environ is None:
1351 if environ is None:
1337 environ = {}
1352 environ = {}
1338 with lock.inherit() as locker:
1353 with lock.inherit() as locker:
1339 environ[envvar] = locker
1354 environ[envvar] = locker
1340 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1355 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1341
1356
1342 def wlocksub(repo, cmd, *args, **kwargs):
1357 def wlocksub(repo, cmd, *args, **kwargs):
1343 """run cmd as a subprocess that allows inheriting repo's wlock
1358 """run cmd as a subprocess that allows inheriting repo's wlock
1344
1359
1345 This can only be called while the wlock is held. This takes all the
1360 This can only be called while the wlock is held. This takes all the
1346 arguments that ui.system does, and returns the exit code of the
1361 arguments that ui.system does, and returns the exit code of the
1347 subprocess."""
1362 subprocess."""
1348 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1363 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1349 **kwargs)
1364 **kwargs)
1350
1365
1351 class progress(object):
1366 class progress(object):
1352 def __init__(self, ui, topic, unit="", total=None):
1367 def __init__(self, ui, topic, unit="", total=None):
1353 self.ui = ui
1368 self.ui = ui
1354 self.pos = 0
1369 self.pos = 0
1355 self.topic = topic
1370 self.topic = topic
1356 self.unit = unit
1371 self.unit = unit
1357 self.total = total
1372 self.total = total
1358
1373
1359 def __enter__(self):
1374 def __enter__(self):
1360 return self
1375 return self
1361
1376
1362 def __exit__(self, exc_type, exc_value, exc_tb):
1377 def __exit__(self, exc_type, exc_value, exc_tb):
1363 self.complete()
1378 self.complete()
1364
1379
1365 def update(self, pos, item="", total=None):
1380 def update(self, pos, item="", total=None):
1366 assert pos is not None
1381 assert pos is not None
1367 if total:
1382 if total:
1368 self.total = total
1383 self.total = total
1369 self.pos = pos
1384 self.pos = pos
1370 self._print(item)
1385 self._print(item)
1371
1386
1372 def increment(self, step=1, item="", total=None):
1387 def increment(self, step=1, item="", total=None):
1373 self.update(self.pos + step, item, total)
1388 self.update(self.pos + step, item, total)
1374
1389
1375 def complete(self):
1390 def complete(self):
1376 self.ui.progress(self.topic, None)
1391 self.ui.progress(self.topic, None)
1377
1392
1378 def _print(self, item):
1393 def _print(self, item):
1379 self.ui.progress(self.topic, self.pos, item, self.unit,
1394 self.ui.progress(self.topic, self.pos, item, self.unit,
1380 self.total)
1395 self.total)
1381
1396
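# Usage sketch: progress is a context manager that finishes (clears) its
# topic on exit, so callers don't need to remember to call complete().
def _progressexample(ui, files):
    with progress(ui, 'scanning', unit='files', total=len(files)) as p:
        for f in files:
            p.increment(item=f)
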
1382 def gdinitconfig(ui):
1397 def gdinitconfig(ui):
1383 """helper function to know if a repo should be created as general delta
1398 """helper function to know if a repo should be created as general delta
1384 """
1399 """
1385 # experimental config: format.generaldelta
1400 # experimental config: format.generaldelta
1386 return (ui.configbool('format', 'generaldelta')
1401 return (ui.configbool('format', 'generaldelta')
1387 or ui.configbool('format', 'usegeneraldelta')
1402 or ui.configbool('format', 'usegeneraldelta')
1388 or ui.configbool('format', 'sparse-revlog'))
1403 or ui.configbool('format', 'sparse-revlog'))
1389
1404
1390 def gddeltaconfig(ui):
1405 def gddeltaconfig(ui):
1391 """helper function to know if incoming delta should be optimised
1406 """helper function to know if incoming delta should be optimised
1392 """
1407 """
1393 # experimental config: format.generaldelta
1408 # experimental config: format.generaldelta
1394 return ui.configbool('format', 'generaldelta')
1409 return ui.configbool('format', 'generaldelta')
1395
1410
1396 class simplekeyvaluefile(object):
1411 class simplekeyvaluefile(object):
1397 """A simple file with key=value lines
1412 """A simple file with key=value lines
1398
1413
1399 Keys must be alphanumeric and start with a letter; values must not
1414 Keys must be alphanumeric and start with a letter; values must not
1400 contain '\n' characters"""
1415 contain '\n' characters"""
1401 firstlinekey = '__firstline'
1416 firstlinekey = '__firstline'
1402
1417
1403 def __init__(self, vfs, path, keys=None):
1418 def __init__(self, vfs, path, keys=None):
1404 self.vfs = vfs
1419 self.vfs = vfs
1405 self.path = path
1420 self.path = path
1406
1421
1407 def read(self, firstlinenonkeyval=False):
1422 def read(self, firstlinenonkeyval=False):
1408 """Read the contents of a simple key-value file
1423 """Read the contents of a simple key-value file
1409
1424
1410 'firstlinenonkeyval' indicates whether the first line of the file should
1425 'firstlinenonkeyval' indicates whether the first line of the file should
1411 be treated as a key-value pair or returned fully under the
1426 be treated as a key-value pair or returned fully under the
1412 __firstline key."""
1427 __firstline key."""
1413 lines = self.vfs.readlines(self.path)
1428 lines = self.vfs.readlines(self.path)
1414 d = {}
1429 d = {}
1415 if firstlinenonkeyval:
1430 if firstlinenonkeyval:
1416 if not lines:
1431 if not lines:
1417 e = _("empty simplekeyvalue file")
1432 e = _("empty simplekeyvalue file")
1418 raise error.CorruptedState(e)
1433 raise error.CorruptedState(e)
1419 # we don't want to include '\n' in the __firstline
1434 # we don't want to include '\n' in the __firstline
1420 d[self.firstlinekey] = lines[0][:-1]
1435 d[self.firstlinekey] = lines[0][:-1]
1421 del lines[0]
1436 del lines[0]
1422
1437
1423 try:
1438 try:
1424 # the 'if line.strip()' part prevents us from failing on empty
1439 # the 'if line.strip()' part prevents us from failing on empty
1425 # lines which only contain '\n' and therefore are not skipped
1440 # lines which only contain '\n' and therefore are not skipped
1426 # by 'if line'
1441 # by 'if line'
1427 updatedict = dict(line[:-1].split('=', 1) for line in lines
1442 updatedict = dict(line[:-1].split('=', 1) for line in lines
1428 if line.strip())
1443 if line.strip())
1429 if self.firstlinekey in updatedict:
1444 if self.firstlinekey in updatedict:
1430 e = _("%r can't be used as a key")
1445 e = _("%r can't be used as a key")
1431 raise error.CorruptedState(e % self.firstlinekey)
1446 raise error.CorruptedState(e % self.firstlinekey)
1432 d.update(updatedict)
1447 d.update(updatedict)
1433 except ValueError as e:
1448 except ValueError as e:
1434 raise error.CorruptedState(str(e))
1449 raise error.CorruptedState(str(e))
1435 return d
1450 return d
1436
1451
1437 def write(self, data, firstline=None):
1452 def write(self, data, firstline=None):
1438 """Write key=>value mapping to a file
1453 """Write key=>value mapping to a file
1439 data is a dict. Keys must be alphanumeric and start with a letter.
1454 data is a dict. Keys must be alphanumeric and start with a letter.
1440 Values must not contain newline characters.
1455 Values must not contain newline characters.
1441
1456
1442 If 'firstline' is not None, it is written to the file before
1457 If 'firstline' is not None, it is written to the file before
1443 everything else, as-is, not in key=value form"""
1458 everything else, as-is, not in key=value form"""
1444 lines = []
1459 lines = []
1445 if firstline is not None:
1460 if firstline is not None:
1446 lines.append('%s\n' % firstline)
1461 lines.append('%s\n' % firstline)
1447
1462
1448 for k, v in data.items():
1463 for k, v in data.items():
1449 if k == self.firstlinekey:
1464 if k == self.firstlinekey:
1450 e = "key name '%s' is reserved" % self.firstlinekey
1465 e = "key name '%s' is reserved" % self.firstlinekey
1451 raise error.ProgrammingError(e)
1466 raise error.ProgrammingError(e)
1452 if not k[0:1].isalpha():
1467 if not k[0:1].isalpha():
1453 e = "keys must start with a letter in a key-value file"
1468 e = "keys must start with a letter in a key-value file"
1454 raise error.ProgrammingError(e)
1469 raise error.ProgrammingError(e)
1455 if not k.isalnum():
1470 if not k.isalnum():
1456 e = "invalid key name in a simple key-value file"
1471 e = "invalid key name in a simple key-value file"
1457 raise error.ProgrammingError(e)
1472 raise error.ProgrammingError(e)
1458 if '\n' in v:
1473 if '\n' in v:
1459 e = "invalid value in a simple key-value file"
1474 e = "invalid value in a simple key-value file"
1460 raise error.ProgrammingError(e)
1475 raise error.ProgrammingError(e)
1461 lines.append("%s=%s\n" % (k, v))
1476 lines.append("%s=%s\n" % (k, v))
1462 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1477 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1463 fp.write(''.join(lines))
1478 fp.write(''.join(lines))
1464
1479
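# Round-trip sketch (assumes `vfs` is rooted at the intended directory):
def _simplekeyvaluefileexample(vfs):
    f = simplekeyvaluefile(vfs, 'state')
    f.write({'version': '1', 'node': 'abc123'}, firstline='stateformat-v1')
    return f.read(firstlinenonkeyval=True)
    # -> {'__firstline': 'stateformat-v1', 'version': '1', 'node': 'abc123'}
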
1465 _reportobsoletedsource = [
1480 _reportobsoletedsource = [
1466 'debugobsolete',
1481 'debugobsolete',
1467 'pull',
1482 'pull',
1468 'push',
1483 'push',
1469 'serve',
1484 'serve',
1470 'unbundle',
1485 'unbundle',
1471 ]
1486 ]
1472
1487
1473 _reportnewcssource = [
1488 _reportnewcssource = [
1474 'pull',
1489 'pull',
1475 'unbundle',
1490 'unbundle',
1476 ]
1491 ]
1477
1492
1478 def prefetchfiles(repo, revs, match):
1493 def prefetchfiles(repo, revs, match):
1479 """Invokes the registered file prefetch functions, allowing extensions to
1494 """Invokes the registered file prefetch functions, allowing extensions to
1480 ensure the corresponding files are available locally, before the command
1495 ensure the corresponding files are available locally, before the command
1481 uses them."""
1496 uses them."""
1482 if match:
1497 if match:
1483 # The command itself will complain about files that don't exist, so
1498 # The command itself will complain about files that don't exist, so
1484 # don't duplicate the message.
1499 # don't duplicate the message.
1485 match = matchmod.badmatch(match, lambda fn, msg: None)
1500 match = matchmod.badmatch(match, lambda fn, msg: None)
1486 else:
1501 else:
1487 match = matchall(repo)
1502 match = matchall(repo)
1488
1503
1489 fileprefetchhooks(repo, revs, match)
1504 fileprefetchhooks(repo, revs, match)
1490
1505
1491 # a list of (repo, revs, match) prefetch functions
1506 # a list of (repo, revs, match) prefetch functions
1492 fileprefetchhooks = util.hooks()
1507 fileprefetchhooks = util.hooks()
1493
1508
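# Registration sketch (hypothetical extension): every callable added to the
# util.hooks instance above runs when prefetchfiles() fires.
def _prefetchexample(repo, revs, match):
    pass  # e.g. fetch the matched file revisions from a remote store
fileprefetchhooks.add('myextension', _prefetchexample)
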
1494 # A marker that tells the evolve extension to suppress its own reporting
1509 # A marker that tells the evolve extension to suppress its own reporting
1495 _reportstroubledchangesets = True
1510 _reportstroubledchangesets = True
1496
1511
1497 def registersummarycallback(repo, otr, txnname=''):
1512 def registersummarycallback(repo, otr, txnname=''):
1498 """register a callback to issue a summary after the transaction is closed
1513 """register a callback to issue a summary after the transaction is closed
1499 """
1514 """
1500 def txmatch(sources):
1515 def txmatch(sources):
1501 return any(txnname.startswith(source) for source in sources)
1516 return any(txnname.startswith(source) for source in sources)
1502
1517
1503 categories = []
1518 categories = []
1504
1519
1505 def reportsummary(func):
1520 def reportsummary(func):
1506 """decorator for report callbacks."""
1521 """decorator for report callbacks."""
1507 # The repoview life cycle is shorter than the one of the actual
1522 # The repoview life cycle is shorter than the one of the actual
1508 # underlying repository. So the filtered object can die before the
1523 # underlying repository. So the filtered object can die before the
1509 # weakref is used, leading to trouble. We keep a reference to the
1524 # weakref is used, leading to trouble. We keep a reference to the
1510 # unfiltered object and restore the filtering when retrieving the
1525 # unfiltered object and restore the filtering when retrieving the
1511 # repository through the weakref.
1526 # repository through the weakref.
1512 filtername = repo.filtername
1527 filtername = repo.filtername
1513 reporef = weakref.ref(repo.unfiltered())
1528 reporef = weakref.ref(repo.unfiltered())
1514 def wrapped(tr):
1529 def wrapped(tr):
1515 repo = reporef()
1530 repo = reporef()
1516 if filtername:
1531 if filtername:
1517 repo = repo.filtered(filtername)
1532 repo = repo.filtered(filtername)
1518 func(repo, tr)
1533 func(repo, tr)
1519 newcat = '%02i-txnreport' % len(categories)
1534 newcat = '%02i-txnreport' % len(categories)
1520 otr.addpostclose(newcat, wrapped)
1535 otr.addpostclose(newcat, wrapped)
1521 categories.append(newcat)
1536 categories.append(newcat)
1522 return wrapped
1537 return wrapped
1523
1538
1524 if txmatch(_reportobsoletedsource):
1539 if txmatch(_reportobsoletedsource):
1525 @reportsummary
1540 @reportsummary
1526 def reportobsoleted(repo, tr):
1541 def reportobsoleted(repo, tr):
1527 obsoleted = obsutil.getobsoleted(repo, tr)
1542 obsoleted = obsutil.getobsoleted(repo, tr)
1528 if obsoleted:
1543 if obsoleted:
1529 repo.ui.status(_('obsoleted %i changesets\n')
1544 repo.ui.status(_('obsoleted %i changesets\n')
1530 % len(obsoleted))
1545 % len(obsoleted))
1531
1546
1532 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1547 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1533 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1548 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1534 instabilitytypes = [
1549 instabilitytypes = [
1535 ('orphan', 'orphan'),
1550 ('orphan', 'orphan'),
1536 ('phase-divergent', 'phasedivergent'),
1551 ('phase-divergent', 'phasedivergent'),
1537 ('content-divergent', 'contentdivergent'),
1552 ('content-divergent', 'contentdivergent'),
1538 ]
1553 ]
1539
1554
1540 def getinstabilitycounts(repo):
1555 def getinstabilitycounts(repo):
1541 filtered = repo.changelog.filteredrevs
1556 filtered = repo.changelog.filteredrevs
1542 counts = {}
1557 counts = {}
1543 for instability, revset in instabilitytypes:
1558 for instability, revset in instabilitytypes:
1544 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1559 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1545 filtered)
1560 filtered)
1546 return counts
1561 return counts
1547
1562
1548 oldinstabilitycounts = getinstabilitycounts(repo)
1563 oldinstabilitycounts = getinstabilitycounts(repo)
1549 @reportsummary
1564 @reportsummary
1550 def reportnewinstabilities(repo, tr):
1565 def reportnewinstabilities(repo, tr):
1551 newinstabilitycounts = getinstabilitycounts(repo)
1566 newinstabilitycounts = getinstabilitycounts(repo)
1552 for instability, revset in instabilitytypes:
1567 for instability, revset in instabilitytypes:
1553 delta = (newinstabilitycounts[instability] -
1568 delta = (newinstabilitycounts[instability] -
1554 oldinstabilitycounts[instability])
1569 oldinstabilitycounts[instability])
1555 msg = getinstabilitymessage(delta, instability)
1570 msg = getinstabilitymessage(delta, instability)
1556 if msg:
1571 if msg:
1557 repo.ui.warn(msg)
1572 repo.ui.warn(msg)
1558
1573
1559 if txmatch(_reportnewcssource):
1574 if txmatch(_reportnewcssource):
1560 @reportsummary
1575 @reportsummary
1561 def reportnewcs(repo, tr):
1576 def reportnewcs(repo, tr):
1562 """Report the range of new revisions pulled/unbundled."""
1577 """Report the range of new revisions pulled/unbundled."""
1563 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1578 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1564 if not newrevs:
1579 if not newrevs:
1565 return
1580 return
1566
1581
1567 # Compute the bounds of new revisions' range, excluding obsoletes.
1582 # Compute the bounds of new revisions' range, excluding obsoletes.
1568 unfi = repo.unfiltered()
1583 unfi = repo.unfiltered()
1569 revs = unfi.revs('%ld and not obsolete()', newrevs)
1584 revs = unfi.revs('%ld and not obsolete()', newrevs)
1570 if not revs:
1585 if not revs:
1571 # Got only obsoletes.
1586 # Got only obsoletes.
1572 return
1587 return
1573 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1588 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1574
1589
1575 if minrev == maxrev:
1590 if minrev == maxrev:
1576 revrange = minrev
1591 revrange = minrev
1577 else:
1592 else:
1578 revrange = '%s:%s' % (minrev, maxrev)
1593 revrange = '%s:%s' % (minrev, maxrev)
1579 repo.ui.status(_('new changesets %s\n') % revrange)
1594 repo.ui.status(_('new changesets %s\n') % revrange)
1580
1595
1581 @reportsummary
1596 @reportsummary
1582 def reportphasechanges(repo, tr):
1597 def reportphasechanges(repo, tr):
1583 """Report statistics of phase changes for changesets pre-existing
1598 """Report statistics of phase changes for changesets pre-existing
1584 pull/unbundle.
1599 pull/unbundle.
1585 """
1600 """
1586 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1601 newrevs = tr.changes.get('revs', pycompat.xrange(0, 0))
1587 phasetracking = tr.changes.get('phases', {})
1602 phasetracking = tr.changes.get('phases', {})
1588 if not phasetracking:
1603 if not phasetracking:
1589 return
1604 return
1590 published = [
1605 published = [
1591 rev for rev, (old, new) in phasetracking.iteritems()
1606 rev for rev, (old, new) in phasetracking.iteritems()
1592 if new == phases.public and rev not in newrevs
1607 if new == phases.public and rev not in newrevs
1593 ]
1608 ]
1594 if not published:
1609 if not published:
1595 return
1610 return
1596 repo.ui.status(_('%d local changesets published\n')
1611 repo.ui.status(_('%d local changesets published\n')
1597 % len(published))
1612 % len(published))
1598
1613
1599 def getinstabilitymessage(delta, instability):
1614 def getinstabilitymessage(delta, instability):
1600 """function to return the message to show warning about new instabilities
1615 """function to return the message to show warning about new instabilities
1601
1616
1602 exists as a separate function so that extension can wrap to show more
1617 exists as a separate function so that extension can wrap to show more
1603 information like how to fix instabilities"""
1618 information like how to fix instabilities"""
1604 if delta > 0:
1619 if delta > 0:
1605 return _('%i new %s changesets\n') % (delta, instability)
1620 return _('%i new %s changesets\n') % (delta, instability)
1606
1621
1607 def nodesummaries(repo, nodes, maxnumnodes=4):
1622 def nodesummaries(repo, nodes, maxnumnodes=4):
1608 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1623 if len(nodes) <= maxnumnodes or repo.ui.verbose:
1609 return ' '.join(short(h) for h in nodes)
1624 return ' '.join(short(h) for h in nodes)
1610 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1625 first = ' '.join(short(h) for h in nodes[:maxnumnodes])
1611 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1626 return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
1612
1627
1613 def enforcesinglehead(repo, tr, desc):
1628 def enforcesinglehead(repo, tr, desc):
1614 """check that no named branch has multiple heads"""
1629 """check that no named branch has multiple heads"""
1615 if desc in ('strip', 'repair'):
1630 if desc in ('strip', 'repair'):
1616 # skip the logic during strip
1631 # skip the logic during strip
1617 return
1632 return
1618 visible = repo.filtered('visible')
1633 visible = repo.filtered('visible')
1619 # possible improvement: we could restrict the check to the affected branches
1634 # possible improvement: we could restrict the check to the affected branches
1620 for name, heads in visible.branchmap().iteritems():
1635 for name, heads in visible.branchmap().iteritems():
1621 if len(heads) > 1:
1636 if len(heads) > 1:
1622 msg = _('rejecting multiple heads on branch "%s"')
1637 msg = _('rejecting multiple heads on branch "%s"')
1623 msg %= name
1638 msg %= name
1624 hint = _('%d heads: %s')
1639 hint = _('%d heads: %s')
1625 hint %= (len(heads), nodesummaries(repo, heads))
1640 hint %= (len(heads), nodesummaries(repo, heads))
1626 raise error.Abort(msg, hint=hint)
1641 raise error.Abort(msg, hint=hint)
1627
1642
1628 def wrapconvertsink(sink):
1643 def wrapconvertsink(sink):
1629 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1644 """Allow extensions to wrap the sink returned by convcmd.convertsink()
1630 before it is used, whether or not the convert extension was formally loaded.
1645 before it is used, whether or not the convert extension was formally loaded.
1631 """
1646 """
1632 return sink
1647 return sink
1633
1648
1634 def unhidehashlikerevs(repo, specs, hiddentype):
1649 def unhidehashlikerevs(repo, specs, hiddentype):
1635 """parse the user specs and unhide changesets whose hash or revision number
1650 """parse the user specs and unhide changesets whose hash or revision number
1636 is passed.
1651 is passed.
1637
1652
1638 hiddentype can be: 1) 'warn': warn while unhiding changesets
1653 hiddentype can be: 1) 'warn': warn while unhiding changesets
1639 2) 'nowarn': don't warn while unhiding changesets
1654 2) 'nowarn': don't warn while unhiding changesets
1640
1655
1641 returns a repo object with the required changesets unhidden
1656 returns a repo object with the required changesets unhidden
1642 """
1657 """
1643 if not repo.filtername or not repo.ui.configbool('experimental',
1658 if not repo.filtername or not repo.ui.configbool('experimental',
1644 'directaccess'):
1659 'directaccess'):
1645 return repo
1660 return repo
1646
1661
1647 if repo.filtername not in ('visible', 'visible-hidden'):
1662 if repo.filtername not in ('visible', 'visible-hidden'):
1648 return repo
1663 return repo
1649
1664
1650 symbols = set()
1665 symbols = set()
1651 for spec in specs:
1666 for spec in specs:
1652 try:
1667 try:
1653 tree = revsetlang.parse(spec)
1668 tree = revsetlang.parse(spec)
1654 except error.ParseError: # will be reported by scmutil.revrange()
1669 except error.ParseError: # will be reported by scmutil.revrange()
1655 continue
1670 continue
1656
1671
1657 symbols.update(revsetlang.gethashlikesymbols(tree))
1672 symbols.update(revsetlang.gethashlikesymbols(tree))
1658
1673
1659 if not symbols:
1674 if not symbols:
1660 return repo
1675 return repo
1661
1676
1662 revs = _getrevsfromsymbols(repo, symbols)
1677 revs = _getrevsfromsymbols(repo, symbols)
1663
1678
1664 if not revs:
1679 if not revs:
1665 return repo
1680 return repo
1666
1681
1667 if hiddentype == 'warn':
1682 if hiddentype == 'warn':
1668 unfi = repo.unfiltered()
1683 unfi = repo.unfiltered()
1669 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1684 revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
1670 repo.ui.warn(_("warning: accessing hidden changesets for write "
1685 repo.ui.warn(_("warning: accessing hidden changesets for write "
1671 "operation: %s\n") % revstr)
1686 "operation: %s\n") % revstr)
1672
1687
1673 # we have to use a new filtername to separate the branch/tags caches until
1688 # we have to use a new filtername to separate the branch/tags caches until
1674 # we can disable these caches when revisions are dynamically pinned.
1689 # we can disable these caches when revisions are dynamically pinned.
1675 return repo.filtered('visible-hidden', revs)
1690 return repo.filtered('visible-hidden', revs)
1676
1691
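# Usage sketch: direct access is gated behind configuration like
#
#   [experimental]
#   directaccess = True
#
# after which a command can pin hidden changesets named by the user
# (the spec below is illustrative):
def _unhidehashlikerevsexample(repo):
    return unhidehashlikerevs(repo, ['1f0dee641bb7'], 'warn')
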
1677 def _getrevsfromsymbols(repo, symbols):
1692 def _getrevsfromsymbols(repo, symbols):
1678 """parse the list of symbols and returns a set of revision numbers of hidden
1693 """parse the list of symbols and returns a set of revision numbers of hidden
1679 changesets present in symbols"""
1694 changesets present in symbols"""
1680 revs = set()
1695 revs = set()
1681 unfi = repo.unfiltered()
1696 unfi = repo.unfiltered()
1682 unficl = unfi.changelog
1697 unficl = unfi.changelog
1683 cl = repo.changelog
1698 cl = repo.changelog
1684 tiprev = len(unficl)
1699 tiprev = len(unficl)
1685 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1700 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1686 for s in symbols:
1701 for s in symbols:
1687 try:
1702 try:
1688 n = int(s)
1703 n = int(s)
1689 if n <= tiprev:
1704 if n <= tiprev:
1690 if not allowrevnums:
1705 if not allowrevnums:
1691 continue
1706 continue
1692 else:
1707 else:
1693 if n not in cl:
1708 if n not in cl:
1694 revs.add(n)
1709 revs.add(n)
1695 continue
1710 continue
1696 except ValueError:
1711 except ValueError:
1697 pass
1712 pass
1698
1713
1699 try:
1714 try:
1700 s = resolvehexnodeidprefix(unfi, s)
1715 s = resolvehexnodeidprefix(unfi, s)
1701 except (error.LookupError, error.WdirUnsupported):
1716 except (error.LookupError, error.WdirUnsupported):
1702 s = None
1717 s = None
1703
1718
1704 if s is not None:
1719 if s is not None:
1705 rev = unficl.rev(s)
1720 rev = unficl.rev(s)
1706 if rev not in cl:
1721 if rev not in cl:
1707 revs.add(rev)
1722 revs.add(rev)
1708
1723
1709 return revs
1724 return revs
1710
1725
1711 def bookmarkrevs(repo, mark):
1726 def bookmarkrevs(repo, mark):
1712 """
1727 """
1713 Select revisions reachable by a given bookmark
1728 Select revisions reachable by a given bookmark
1714 """
1729 """
1715 return repo.revs("ancestors(bookmark(%s)) - "
1730 return repo.revs("ancestors(bookmark(%s)) - "
1716 "ancestors(head() and not bookmark(%s)) - "
1731 "ancestors(head() and not bookmark(%s)) - "
1717 "ancestors(bookmark() and not bookmark(%s))",
1732 "ancestors(bookmark() and not bookmark(%s))",
1718 mark, mark, mark)
1733 mark, mark, mark)
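# Usage sketch (illustrative bookmark name): the revset above selects the
# changesets "owned" by the bookmark, stopping at other heads and bookmarks.
def _bookmarkrevsexample(repo):
    return bookmarkrevs(repo, 'feature-x')
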
@@ -1,37 +1,43 b''
1 $ hg init repo
1 $ hg init repo
2 $ cd repo
2 $ cd repo
3
3
4 $ echo 0 > a
4 $ echo 0 > a
5 $ hg ci -qAm 0
5 $ hg ci -qAm 0
6 $ for i in 5 8 14 43; do
6 $ for i in 5 8 14 43; do
7 > hg up -q 0
7 > hg up -q 0
8 > echo $i > a
8 > echo $i > a
9 > hg ci -qm $i
9 > hg ci -qm $i
10 > done
10 > done
11 $ cat <<EOF >> .hg/hgrc
11 $ cat <<EOF >> .hg/hgrc
12 > [alias]
12 > [alias]
13 > l = log -T '{rev}:{shortest(node,1)}\n'
13 > l = log -T '{rev}:{shortest(node,1)}\n'
14 > EOF
14 > EOF
15
15
16 $ hg l
16 $ hg l
17 4:7ba5d
17 4:7ba5d
18 3:7ba57
18 3:7ba57
19 2:72
19 2:72
20 1:9
20 1:9
21 0:b
21 0:b
22 $ cat <<EOF >> .hg/hgrc
22 $ cat <<EOF >> .hg/hgrc
23 > [experimental]
23 > [experimental]
24 > revisions.disambiguatewithin=:3
24 > revisions.disambiguatewithin=:3
25 > EOF
25 > EOF
26 $ hg l
27 4:7ba5d
28 3:7b
29 2:72
30 1:9
31 0:b
26 9 was unambiguous and still is
32 9 was unambiguous and still is
27 $ hg l -r 9
33 $ hg l -r 9
28 1:9
34 1:9
29 7 was ambiguous and still is
35 7 was ambiguous and still is
30 $ hg l -r 7
36 $ hg l -r 7
31 abort: 00changelog.i@7: ambiguous identifier!
37 abort: 00changelog.i@7: ambiguous identifier!
32 [255]
38 [255]
33 7b is no longer ambiguous
39 7b is no longer ambiguous
34 $ hg l -r 7b
40 $ hg l -r 7b
35 3:7ba57
41 3:7b
36
42
37 $ cd ..
43 $ cd ..