repo: look up nullrev context by revnum, not symbolic name...
Martin von Zweigbergk - r39930:d739f423 default
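Editorial note: the patch makes archival.py's _rootctx() look up the null changeset by its revision number instead of the reserved symbolic name 'null', and adds nullrev to scmutil.py's node imports. A minimal sketch of the two lookup forms, assuming a standard Mercurial repository object (illustrative only, not part of the patch):

    from mercurial.node import nullrev

    # before: resolves the reserved name 'null' through the symbolic-name
    # lookup path (string parsing and name resolution)
    ctx = repo['null']

    # after: indexes the changelog directly by revision number (nullrev == -1),
    # bypassing symbolic-name resolution
    ctx = repo[nullrev]

Both expressions return the changectx of the empty null revision; the revnum form is the more direct lookup.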
--- a/mercurial/archival.py
+++ b/mercurial/archival.py
@@ -1,344 +1,347 @@
 # archival.py - revision archival for mercurial
 #
 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import gzip
 import os
 import struct
 import tarfile
 import time
 import zipfile
 import zlib
 
 from .i18n import _
+from .node import (
+    nullrev,
+)
 
 from . import (
     error,
     formatter,
     match as matchmod,
     pycompat,
     scmutil,
     util,
     vfs as vfsmod,
 )
 stringio = util.stringio
 
 # from unzip source code:
 _UNX_IFREG = 0x8000
 _UNX_IFLNK = 0xa000
 
 def tidyprefix(dest, kind, prefix):
     '''choose prefix to use for names in archive. make sure prefix is
     safe for consumers.'''
 
     if prefix:
         prefix = util.normpath(prefix)
     else:
         if not isinstance(dest, bytes):
             raise ValueError('dest must be string if no prefix')
         prefix = os.path.basename(dest)
         lower = prefix.lower()
         for sfx in exts.get(kind, []):
             if lower.endswith(sfx):
                 prefix = prefix[:-len(sfx)]
                 break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
     if not prefix.endswith('/'):
         prefix += '/'
     # Drop the leading '.' path component if present, so Windows can read the
     # zip files (issue4634)
     if prefix.startswith('./'):
         prefix = prefix[2:]
     if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
         raise error.Abort(_('archive prefix contains illegal components'))
     return prefix
 
 exts = {
     'tar': ['.tar'],
     'tbz2': ['.tbz2', '.tar.bz2'],
     'tgz': ['.tgz', '.tar.gz'],
     'zip': ['.zip'],
     }
 
 def guesskind(dest):
     for kind, extensions in exts.iteritems():
         if any(dest.endswith(ext) for ext in extensions):
             return kind
     return None
 
 def _rootctx(repo):
     # repo[0] may be hidden
     for rev in repo:
         return repo[rev]
-    return repo['null']
+    return repo[nullrev]
 
 # {tags} on ctx includes local tags and 'tip', with no current way to limit
 # that to global tags. Therefore, use {latesttag} as a substitute when
 # the distance is 0, since that will be the list of global tags on ctx.
 _defaultmetatemplate = br'''
 repo: {root}
 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
 branch: {branch|utf8}
 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
       separate("\n",
                join(latesttag % "latesttag: {tag}", "\n"),
                "latesttagdistance: {latesttagdistance}",
                "changessincelatesttag: {changessincelatesttag}"))}
 '''[1:] # drop leading '\n'
 
 def buildmetadata(ctx):
     '''build content of .hg_archival.txt'''
     repo = ctx.repo()
 
     opts = {
         'template': repo.ui.config('experimental', 'archivemetatemplate',
                                    _defaultmetatemplate)
     }
 
     out = util.stringio()
 
     fm = formatter.formatter(repo.ui, out, 'archive', opts)
     fm.startitem()
     fm.context(ctx=ctx)
     fm.data(root=_rootctx(repo).hex())
 
     if ctx.rev() is None:
         dirty = ''
         if ctx.dirty(missing=True):
             dirty = '+'
         fm.data(dirty=dirty)
     fm.end()
 
     return out.getvalue()
 
 class tarit(object):
     '''write archive to tar file or stream. can write uncompressed,
     or compress with gzip or bzip2.'''
 
     class GzipFileWithTime(gzip.GzipFile):
 
         def __init__(self, *args, **kw):
             timestamp = None
             if r'timestamp' in kw:
                 timestamp = kw.pop(r'timestamp')
             if timestamp is None:
                 self.timestamp = time.time()
             else:
                 self.timestamp = timestamp
             gzip.GzipFile.__init__(self, *args, **kw)
 
         def _write_gzip_header(self):
             self.fileobj.write('\037\213')             # magic header
             self.fileobj.write('\010')                 # compression method
             fname = self.name
             if fname and fname.endswith('.gz'):
                 fname = fname[:-3]
             flags = 0
             if fname:
                 flags = gzip.FNAME
             self.fileobj.write(pycompat.bytechr(flags))
             gzip.write32u(self.fileobj, int(self.timestamp))
             self.fileobj.write('\002')
             self.fileobj.write('\377')
             if fname:
                 self.fileobj.write(fname + '\000')
 
     def __init__(self, dest, mtime, kind=''):
         self.mtime = mtime
         self.fileobj = None
 
         def taropen(mode, name='', fileobj=None):
             if kind == 'gz':
                 mode = mode[0:1]
                 if not fileobj:
                     fileobj = open(name, mode + 'b')
                 gzfileobj = self.GzipFileWithTime(name,
                                                   pycompat.sysstr(mode + 'b'),
                                                   zlib.Z_BEST_COMPRESSION,
                                                   fileobj, timestamp=mtime)
                 self.fileobj = gzfileobj
                 return tarfile.TarFile.taropen(
                     name, pycompat.sysstr(mode), gzfileobj)
             else:
                 return tarfile.open(
                     name, pycompat.sysstr(mode + kind), fileobj)
 
         if isinstance(dest, bytes):
             self.z = taropen('w:', name=dest)
         else:
             self.z = taropen('w|', fileobj=dest)
 
     def addfile(self, name, mode, islink, data):
         name = pycompat.fsdecode(name)
         i = tarfile.TarInfo(name)
         i.mtime = self.mtime
         i.size = len(data)
         if islink:
             i.type = tarfile.SYMTYPE
             i.mode = 0o777
             i.linkname = pycompat.fsdecode(data)
             data = None
             i.size = 0
         else:
             i.mode = mode
             data = stringio(data)
         self.z.addfile(i, data)
 
     def done(self):
         self.z.close()
         if self.fileobj:
             self.fileobj.close()
 
 class zipit(object):
     '''write archive to zip file or stream. can write uncompressed,
     or compressed with deflate.'''
 
     def __init__(self, dest, mtime, compress=True):
         self.z = zipfile.ZipFile(pycompat.fsdecode(dest), r'w',
                                  compress and zipfile.ZIP_DEFLATED or
                                  zipfile.ZIP_STORED)
 
         # Python's zipfile module emits deprecation warnings if we try
         # to store files with a date before 1980.
         epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
         if mtime < epoch:
             mtime = epoch
 
         self.mtime = mtime
         self.date_time = time.gmtime(mtime)[:6]
 
     def addfile(self, name, mode, islink, data):
         i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
         i.compress_type = self.z.compression
         # unzip will not honor unix file modes unless file creator is
         # set to unix (id 3).
         i.create_system = 3
         ftype = _UNX_IFREG
         if islink:
             mode = 0o777
             ftype = _UNX_IFLNK
         i.external_attr = (mode | ftype) << 16
         # add "extended-timestamp" extra block, because zip archives
         # without this will be extracted with unexpected timestamp,
         # if TZ is not configured as GMT
         i.extra += struct.pack('<hhBl',
                                0x5455,     # block type: "extended-timestamp"
                                1 + 4,      # size of this block
                                1,          # "modification time is present"
                                int(self.mtime)) # last modification (UTC)
         self.z.writestr(i, data)
 
     def done(self):
         self.z.close()
 
 class fileit(object):
     '''write archive as files in directory.'''
 
     def __init__(self, name, mtime):
         self.basedir = name
         self.opener = vfsmod.vfs(self.basedir)
         self.mtime = mtime
 
     def addfile(self, name, mode, islink, data):
         if islink:
             self.opener.symlink(data, name)
             return
         f = self.opener(name, "w", atomictemp=False)
         f.write(data)
         f.close()
         destfile = os.path.join(self.basedir, name)
         os.chmod(destfile, mode)
         if self.mtime is not None:
             os.utime(destfile, (self.mtime, self.mtime))
 
     def done(self):
         pass
 
 archivers = {
     'files': fileit,
     'tar': tarit,
     'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
     'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
     'uzip': lambda name, mtime: zipit(name, mtime, False),
     'zip': zipit,
     }
 
 def archive(repo, dest, node, kind, decode=True, matchfn=None,
             prefix='', mtime=None, subrepos=False):
     '''create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
 
     kind is type of archive to create.
 
     decode tells whether to put files through decode filters from
     hgrc.
 
     matchfn is function to filter names of files to write to archive.
 
     prefix is name of path to put before every archive member.
 
     mtime is the modified time, in seconds, or None to use the changeset time.
 
     subrepos tells whether to include subrepos.
     '''
 
     if kind == 'files':
         if prefix:
             raise error.Abort(_('cannot give prefix when archiving to files'))
     else:
         prefix = tidyprefix(dest, kind, prefix)
 
     def write(name, mode, islink, getdata):
         data = getdata()
         if decode:
             data = repo.wwritedata(name, data)
         archiver.addfile(prefix + name, mode, islink, data)
 
     if kind not in archivers:
         raise error.Abort(_("unknown archive type '%s'") % kind)
 
     ctx = repo[node]
     archiver = archivers[kind](dest, mtime or ctx.date()[0])
 
     if repo.ui.configbool("ui", "archivemeta"):
         name = '.hg_archival.txt'
         if not matchfn or matchfn(name):
             write(name, 0o644, False, lambda: buildmetadata(ctx))
 
     if matchfn:
         files = [f for f in ctx.manifest().keys() if matchfn(f)]
     else:
         files = ctx.manifest().keys()
     total = len(files)
     if total:
         files.sort()
         scmutil.prefetchfiles(repo, [ctx.rev()],
                               scmutil.matchfiles(repo, files))
         progress = scmutil.progress(repo.ui, _('archiving'), unit=_('files'),
                                     total=total)
         progress.update(0)
         for f in files:
             ff = ctx.flags(f)
             write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
             progress.increment(item=f)
         progress.complete()
 
     if subrepos:
         for subpath in sorted(ctx.substate):
             sub = ctx.workingsub(subpath)
             submatch = matchmod.subdirmatcher(subpath, matchfn)
             total += sub.archive(archiver, prefix, submatch, decode)
 
     if total == 0:
         raise error.Abort(_('no files match the archive pattern'))
 
     archiver.done()
     return total
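For reference, archive() above is the entry point these helpers serve; a minimal, hypothetical usage sketch per its docstring (the ui/repo construction, repository path, output name, and prefix are illustrative assumptions, not part of the patch):

    from mercurial import archival, hg, ui as uimod

    repo = hg.repository(uimod.ui.load(), b'/path/to/repo')  # hypothetical path
    with open('project.tar.gz', 'wb') as f:
        # stream a gzipped tarball of the tip revision to the open file object;
        # a prefix is required when dest is a stream rather than a filename
        archival.archive(repo, f, b'tip', b'tgz', prefix=b'project-tip/')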
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -1,1790 +1,1791 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 short,
25 short,
25 wdirid,
26 wdirid,
26 wdirrev,
27 wdirrev,
27 )
28 )
28
29
29 from . import (
30 from . import (
30 encoding,
31 encoding,
31 error,
32 error,
32 match as matchmod,
33 match as matchmod,
33 obsolete,
34 obsolete,
34 obsutil,
35 obsutil,
35 pathutil,
36 pathutil,
36 phases,
37 phases,
37 policy,
38 policy,
38 pycompat,
39 pycompat,
39 revsetlang,
40 revsetlang,
40 similar,
41 similar,
41 url,
42 url,
42 util,
43 util,
43 vfs,
44 vfs,
44 )
45 )
45
46
46 from .utils import (
47 from .utils import (
47 procutil,
48 procutil,
48 stringutil,
49 stringutil,
49 )
50 )
50
51
51 if pycompat.iswindows:
52 if pycompat.iswindows:
52 from . import scmwindows as scmplatform
53 from . import scmwindows as scmplatform
53 else:
54 else:
54 from . import scmposix as scmplatform
55 from . import scmposix as scmplatform
55
56
56 parsers = policy.importmod(r'parsers')
57 parsers = policy.importmod(r'parsers')
57
58
58 termsize = scmplatform.termsize
59 termsize = scmplatform.termsize
59
60
60 class status(tuple):
61 class status(tuple):
61 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
62 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
62 and 'ignored' properties are only relevant to the working copy.
63 and 'ignored' properties are only relevant to the working copy.
63 '''
64 '''
64
65
65 __slots__ = ()
66 __slots__ = ()
66
67
67 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
68 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
68 clean):
69 clean):
69 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
70 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
70 ignored, clean))
71 ignored, clean))
71
72
72 @property
73 @property
73 def modified(self):
74 def modified(self):
74 '''files that have been modified'''
75 '''files that have been modified'''
75 return self[0]
76 return self[0]
76
77
77 @property
78 @property
78 def added(self):
79 def added(self):
79 '''files that have been added'''
80 '''files that have been added'''
80 return self[1]
81 return self[1]
81
82
82 @property
83 @property
83 def removed(self):
84 def removed(self):
84 '''files that have been removed'''
85 '''files that have been removed'''
85 return self[2]
86 return self[2]
86
87
87 @property
88 @property
88 def deleted(self):
89 def deleted(self):
89 '''files that are in the dirstate, but have been deleted from the
90 '''files that are in the dirstate, but have been deleted from the
90 working copy (aka "missing")
91 working copy (aka "missing")
91 '''
92 '''
92 return self[3]
93 return self[3]
93
94
94 @property
95 @property
95 def unknown(self):
96 def unknown(self):
96 '''files not in the dirstate that are not ignored'''
97 '''files not in the dirstate that are not ignored'''
97 return self[4]
98 return self[4]
98
99
99 @property
100 @property
100 def ignored(self):
101 def ignored(self):
101 '''files not in the dirstate that are ignored (by _dirignore())'''
102 '''files not in the dirstate that are ignored (by _dirignore())'''
102 return self[5]
103 return self[5]
103
104
104 @property
105 @property
105 def clean(self):
106 def clean(self):
106 '''files that have not been modified'''
107 '''files that have not been modified'''
107 return self[6]
108 return self[6]
108
109
109 def __repr__(self, *args, **kwargs):
110 def __repr__(self, *args, **kwargs):
110 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
111 return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
111 r'unknown=%s, ignored=%s, clean=%s>') %
112 r'unknown=%s, ignored=%s, clean=%s>') %
112 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
113 tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
113
114
114 def itersubrepos(ctx1, ctx2):
115 def itersubrepos(ctx1, ctx2):
115 """find subrepos in ctx1 or ctx2"""
116 """find subrepos in ctx1 or ctx2"""
116 # Create a (subpath, ctx) mapping where we prefer subpaths from
117 # Create a (subpath, ctx) mapping where we prefer subpaths from
117 # ctx1. The subpaths from ctx2 are important when the .hgsub file
118 # ctx1. The subpaths from ctx2 are important when the .hgsub file
118 # has been modified (in ctx2) but not yet committed (in ctx1).
119 # has been modified (in ctx2) but not yet committed (in ctx1).
119 subpaths = dict.fromkeys(ctx2.substate, ctx2)
120 subpaths = dict.fromkeys(ctx2.substate, ctx2)
120 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
121 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
121
122
122 missing = set()
123 missing = set()
123
124
124 for subpath in ctx2.substate:
125 for subpath in ctx2.substate:
125 if subpath not in ctx1.substate:
126 if subpath not in ctx1.substate:
126 del subpaths[subpath]
127 del subpaths[subpath]
127 missing.add(subpath)
128 missing.add(subpath)
128
129
129 for subpath, ctx in sorted(subpaths.iteritems()):
130 for subpath, ctx in sorted(subpaths.iteritems()):
130 yield subpath, ctx.sub(subpath)
131 yield subpath, ctx.sub(subpath)
131
132
132 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
133 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
133 # status and diff will have an accurate result when it does
134 # status and diff will have an accurate result when it does
134 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
135 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
135 # against itself.
136 # against itself.
136 for subpath in missing:
137 for subpath in missing:
137 yield subpath, ctx2.nullsub(subpath, ctx1)
138 yield subpath, ctx2.nullsub(subpath, ctx1)
138
139
139 def nochangesfound(ui, repo, excluded=None):
140 def nochangesfound(ui, repo, excluded=None):
140 '''Report no changes for push/pull, excluded is None or a list of
141 '''Report no changes for push/pull, excluded is None or a list of
141 nodes excluded from the push/pull.
142 nodes excluded from the push/pull.
142 '''
143 '''
143 secretlist = []
144 secretlist = []
144 if excluded:
145 if excluded:
145 for n in excluded:
146 for n in excluded:
146 ctx = repo[n]
147 ctx = repo[n]
147 if ctx.phase() >= phases.secret and not ctx.extinct():
148 if ctx.phase() >= phases.secret and not ctx.extinct():
148 secretlist.append(n)
149 secretlist.append(n)
149
150
150 if secretlist:
151 if secretlist:
151 ui.status(_("no changes found (ignored %d secret changesets)\n")
152 ui.status(_("no changes found (ignored %d secret changesets)\n")
152 % len(secretlist))
153 % len(secretlist))
153 else:
154 else:
154 ui.status(_("no changes found\n"))
155 ui.status(_("no changes found\n"))
155
156
156 def callcatch(ui, func):
157 def callcatch(ui, func):
157 """call func() with global exception handling
158 """call func() with global exception handling
158
159
159 return func() if no exception happens. otherwise do some error handling
160 return func() if no exception happens. otherwise do some error handling
160 and return an exit code accordingly. does not handle all exceptions.
161 and return an exit code accordingly. does not handle all exceptions.
161 """
162 """
162 try:
163 try:
163 try:
164 try:
164 return func()
165 return func()
165 except: # re-raises
166 except: # re-raises
166 ui.traceback()
167 ui.traceback()
167 raise
168 raise
168 # Global exception handling, alphabetically
169 # Global exception handling, alphabetically
169 # Mercurial-specific first, followed by built-in and library exceptions
170 # Mercurial-specific first, followed by built-in and library exceptions
170 except error.LockHeld as inst:
171 except error.LockHeld as inst:
171 if inst.errno == errno.ETIMEDOUT:
172 if inst.errno == errno.ETIMEDOUT:
172 reason = _('timed out waiting for lock held by %r') % inst.locker
173 reason = _('timed out waiting for lock held by %r') % inst.locker
173 else:
174 else:
174 reason = _('lock held by %r') % inst.locker
175 reason = _('lock held by %r') % inst.locker
175 ui.error(_("abort: %s: %s\n") % (
176 ui.error(_("abort: %s: %s\n") % (
176 inst.desc or stringutil.forcebytestr(inst.filename), reason))
177 inst.desc or stringutil.forcebytestr(inst.filename), reason))
177 if not inst.locker:
178 if not inst.locker:
178 ui.error(_("(lock might be very busy)\n"))
179 ui.error(_("(lock might be very busy)\n"))
179 except error.LockUnavailable as inst:
180 except error.LockUnavailable as inst:
180 ui.error(_("abort: could not lock %s: %s\n") %
181 ui.error(_("abort: could not lock %s: %s\n") %
181 (inst.desc or stringutil.forcebytestr(inst.filename),
182 (inst.desc or stringutil.forcebytestr(inst.filename),
182 encoding.strtolocal(inst.strerror)))
183 encoding.strtolocal(inst.strerror)))
183 except error.OutOfBandError as inst:
184 except error.OutOfBandError as inst:
184 if inst.args:
185 if inst.args:
185 msg = _("abort: remote error:\n")
186 msg = _("abort: remote error:\n")
186 else:
187 else:
187 msg = _("abort: remote error\n")
188 msg = _("abort: remote error\n")
188 ui.error(msg)
189 ui.error(msg)
189 if inst.args:
190 if inst.args:
190 ui.error(''.join(inst.args))
191 ui.error(''.join(inst.args))
191 if inst.hint:
192 if inst.hint:
192 ui.error('(%s)\n' % inst.hint)
193 ui.error('(%s)\n' % inst.hint)
193 except error.RepoError as inst:
194 except error.RepoError as inst:
194 ui.error(_("abort: %s!\n") % inst)
195 ui.error(_("abort: %s!\n") % inst)
195 if inst.hint:
196 if inst.hint:
196 ui.error(_("(%s)\n") % inst.hint)
197 ui.error(_("(%s)\n") % inst.hint)
197 except error.ResponseError as inst:
198 except error.ResponseError as inst:
198 ui.error(_("abort: %s") % inst.args[0])
199 ui.error(_("abort: %s") % inst.args[0])
199 msg = inst.args[1]
200 msg = inst.args[1]
200 if isinstance(msg, type(u'')):
201 if isinstance(msg, type(u'')):
201 msg = pycompat.sysbytes(msg)
202 msg = pycompat.sysbytes(msg)
202 if not isinstance(msg, bytes):
203 if not isinstance(msg, bytes):
203 ui.error(" %r\n" % (msg,))
204 ui.error(" %r\n" % (msg,))
204 elif not msg:
205 elif not msg:
205 ui.error(_(" empty string\n"))
206 ui.error(_(" empty string\n"))
206 else:
207 else:
207 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
208 ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
208 except error.CensoredNodeError as inst:
209 except error.CensoredNodeError as inst:
209 ui.error(_("abort: file censored %s!\n") % inst)
210 ui.error(_("abort: file censored %s!\n") % inst)
210 except error.StorageError as inst:
211 except error.StorageError as inst:
211 ui.error(_("abort: %s!\n") % inst)
212 ui.error(_("abort: %s!\n") % inst)
212 except error.InterventionRequired as inst:
213 except error.InterventionRequired as inst:
213 ui.error("%s\n" % inst)
214 ui.error("%s\n" % inst)
214 if inst.hint:
215 if inst.hint:
215 ui.error(_("(%s)\n") % inst.hint)
216 ui.error(_("(%s)\n") % inst.hint)
216 return 1
217 return 1
217 except error.WdirUnsupported:
218 except error.WdirUnsupported:
218 ui.error(_("abort: working directory revision cannot be specified\n"))
219 ui.error(_("abort: working directory revision cannot be specified\n"))
219 except error.Abort as inst:
220 except error.Abort as inst:
220 ui.error(_("abort: %s\n") % inst)
221 ui.error(_("abort: %s\n") % inst)
221 if inst.hint:
222 if inst.hint:
222 ui.error(_("(%s)\n") % inst.hint)
223 ui.error(_("(%s)\n") % inst.hint)
223 except ImportError as inst:
224 except ImportError as inst:
224 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
225 ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
225 m = stringutil.forcebytestr(inst).split()[-1]
226 m = stringutil.forcebytestr(inst).split()[-1]
226 if m in "mpatch bdiff".split():
227 if m in "mpatch bdiff".split():
227 ui.error(_("(did you forget to compile extensions?)\n"))
228 ui.error(_("(did you forget to compile extensions?)\n"))
228 elif m in "zlib".split():
229 elif m in "zlib".split():
229 ui.error(_("(is your Python install correct?)\n"))
230 ui.error(_("(is your Python install correct?)\n"))
230 except IOError as inst:
231 except IOError as inst:
231 if util.safehasattr(inst, "code"):
232 if util.safehasattr(inst, "code"):
232 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
233 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
233 elif util.safehasattr(inst, "reason"):
234 elif util.safehasattr(inst, "reason"):
234 try: # usually it is in the form (errno, strerror)
235 try: # usually it is in the form (errno, strerror)
235 reason = inst.reason.args[1]
236 reason = inst.reason.args[1]
236 except (AttributeError, IndexError):
237 except (AttributeError, IndexError):
237 # it might be anything, for example a string
238 # it might be anything, for example a string
238 reason = inst.reason
239 reason = inst.reason
239 if isinstance(reason, pycompat.unicode):
240 if isinstance(reason, pycompat.unicode):
240 # SSLError of Python 2.7.9 contains a unicode
241 # SSLError of Python 2.7.9 contains a unicode
241 reason = encoding.unitolocal(reason)
242 reason = encoding.unitolocal(reason)
242 ui.error(_("abort: error: %s\n") % reason)
243 ui.error(_("abort: error: %s\n") % reason)
243 elif (util.safehasattr(inst, "args")
244 elif (util.safehasattr(inst, "args")
244 and inst.args and inst.args[0] == errno.EPIPE):
245 and inst.args and inst.args[0] == errno.EPIPE):
245 pass
246 pass
246 elif getattr(inst, "strerror", None):
247 elif getattr(inst, "strerror", None):
247 if getattr(inst, "filename", None):
248 if getattr(inst, "filename", None):
248 ui.error(_("abort: %s: %s\n") % (
249 ui.error(_("abort: %s: %s\n") % (
249 encoding.strtolocal(inst.strerror),
250 encoding.strtolocal(inst.strerror),
250 stringutil.forcebytestr(inst.filename)))
251 stringutil.forcebytestr(inst.filename)))
251 else:
252 else:
252 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
253 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
253 else:
254 else:
254 raise
255 raise
255 except OSError as inst:
256 except OSError as inst:
256 if getattr(inst, "filename", None) is not None:
257 if getattr(inst, "filename", None) is not None:
257 ui.error(_("abort: %s: '%s'\n") % (
258 ui.error(_("abort: %s: '%s'\n") % (
258 encoding.strtolocal(inst.strerror),
259 encoding.strtolocal(inst.strerror),
259 stringutil.forcebytestr(inst.filename)))
260 stringutil.forcebytestr(inst.filename)))
260 else:
261 else:
261 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
262 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
262 except MemoryError:
263 except MemoryError:
263 ui.error(_("abort: out of memory\n"))
264 ui.error(_("abort: out of memory\n"))
264 except SystemExit as inst:
265 except SystemExit as inst:
265 # Commands shouldn't sys.exit directly, but give a return code.
266 # Commands shouldn't sys.exit directly, but give a return code.
266 # Just in case catch this and and pass exit code to caller.
267 # Just in case catch this and and pass exit code to caller.
267 return inst.code
268 return inst.code
268 except socket.error as inst:
269 except socket.error as inst:
269 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
270 ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))
270
271
271 return -1
272 return -1
272
273
273 def checknewlabel(repo, lbl, kind):
274 def checknewlabel(repo, lbl, kind):
274 # Do not use the "kind" parameter in ui output.
275 # Do not use the "kind" parameter in ui output.
275 # It makes strings difficult to translate.
276 # It makes strings difficult to translate.
276 if lbl in ['tip', '.', 'null']:
277 if lbl in ['tip', '.', 'null']:
277 raise error.Abort(_("the name '%s' is reserved") % lbl)
278 raise error.Abort(_("the name '%s' is reserved") % lbl)
278 for c in (':', '\0', '\n', '\r'):
279 for c in (':', '\0', '\n', '\r'):
279 if c in lbl:
280 if c in lbl:
280 raise error.Abort(
281 raise error.Abort(
281 _("%r cannot be used in a name") % pycompat.bytestr(c))
282 _("%r cannot be used in a name") % pycompat.bytestr(c))
282 try:
283 try:
283 int(lbl)
284 int(lbl)
284 raise error.Abort(_("cannot use an integer as a name"))
285 raise error.Abort(_("cannot use an integer as a name"))
285 except ValueError:
286 except ValueError:
286 pass
287 pass
287 if lbl.strip() != lbl:
288 if lbl.strip() != lbl:
288 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
289 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
289
290
290 def checkfilename(f):
291 def checkfilename(f):
291 '''Check that the filename f is an acceptable filename for a tracked file'''
292 '''Check that the filename f is an acceptable filename for a tracked file'''
292 if '\r' in f or '\n' in f:
293 if '\r' in f or '\n' in f:
293 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
294 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
294 % pycompat.bytestr(f))
295 % pycompat.bytestr(f))
295
296
296 def checkportable(ui, f):
297 def checkportable(ui, f):
297 '''Check if filename f is portable and warn or abort depending on config'''
298 '''Check if filename f is portable and warn or abort depending on config'''
298 checkfilename(f)
299 checkfilename(f)
299 abort, warn = checkportabilityalert(ui)
300 abort, warn = checkportabilityalert(ui)
300 if abort or warn:
301 if abort or warn:
301 msg = util.checkwinfilename(f)
302 msg = util.checkwinfilename(f)
302 if msg:
303 if msg:
303 msg = "%s: %s" % (msg, procutil.shellquote(f))
304 msg = "%s: %s" % (msg, procutil.shellquote(f))
304 if abort:
305 if abort:
305 raise error.Abort(msg)
306 raise error.Abort(msg)
306 ui.warn(_("warning: %s\n") % msg)
307 ui.warn(_("warning: %s\n") % msg)
307
308
308 def checkportabilityalert(ui):
309 def checkportabilityalert(ui):
309 '''check if the user's config requests nothing, a warning, or abort for
310 '''check if the user's config requests nothing, a warning, or abort for
310 non-portable filenames'''
311 non-portable filenames'''
311 val = ui.config('ui', 'portablefilenames')
312 val = ui.config('ui', 'portablefilenames')
312 lval = val.lower()
313 lval = val.lower()
313 bval = stringutil.parsebool(val)
314 bval = stringutil.parsebool(val)
314 abort = pycompat.iswindows or lval == 'abort'
315 abort = pycompat.iswindows or lval == 'abort'
315 warn = bval or lval == 'warn'
316 warn = bval or lval == 'warn'
316 if bval is None and not (warn or abort or lval == 'ignore'):
317 if bval is None and not (warn or abort or lval == 'ignore'):
317 raise error.ConfigError(
318 raise error.ConfigError(
318 _("ui.portablefilenames value is invalid ('%s')") % val)
319 _("ui.portablefilenames value is invalid ('%s')") % val)
319 return abort, warn
320 return abort, warn
320
321
321 class casecollisionauditor(object):
322 class casecollisionauditor(object):
322 def __init__(self, ui, abort, dirstate):
323 def __init__(self, ui, abort, dirstate):
323 self._ui = ui
324 self._ui = ui
324 self._abort = abort
325 self._abort = abort
325 allfiles = '\0'.join(dirstate._map)
326 allfiles = '\0'.join(dirstate._map)
326 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
327 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
327 self._dirstate = dirstate
328 self._dirstate = dirstate
328 # The purpose of _newfiles is so that we don't complain about
329 # The purpose of _newfiles is so that we don't complain about
329 # case collisions if someone were to call this object with the
330 # case collisions if someone were to call this object with the
330 # same filename twice.
331 # same filename twice.
331 self._newfiles = set()
332 self._newfiles = set()
332
333
333 def __call__(self, f):
334 def __call__(self, f):
334 if f in self._newfiles:
335 if f in self._newfiles:
335 return
336 return
336 fl = encoding.lower(f)
337 fl = encoding.lower(f)
337 if fl in self._loweredfiles and f not in self._dirstate:
338 if fl in self._loweredfiles and f not in self._dirstate:
338 msg = _('possible case-folding collision for %s') % f
339 msg = _('possible case-folding collision for %s') % f
339 if self._abort:
340 if self._abort:
340 raise error.Abort(msg)
341 raise error.Abort(msg)
341 self._ui.warn(_("warning: %s\n") % msg)
342 self._ui.warn(_("warning: %s\n") % msg)
342 self._loweredfiles.add(fl)
343 self._loweredfiles.add(fl)
343 self._newfiles.add(f)
344 self._newfiles.add(f)
344
345
345 def filteredhash(repo, maxrev):
346 def filteredhash(repo, maxrev):
346 """build hash of filtered revisions in the current repoview.
347 """build hash of filtered revisions in the current repoview.
347
348
348 Multiple caches perform up-to-date validation by checking that the
349 Multiple caches perform up-to-date validation by checking that the
349 tiprev and tipnode stored in the cache file match the current repository.
350 tiprev and tipnode stored in the cache file match the current repository.
350 However, this is not sufficient for validating repoviews because the set
351 However, this is not sufficient for validating repoviews because the set
351 of revisions in the view may change without the repository tiprev and
352 of revisions in the view may change without the repository tiprev and
352 tipnode changing.
353 tipnode changing.
353
354
354 This function hashes all the revs filtered from the view and returns
355 This function hashes all the revs filtered from the view and returns
355 that SHA-1 digest.
356 that SHA-1 digest.
356 """
357 """
357 cl = repo.changelog
358 cl = repo.changelog
358 if not cl.filteredrevs:
359 if not cl.filteredrevs:
359 return None
360 return None
360 key = None
361 key = None
361 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
362 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
362 if revs:
363 if revs:
363 s = hashlib.sha1()
364 s = hashlib.sha1()
364 for rev in revs:
365 for rev in revs:
365 s.update('%d;' % rev)
366 s.update('%d;' % rev)
366 key = s.digest()
367 key = s.digest()
367 return key
368 return key
368
369
369 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
370 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
370 '''yield every hg repository under path, always recursively.
371 '''yield every hg repository under path, always recursively.
371 The recurse flag will only control recursion into repo working dirs'''
372 The recurse flag will only control recursion into repo working dirs'''
372 def errhandler(err):
373 def errhandler(err):
373 if err.filename == path:
374 if err.filename == path:
374 raise err
375 raise err
375 samestat = getattr(os.path, 'samestat', None)
376 samestat = getattr(os.path, 'samestat', None)
376 if followsym and samestat is not None:
377 if followsym and samestat is not None:
377 def adddir(dirlst, dirname):
378 def adddir(dirlst, dirname):
378 dirstat = os.stat(dirname)
379 dirstat = os.stat(dirname)
379 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
380 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
380 if not match:
381 if not match:
381 dirlst.append(dirstat)
382 dirlst.append(dirstat)
382 return not match
383 return not match
383 else:
384 else:
384 followsym = False
385 followsym = False
385
386
386 if (seen_dirs is None) and followsym:
387 if (seen_dirs is None) and followsym:
387 seen_dirs = []
388 seen_dirs = []
388 adddir(seen_dirs, path)
389 adddir(seen_dirs, path)
389 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
390 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
390 dirs.sort()
391 dirs.sort()
391 if '.hg' in dirs:
392 if '.hg' in dirs:
392 yield root # found a repository
393 yield root # found a repository
393 qroot = os.path.join(root, '.hg', 'patches')
394 qroot = os.path.join(root, '.hg', 'patches')
394 if os.path.isdir(os.path.join(qroot, '.hg')):
395 if os.path.isdir(os.path.join(qroot, '.hg')):
395 yield qroot # we have a patch queue repo here
396 yield qroot # we have a patch queue repo here
396 if recurse:
397 if recurse:
397 # avoid recursing inside the .hg directory
398 # avoid recursing inside the .hg directory
398 dirs.remove('.hg')
399 dirs.remove('.hg')
399 else:
400 else:
400 dirs[:] = [] # don't descend further
401 dirs[:] = [] # don't descend further
401 elif followsym:
402 elif followsym:
402 newdirs = []
403 newdirs = []
403 for d in dirs:
404 for d in dirs:
404 fname = os.path.join(root, d)
405 fname = os.path.join(root, d)
405 if adddir(seen_dirs, fname):
406 if adddir(seen_dirs, fname):
406 if os.path.islink(fname):
407 if os.path.islink(fname):
407 for hgname in walkrepos(fname, True, seen_dirs):
408 for hgname in walkrepos(fname, True, seen_dirs):
408 yield hgname
409 yield hgname
409 else:
410 else:
410 newdirs.append(d)
411 newdirs.append(d)
411 dirs[:] = newdirs
412 dirs[:] = newdirs
412
413
413 def binnode(ctx):
414 def binnode(ctx):
414 """Return binary node id for a given basectx"""
415 """Return binary node id for a given basectx"""
415 node = ctx.node()
416 node = ctx.node()
416 if node is None:
417 if node is None:
417 return wdirid
418 return wdirid
418 return node
419 return node
419
420
420 def intrev(ctx):
421 def intrev(ctx):
421 """Return integer for a given basectx that can be used in comparison or
422 """Return integer for a given basectx that can be used in comparison or
422 arithmetic operation"""
423 arithmetic operation"""
423 rev = ctx.rev()
424 rev = ctx.rev()
424 if rev is None:
425 if rev is None:
425 return wdirrev
426 return wdirrev
426 return rev
427 return rev
427
428
428 def formatchangeid(ctx):
429 def formatchangeid(ctx):
429 """Format changectx as '{rev}:{node|formatnode}', which is the default
430 """Format changectx as '{rev}:{node|formatnode}', which is the default
430 template provided by logcmdutil.changesettemplater"""
431 template provided by logcmdutil.changesettemplater"""
431 repo = ctx.repo()
432 repo = ctx.repo()
432 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
433 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
433
434
434 def formatrevnode(ui, rev, node):
435 def formatrevnode(ui, rev, node):
435 """Format given revision and node depending on the current verbosity"""
436 """Format given revision and node depending on the current verbosity"""
436 if ui.debugflag:
437 if ui.debugflag:
437 hexfunc = hex
438 hexfunc = hex
438 else:
439 else:
439 hexfunc = short
440 hexfunc = short
440 return '%d:%s' % (rev, hexfunc(node))
441 return '%d:%s' % (rev, hexfunc(node))
441
442
442 def resolvehexnodeidprefix(repo, prefix):
443 def resolvehexnodeidprefix(repo, prefix):
443 if (prefix.startswith('x') and
444 if (prefix.startswith('x') and
444 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
445 repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
445 prefix = prefix[1:]
446 prefix = prefix[1:]
446 try:
447 try:
447 # Uses unfiltered repo because it's faster when prefix is ambiguous/
448 # Uses unfiltered repo because it's faster when prefix is ambiguous/
448 # This matches the shortesthexnodeidprefix() function below.
449 # This matches the shortesthexnodeidprefix() function below.
449 node = repo.unfiltered().changelog._partialmatch(prefix)
450 node = repo.unfiltered().changelog._partialmatch(prefix)
450 except error.AmbiguousPrefixLookupError:
451 except error.AmbiguousPrefixLookupError:
451 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
452 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
452 if revset:
453 if revset:
453 # Clear config to avoid infinite recursion
454 # Clear config to avoid infinite recursion
454 configoverrides = {('experimental',
455 configoverrides = {('experimental',
455 'revisions.disambiguatewithin'): None}
456 'revisions.disambiguatewithin'): None}
456 with repo.ui.configoverride(configoverrides):
457 with repo.ui.configoverride(configoverrides):
457 revs = repo.anyrevs([revset], user=True)
458 revs = repo.anyrevs([revset], user=True)
458 matches = []
459 matches = []
459 for rev in revs:
460 for rev in revs:
460 node = repo.changelog.node(rev)
461 node = repo.changelog.node(rev)
461 if hex(node).startswith(prefix):
462 if hex(node).startswith(prefix):
462 matches.append(node)
463 matches.append(node)
463 if len(matches) == 1:
464 if len(matches) == 1:
464 return matches[0]
465 return matches[0]
465 raise
466 raise
466 if node is None:
467 if node is None:
467 return
468 return
468 repo.changelog.rev(node) # make sure node isn't filtered
469 repo.changelog.rev(node) # make sure node isn't filtered
469 return node
470 return node
470
471
471 def mayberevnum(repo, prefix):
472 def mayberevnum(repo, prefix):
472 """Checks if the given prefix may be mistaken for a revision number"""
473 """Checks if the given prefix may be mistaken for a revision number"""
473 try:
474 try:
474 i = int(prefix)
475 i = int(prefix)
475 # if we are a pure int, then starting with zero will not be
476 # if we are a pure int, then starting with zero will not be
476 # confused as a rev; or, obviously, if the int is larger
477 # confused as a rev; or, obviously, if the int is larger
477 # than the value of the tip rev
478 # than the value of the tip rev
478 if prefix[0:1] == b'0' or i >= len(repo):
479 if prefix[0:1] == b'0' or i >= len(repo):
479 return False
480 return False
480 return True
481 return True
481 except ValueError:
482 except ValueError:
482 return False
483 return False
483
484
484 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
485 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
485 """Find the shortest unambiguous prefix that matches hexnode.
486 """Find the shortest unambiguous prefix that matches hexnode.
486
487
487 If "cache" is not None, it must be a dictionary that can be used for
488 If "cache" is not None, it must be a dictionary that can be used for
488 caching between calls to this method.
489 caching between calls to this method.
489 """
490 """
490 # _partialmatch() of filtered changelog could take O(len(repo)) time,
491 # _partialmatch() of filtered changelog could take O(len(repo)) time,
491 # which would be unacceptably slow. so we look for hash collision in
492 # which would be unacceptably slow. so we look for hash collision in
492 # unfiltered space, which means some hashes may be slightly longer.
493 # unfiltered space, which means some hashes may be slightly longer.
493
494
494 def disambiguate(prefix):
495 def disambiguate(prefix):
495 """Disambiguate against revnums."""
496 """Disambiguate against revnums."""
496 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
497 if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
497 if mayberevnum(repo, prefix):
498 if mayberevnum(repo, prefix):
498 return 'x' + prefix
499 return 'x' + prefix
499 else:
500 else:
500 return prefix
501 return prefix
501
502
502 hexnode = hex(node)
503 hexnode = hex(node)
503 for length in range(len(prefix), len(hexnode) + 1):
504 for length in range(len(prefix), len(hexnode) + 1):
504 prefix = hexnode[:length]
505 prefix = hexnode[:length]
505 if not mayberevnum(repo, prefix):
506 if not mayberevnum(repo, prefix):
506 return prefix
507 return prefix
507
508
508 cl = repo.unfiltered().changelog
509 cl = repo.unfiltered().changelog
509 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
510 revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
510 if revset:
511 if revset:
511 revs = None
512 revs = None
512 if cache is not None:
513 if cache is not None:
513 revs = cache.get('disambiguationrevset')
514 revs = cache.get('disambiguationrevset')
514 if revs is None:
515 if revs is None:
515 revs = repo.anyrevs([revset], user=True)
516 revs = repo.anyrevs([revset], user=True)
516 if cache is not None:
517 if cache is not None:
517 cache['disambiguationrevset'] = revs
518 cache['disambiguationrevset'] = revs
518 if cl.rev(node) in revs:
519 if cl.rev(node) in revs:
519 hexnode = hex(node)
520 hexnode = hex(node)
520 nodetree = None
521 nodetree = None
521 if cache is not None:
522 if cache is not None:
522 nodetree = cache.get('disambiguationnodetree')
523 nodetree = cache.get('disambiguationnodetree')
523 if not nodetree:
524 if not nodetree:
524 try:
525 try:
525 nodetree = parsers.nodetree(cl.index, len(revs))
526 nodetree = parsers.nodetree(cl.index, len(revs))
526 except AttributeError:
527 except AttributeError:
527 # no native nodetree
528 # no native nodetree
528 pass
529 pass
529 else:
530 else:
530 for r in revs:
531 for r in revs:
531 nodetree.insert(r)
532 nodetree.insert(r)
532 if cache is not None:
533 if cache is not None:
533 cache['disambiguationnodetree'] = nodetree
534 cache['disambiguationnodetree'] = nodetree
534 if nodetree is not None:
535 if nodetree is not None:
535 length = max(nodetree.shortest(node), minlength)
536 length = max(nodetree.shortest(node), minlength)
536 prefix = hexnode[:length]
537 prefix = hexnode[:length]
537 return disambiguate(prefix)
538 return disambiguate(prefix)
538 for length in range(minlength, len(hexnode) + 1):
539 for length in range(minlength, len(hexnode) + 1):
539 matches = []
540 matches = []
540 prefix = hexnode[:length]
541 prefix = hexnode[:length]
541 for rev in revs:
542 for rev in revs:
542 otherhexnode = repo[rev].hex()
543 otherhexnode = repo[rev].hex()
543 if prefix == otherhexnode[:length]:
544 if prefix == otherhexnode[:length]:
544 matches.append(otherhexnode)
545 matches.append(otherhexnode)
545 if len(matches) == 1:
546 if len(matches) == 1:
546 return disambiguate(prefix)
547 return disambiguate(prefix)
547
548
548 try:
549 try:
549 return disambiguate(cl.shortest(node, minlength))
550 return disambiguate(cl.shortest(node, minlength))
550 except error.LookupError:
551 except error.LookupError:
551 raise error.RepoLookupError()
552 raise error.RepoLookupError()
552
553
553 def isrevsymbol(repo, symbol):
554 def isrevsymbol(repo, symbol):
554 """Checks if a symbol exists in the repo.
555 """Checks if a symbol exists in the repo.
555
556
556 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
557 See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
557 symbol is an ambiguous nodeid prefix.
558 symbol is an ambiguous nodeid prefix.
558 """
559 """
559 try:
560 try:
560 revsymbol(repo, symbol)
561 revsymbol(repo, symbol)
561 return True
562 return True
562 except error.RepoLookupError:
563 except error.RepoLookupError:
563 return False
564 return False
564
565
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)

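# Illustrative usage (a sketch, not part of the original module); symbols
# are bytes throughout this code base:
#
#   if isrevsymbol(repo, b'my-bookmark'):
#       ctx = revsymbol(repo, b'my-bookmark')  # changectx for the bookmark
#   ctx = revsymbol(repo, b'.')                # working directory parent
#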
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

def revsingle(repo, revspec, default='.', localalias=None):
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec], localalias=localalias)
    if not l:
        raise error.Abort(_('empty revision set'))
    return repo[l.last()]

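# Illustrative usage (a sketch, not part of the original module): unlike
# revsymbol(), revsingle() accepts a full revset and returns the context of
# its last member, falling back to `default` for an empty spec:
#
#   ctx = revsingle(repo, b'max(public())')  # last member of the revset
#   ctx = revsingle(repo, b'')               # repo['.'], the default
#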
def _pairspec(revspec):
    tree = revsetlang.parse(revspec)
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')

def revpair(repo, revs):
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]

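# Illustrative usage (a sketch, not part of the original module): revpair()
# resolves the revision arguments of diff-like commands into the two contexts
# to compare; no arguments means '.' against the working directory:
#
#   ctx1, ctx2 = revpair(repo, [])         # repo['.'], repo[None]
#   ctx1, ctx2 = revpair(repo, [b'1:3'])   # repo[1], repo[3]
#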
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = []
    for spec in specs:
        if isinstance(spec, int):
            spec = revsetlang.formatspec('rev(%d)', spec)
        allspecs.append(spec)
    return repo.anyrevs(allspecs, user=True, localalias=localalias)

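# Illustrative usage (a sketch, not part of the original module): as the
# docstring above says, user-supplied arguments should be escaped with
# revsetlang.formatspec() before being passed in:
#
#   spec = revsetlang.formatspec(b'branch(%s)', branchname)
#   revs = revrange(repo, [spec, b'bookmark()'])  # union of both revsets
#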
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    if parents[0].rev() >= intrev(ctx) - 1:
        return []
    return parents

def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            try:
                globbed = glob.glob(pat)
            except re.error:
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        ret.append(kindpat)
    return ret

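# Illustrative behaviour (a sketch, not part of the original module): when
# util.expandglobs is true (i.e. on Windows), bare patterns are globbed,
# while patterns with an explicit kind pass through unchanged:
#
#   expandpats([b'*.py', b'path:Makefile'])
#   # -> [b'a.py', b'b.py', b'path:Makefile']  (if a.py and b.py exist)
#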
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats

def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]

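# Illustrative usage (a sketch, not part of the original module): build a
# matcher against the working context and test paths against it:
#
#   m = match(repo[None], pats=[b'glob:src/*.py'])
#   if m(b'src/foo.py'):
#       ...
#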
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())

def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)

def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    else:
        ctx = repo[rev]
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
        files = [f for f in ctx if m(f)]
        if len(files) != 1:
            raise error.ParseError(msg)
        return files[0]

def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)

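# Illustrative configuration (a sketch, not part of the original module):
# with the following in an hgrc, origpath() places backups under
# .hg/origbackups inside the repo instead of leaving .orig files in the
# working directory:
#
#   [ui]
#   origbackuppath = .hg/origbackups
#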
class _containsnode(object):
    """proxy __contains__(node) to container.__contains__ which accepts revs"""

    def __init__(self, repo, revcontainer):
        self._torev = repo.changelog.rev
        self._revcontains = revcontainer.__contains__

    def __contains__(self, node):
        return self._revcontains(self._torev(node))

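# Illustrative usage (a sketch, not part of the original module): wrap a
# rev-based container so it can be queried with binary nodes, as the
# bookmark-cleanup code below does with the result of repo.revs():
#
#   deleterevs = repo.revs('draft()')
#   deletenodes = _containsnode(repo, deleterevs)
#   found = somenode in deletenodes  # `somenode` is a hypothetical node id
#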
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or an iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is a dictionary containing metadata to be stored in obsmarkers if
    obsolescence is enabled.
    """
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non-tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obsolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the nodes in topological order, that might be useful
            # for some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                for n in ns:
                    if s or not isobs(n):
                        rel = (unfi[n], tuple(unfi[m] for m in s))
                        rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)

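# Illustrative call (a sketch, not part of the original module): a history-
# rewriting command that folded two draft commits into one could report it as
#
#   cleanupnodes(repo, {(old1, old2): (new,)}, 'fold', fixphase=True)
#
# which moves bookmarks onto `new` and records obsmarkers for old1 and old2
# (or strips them when obsolescence is disabled).
#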
def addremove(repo, matcher, prefix, opts=None):
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret

def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0

def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten

def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(old)
                or not matcher.exact(new)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(old), matcher.rel(new),
                                score * 100))
            renames[new] = old
    return renames

def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)

def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

def writerequires(opener, requirements):
    with opener('requires', 'w') as fp:
        for r in sorted(requirements):
            fp.write("%s\n" % r)

class filecachesubentry(object):
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

            # check again
            if not self._cacheable:
                return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

class filecacheentry(object):
    def __init__(self, paths, stat=True):
        self._entries = []
        for path in paths:
            self._entries.append(filecachesubentry(path, stat))

    def changed(self):
        '''true if any entry has changed'''
        for entry in self._entries:
            if entry.changed():
                return True
        return False

    def refresh(self):
        for entry in self._entries:
            entry.refresh()

class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is returned.

    On external property set operations, stat() calls are performed and the new
    value is cached.

    On property delete operations, cached data is removed.

    When using the property API, cached data is always returned, if available:
    no stat() is performed to check if the file has changed and if the function
    needs to be called to reflect file changes.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.sname in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.sname]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.sname]
        except KeyError:
            raise AttributeError(self.sname)

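# Illustrative subclass (a sketch, not part of the original module; the
# names below are hypothetical): a user of filecache provides join() to
# locate the tracked file and decorates a method on a class that carries a
# `_filecache` dict:
#
#   class storecache(filecache):
#       def join(self, obj, fname):
#           return obj.store.join(fname)
#
#   class repoish(object):
#       def __init__(self, store):
#           self.store = store
#           self._filecache = {}
#
#       @storecache('00changelog.i')
#       def changelog(self):
#           return loadchangelog(self)  # hypothetical loader
#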
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data

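# Illustrative configuration (a sketch, not part of the original module):
# given an hgrc section such as
#
#   [extdata]
#   bugzilla = shell:cat .hg/bugzilla-ids
#
# extdatasource(repo, 'bugzilla') runs the command from the repo root and
# returns a {rev: value} map for every output line whose revision specifier
# resolves locally.
#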
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

class progress(object):
    def __init__(self, ui, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._print(item)

    def increment(self, step=1, item="", total=None):
        self.update(self.pos + step, item, total)

    def complete(self):
        self.ui.progress(self.topic, None)

    def _print(self, item):
        self.ui.progress(self.topic, self.pos, item, self.unit,
                         self.total)

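# Illustrative usage (a sketch, not part of the original module): progress
# is a context manager, so the bar is cleared even if an error is raised:
#
#   with progress(ui, 'scanning', unit='files', total=len(files)) as prog:
#       for f in files:
#           prog.increment(item=f)
#           process(f)  # hypothetical per-file work
#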
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta')
            or ui.configbool('format', 'usegeneraldelta')
            or ui.configbool('format', 'sparse-revlog'))

1447 def gddeltaconfig(ui):
1448 def gddeltaconfig(ui):
1448 """helper function to know if incoming delta should be optimised
1449 """helper function to know if incoming delta should be optimised
1449 """
1450 """
1450 # experimental config: format.generaldelta
1451 # experimental config: format.generaldelta
1451 return ui.configbool('format', 'generaldelta')
1452 return ui.configbool('format', 'generaldelta')
1452
1453
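# For reference, the settings these two helpers consult live in the [format]
# section of an hgrc; the values shown here are hypothetical:
#
#     [format]
#     generaldelta = True
#     usegeneraldelta = True
#     sparse-revlog = True
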
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))

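# Example (illustrative sketch): round-tripping a small state file. The vfs
# base directory and file name are hypothetical; 'vfsmod' is assumed to be
# this module's 'vfs as vfsmod' import.
#
#     kvfile = simplekeyvaluefile(vfsmod.vfs('/tmp/demo'), 'state')
#     kvfile.write({'version': '1', 'user': 'alice'}, firstline='statev1')
#     data = kvfile.read(firstlinenonkeyval=True)
#     # data == {'__firstline': 'statev1', 'version': '1', 'user': 'alice'}
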
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

_reportnewcssource = [
    'pull',
    'unbundle',
]

def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if match:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)
    else:
        match = matchall(repo)

    fileprefetchhooks(repo, revs, match)

# a list of (repo, revs, match) prefetch functions
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True

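# Example (illustrative sketch): how an extension might register a prefetch
# hook. The extension name and function body are hypothetical; util.hooks
# instances expose an add(source, hook) method.
#
#     def _prefetch(repo, revs, match):
#         pass  # warm a local cache with the matched files for 'revs'
#
#     fileprefetchhooks.add('myextension', _prefetch)
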
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used, leading to trouble. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            if origrepolen >= len(repo):
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%d: and not obsolete()', origrepolen)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            draft = len(repo.revs('%ld and draft()', revs))
            secret = len(repo.revs('%ld and secret()', revs))
            if not (draft or secret):
                msg = _('new changesets %s\n') % revrange
            elif draft and secret:
                msg = _('new changesets %s (%d drafts, %d secrets)\n')
                msg %= (revrange, draft, secret)
            elif draft:
                msg = _('new changesets %s (%d drafts)\n')
                msg %= (revrange, draft)
            elif secret:
                msg = _('new changesets %s (%d secrets)\n')
                msg %= (revrange, secret)
            else:
                raise error.ProgrammingError('entered unreachable condition')
            repo.ui.status(msg)

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))

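# The weakref dance in reportsummary(), in isolation (illustrative sketch):
# hold a weak reference to the unfiltered repo and re-apply the filter on
# access, so a registered callback neither keeps the repo alive nor touches
# a repoview that has already been torn down. The None check here is an
# extra guard not present in wrapped() above.
#
#     reporef = weakref.ref(repo.unfiltered())
#     def callback(tr):
#         repo = reporef()
#         if repo is not None:
#             repo = repo.filtered('visible')
#             ...  # use the re-filtered repo
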
def getinstabilitymessage(delta, instability):
    """return the message to show as a warning about new instabilities

    exists as a separate function so that extensions can wrap it to show more
    information, like how to fix instabilities"""
    if delta > 0:
        return _('%i new %s changesets\n') % (delta, instability)

def nodesummaries(repo, nodes, maxnumnodes=4):
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    first = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (first, len(nodes) - maxnumnodes)

def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)

def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    return sink

def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to separate branch/tags caches until we
    # can disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)

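# For reference, this behavior is gated behind the experimental configuration
# read above; e.g. in an hgrc (values hypothetical):
#
#     [experimental]
#     directaccess = True
#     directaccess.revnums = True
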
def _getrevsfromsymbols(repo, symbols):
    """parses the list of symbols and returns a set of revision numbers of
    hidden changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            pass

        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs

def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
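
# Example (illustrative sketch): selecting the revisions "owned" by a
# bookmark, in the spirit of 'hg strip -B BOOKMARK'; the bookmark name is
# hypothetical.
#
#     revs = bookmarkrevs(repo, 'feature-x')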