Start using manifestflags methods
Matt Mackall
r2832:e196aa1d default
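This changeset replaces direct indexing of the manifest flags mapping (mff[filename]) with the accessor method on the manifestflags object, mff.execf(filename), when choosing the archive member mode. A minimal sketch of the idea follows; the manifestflags class here is a stand-in written for illustration and is not Mercurial's actual implementation:

# Illustrative stand-in: expose the exec bit through an accessor method
# instead of raw dict indexing (names mirror the diff below).
class manifestflags(dict):
    def execf(self, f):
        # 'x' in the stored flag string marks the file as executable
        return 'x' in self.get(f, '')

mff = manifestflags({'contrib/hgk': 'x', 'README': ''})
for f in ('contrib/hgk', 'README'):
    mode = mff.execf(f) and 0755 or 0644    # archive member mode
    print f, oct(mode)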
--- a/archival.py
+++ b/archival.py
@@ -1,174 +1,174 @@
# archival.py - revision archival for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of
# the GNU General Public License, incorporated herein by reference.

from demandload import *
from i18n import gettext as _
from node import *
demandload(globals(), 'cStringIO os stat tarfile time util zipfile')

def tidyprefix(dest, prefix, suffixes):
    '''choose prefix to use for names in archive. make sure prefix is
    safe for consumers.'''

    if prefix:
        prefix = prefix.replace('\\', '/')
    else:
        if not isinstance(dest, str):
            raise ValueError('dest must be string if no prefix')
        prefix = os.path.basename(dest)
        lower = prefix.lower()
        for sfx in suffixes:
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
    lpfx = os.path.normpath(util.localpath(prefix))
    prefix = util.pconvert(lpfx)
    if not prefix.endswith('/'):
        prefix += '/'
    if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
        raise util.Abort(_('archive prefix contains illegal components'))
    return prefix

class tarit:
    '''write archive to tar file or stream. can write uncompressed,
    or compress with gzip or bzip2.'''

    def __init__(self, dest, prefix, mtime, kind=''):
        self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
                                                '.tgz', 'tbz2'])
        self.mtime = mtime
        if isinstance(dest, str):
            self.z = tarfile.open(dest, mode='w:'+kind)
        else:
            self.z = tarfile.open(mode='w|'+kind, fileobj=dest)

    def addfile(self, name, mode, data):
        i = tarfile.TarInfo(self.prefix + name)
        i.mtime = self.mtime
        i.size = len(data)
        i.mode = mode
        self.z.addfile(i, cStringIO.StringIO(data))

    def done(self):
        self.z.close()

class tellable:
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset

class zipit:
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, prefix, mtime, compress=True):
        self.prefix = tidyprefix(dest, prefix, ('.zip',))
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, data):
        i = zipfile.ZipInfo(self.prefix + name, self.date_time)
        i.compress_type = self.z.compression
        i.flag_bits = 0x08
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        i.external_attr = (mode | stat.S_IFREG) << 16L
        self.z.writestr(i, data)

    def done(self):
        self.z.close()

class fileit:
    '''write archive as files in directory.'''

    def __init__(self, name, prefix, mtime):
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
        self.basedir = name
        self.dirs = {}
        self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
                       getattr(os, 'O_BINARY', 0) |
                       getattr(os, 'O_NOFOLLOW', 0))

    def addfile(self, name, mode, data):
        destfile = os.path.join(self.basedir, name)
        destdir = os.path.dirname(destfile)
        if destdir not in self.dirs:
            if not os.path.isdir(destdir):
                os.makedirs(destdir)
            self.dirs[destdir] = 1
        os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)

    def done(self):
        pass

archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
    'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
    'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
    'zip': zipit,
    }

def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.'''

    def write(name, mode, data):
        if matchfn and not matchfn(name): return
        if decode:
            fp = cStringIO.StringIO()
            repo.wwrite(name, data, fp)
            data = fp.getvalue()
        archiver.addfile(name, mode, data)

    change = repo.changelog.read(node)
    mn = change[0]
    archiver = archivers[kind](dest, prefix, mtime or change[2][0])
    mf = repo.manifest.read(mn).items()
    mff = repo.manifest.readflags(mn)
    mf.sort()
    write('.hg_archival.txt', 0644,
          'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
    for filename, filenode in mf:
-        write(filename, mff[filename] and 0755 or 0644,
+        write(filename, mff.execf(filename) and 0755 or 0644,
              repo.file(filename).read(filenode))
    archiver.done()
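For context, this is roughly how the archive() entry point above could be driven. It is a sketch only: opening the repository through ui.ui() and hg.repository() is assumed from the contemporaneous API and is not part of this patch.

# Hypothetical caller for archival.archive(), following the signature above.
from mercurial import ui, hg, archival

u = ui.ui()
repo = hg.repository(u, '.')            # repository in the current directory
node = repo.lookup('tip')               # archive the tip revision
archival.archive(repo, 'project.tar.gz', node, 'tgz', prefix='project/')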
--- a/commands.py
+++ b/commands.py
@@ -1,3507 +1,3508 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 demandload(globals(), "hgweb.server sshserver")
16 demandload(globals(), "hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted, unknown = repo.changes()
24 modified, added, removed, deleted, unknown = repo.changes()
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def filterfiles(filters, files):
28 def filterfiles(filters, files):
29 l = [x for x in files if x in filters]
29 l = [x for x in files if x in filters]
30
30
31 for t in filters:
31 for t in filters:
32 if t and t[-1] != "/":
32 if t and t[-1] != "/":
33 t += "/"
33 t += "/"
34 l += [x for x in files if x.startswith(t)]
34 l += [x for x in files if x.startswith(t)]
35 return l
35 return l
36
36
37 def relpath(repo, args):
37 def relpath(repo, args):
38 cwd = repo.getcwd()
38 cwd = repo.getcwd()
39 if cwd:
39 if cwd:
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 return args
41 return args
42
42
43 def matchpats(repo, pats=[], opts={}, head=''):
43 def matchpats(repo, pats=[], opts={}, head=''):
44 cwd = repo.getcwd()
44 cwd = repo.getcwd()
45 if not pats and cwd:
45 if not pats and cwd:
46 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
46 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
47 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
47 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
48 cwd = ''
48 cwd = ''
49 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
49 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
50 opts.get('exclude'), head)
50 opts.get('exclude'), head)
51
51
52 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
52 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
53 files, matchfn, anypats = matchpats(repo, pats, opts, head)
53 files, matchfn, anypats = matchpats(repo, pats, opts, head)
54 exact = dict(zip(files, files))
54 exact = dict(zip(files, files))
55 def walk():
55 def walk():
56 for src, fn in repo.walk(node=node, files=files, match=matchfn,
56 for src, fn in repo.walk(node=node, files=files, match=matchfn,
57 badmatch=badmatch):
57 badmatch=badmatch):
58 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
58 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
59 return files, matchfn, walk()
59 return files, matchfn, walk()
60
60
61 def walk(repo, pats, opts, node=None, head='', badmatch=None):
61 def walk(repo, pats, opts, node=None, head='', badmatch=None):
62 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
62 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
63 for r in results:
63 for r in results:
64 yield r
64 yield r
65
65
66 def walkchangerevs(ui, repo, pats, opts):
66 def walkchangerevs(ui, repo, pats, opts):
67 '''Iterate over files and the revs they changed in.
67 '''Iterate over files and the revs they changed in.
68
68
69 Callers most commonly need to iterate backwards over the history
69 Callers most commonly need to iterate backwards over the history
70 it is interested in. Doing so has awful (quadratic-looking)
70 it is interested in. Doing so has awful (quadratic-looking)
71 performance, so we use iterators in a "windowed" way.
71 performance, so we use iterators in a "windowed" way.
72
72
73 We walk a window of revisions in the desired order. Within the
73 We walk a window of revisions in the desired order. Within the
74 window, we first walk forwards to gather data, then in the desired
74 window, we first walk forwards to gather data, then in the desired
75 order (usually backwards) to display it.
75 order (usually backwards) to display it.
76
76
77 This function returns an (iterator, getchange, matchfn) tuple. The
77 This function returns an (iterator, getchange, matchfn) tuple. The
78 getchange function returns the changelog entry for a numeric
78 getchange function returns the changelog entry for a numeric
79 revision. The iterator yields 3-tuples. They will be of one of
79 revision. The iterator yields 3-tuples. They will be of one of
80 the following forms:
80 the following forms:
81
81
82 "window", incrementing, lastrev: stepping through a window,
82 "window", incrementing, lastrev: stepping through a window,
83 positive if walking forwards through revs, last rev in the
83 positive if walking forwards through revs, last rev in the
84 sequence iterated over - use to reset state for the current window
84 sequence iterated over - use to reset state for the current window
85
85
86 "add", rev, fns: out-of-order traversal of the given file names
86 "add", rev, fns: out-of-order traversal of the given file names
87 fns, which changed during revision rev - use to gather data for
87 fns, which changed during revision rev - use to gather data for
88 possible display
88 possible display
89
89
90 "iter", rev, None: in-order traversal of the revs earlier iterated
90 "iter", rev, None: in-order traversal of the revs earlier iterated
91 over with "add" - use to display data'''
91 over with "add" - use to display data'''
92
92
93 def increasing_windows(start, end, windowsize=8, sizelimit=512):
93 def increasing_windows(start, end, windowsize=8, sizelimit=512):
94 if start < end:
94 if start < end:
95 while start < end:
95 while start < end:
96 yield start, min(windowsize, end-start)
96 yield start, min(windowsize, end-start)
97 start += windowsize
97 start += windowsize
98 if windowsize < sizelimit:
98 if windowsize < sizelimit:
99 windowsize *= 2
99 windowsize *= 2
100 else:
100 else:
101 while start > end:
101 while start > end:
102 yield start, min(windowsize, start-end-1)
102 yield start, min(windowsize, start-end-1)
103 start -= windowsize
103 start -= windowsize
104 if windowsize < sizelimit:
104 if windowsize < sizelimit:
105 windowsize *= 2
105 windowsize *= 2
106
106
107
107
108 files, matchfn, anypats = matchpats(repo, pats, opts)
108 files, matchfn, anypats = matchpats(repo, pats, opts)
109
109
110 if repo.changelog.count() == 0:
110 if repo.changelog.count() == 0:
111 return [], False, matchfn
111 return [], False, matchfn
112
112
113 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
113 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
114 wanted = {}
114 wanted = {}
115 slowpath = anypats
115 slowpath = anypats
116 fncache = {}
116 fncache = {}
117
117
118 chcache = {}
118 chcache = {}
119 def getchange(rev):
119 def getchange(rev):
120 ch = chcache.get(rev)
120 ch = chcache.get(rev)
121 if ch is None:
121 if ch is None:
122 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
122 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
123 return ch
123 return ch
124
124
125 if not slowpath and not files:
125 if not slowpath and not files:
126 # No files, no patterns. Display all revs.
126 # No files, no patterns. Display all revs.
127 wanted = dict(zip(revs, revs))
127 wanted = dict(zip(revs, revs))
128 if not slowpath:
128 if not slowpath:
129 # Only files, no patterns. Check the history of each file.
129 # Only files, no patterns. Check the history of each file.
130 def filerevgen(filelog):
130 def filerevgen(filelog):
131 for i, window in increasing_windows(filelog.count()-1, -1):
131 for i, window in increasing_windows(filelog.count()-1, -1):
132 revs = []
132 revs = []
133 for j in xrange(i - window, i + 1):
133 for j in xrange(i - window, i + 1):
134 revs.append(filelog.linkrev(filelog.node(j)))
134 revs.append(filelog.linkrev(filelog.node(j)))
135 revs.reverse()
135 revs.reverse()
136 for rev in revs:
136 for rev in revs:
137 yield rev
137 yield rev
138
138
139 minrev, maxrev = min(revs), max(revs)
139 minrev, maxrev = min(revs), max(revs)
140 for file_ in files:
140 for file_ in files:
141 filelog = repo.file(file_)
141 filelog = repo.file(file_)
142 # A zero count may be a directory or deleted file, so
142 # A zero count may be a directory or deleted file, so
143 # try to find matching entries on the slow path.
143 # try to find matching entries on the slow path.
144 if filelog.count() == 0:
144 if filelog.count() == 0:
145 slowpath = True
145 slowpath = True
146 break
146 break
147 for rev in filerevgen(filelog):
147 for rev in filerevgen(filelog):
148 if rev <= maxrev:
148 if rev <= maxrev:
149 if rev < minrev:
149 if rev < minrev:
150 break
150 break
151 fncache.setdefault(rev, [])
151 fncache.setdefault(rev, [])
152 fncache[rev].append(file_)
152 fncache[rev].append(file_)
153 wanted[rev] = 1
153 wanted[rev] = 1
154 if slowpath:
154 if slowpath:
155 # The slow path checks files modified in every changeset.
155 # The slow path checks files modified in every changeset.
156 def changerevgen():
156 def changerevgen():
157 for i, window in increasing_windows(repo.changelog.count()-1, -1):
157 for i, window in increasing_windows(repo.changelog.count()-1, -1):
158 for j in xrange(i - window, i + 1):
158 for j in xrange(i - window, i + 1):
159 yield j, getchange(j)[3]
159 yield j, getchange(j)[3]
160
160
161 for rev, changefiles in changerevgen():
161 for rev, changefiles in changerevgen():
162 matches = filter(matchfn, changefiles)
162 matches = filter(matchfn, changefiles)
163 if matches:
163 if matches:
164 fncache[rev] = matches
164 fncache[rev] = matches
165 wanted[rev] = 1
165 wanted[rev] = 1
166
166
167 def iterate():
167 def iterate():
168 for i, window in increasing_windows(0, len(revs)):
168 for i, window in increasing_windows(0, len(revs)):
169 yield 'window', revs[0] < revs[-1], revs[-1]
169 yield 'window', revs[0] < revs[-1], revs[-1]
170 nrevs = [rev for rev in revs[i:i+window]
170 nrevs = [rev for rev in revs[i:i+window]
171 if rev in wanted]
171 if rev in wanted]
172 srevs = list(nrevs)
172 srevs = list(nrevs)
173 srevs.sort()
173 srevs.sort()
174 for rev in srevs:
174 for rev in srevs:
175 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
175 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
176 yield 'add', rev, fns
176 yield 'add', rev, fns
177 for rev in nrevs:
177 for rev in nrevs:
178 yield 'iter', rev, None
178 yield 'iter', rev, None
179 return iterate(), getchange, matchfn
179 return iterate(), getchange, matchfn
180
180
181 revrangesep = ':'
181 revrangesep = ':'
182
182
183 def revfix(repo, val, defval):
183 def revfix(repo, val, defval):
184 '''turn user-level id of changeset into rev number.
184 '''turn user-level id of changeset into rev number.
185 user-level id can be tag, changeset, rev number, or negative rev
185 user-level id can be tag, changeset, rev number, or negative rev
186 number relative to number of revs (-1 is tip, etc).'''
186 number relative to number of revs (-1 is tip, etc).'''
187 if not val:
187 if not val:
188 return defval
188 return defval
189 try:
189 try:
190 num = int(val)
190 num = int(val)
191 if str(num) != val:
191 if str(num) != val:
192 raise ValueError
192 raise ValueError
193 if num < 0:
193 if num < 0:
194 num += repo.changelog.count()
194 num += repo.changelog.count()
195 if num < 0:
195 if num < 0:
196 num = 0
196 num = 0
197 elif num >= repo.changelog.count():
197 elif num >= repo.changelog.count():
198 raise ValueError
198 raise ValueError
199 except ValueError:
199 except ValueError:
200 try:
200 try:
201 num = repo.changelog.rev(repo.lookup(val))
201 num = repo.changelog.rev(repo.lookup(val))
202 except KeyError:
202 except KeyError:
203 raise util.Abort(_('invalid revision identifier %s'), val)
203 raise util.Abort(_('invalid revision identifier %s'), val)
204 return num
204 return num
205
205
206 def revpair(ui, repo, revs):
206 def revpair(ui, repo, revs):
207 '''return pair of nodes, given list of revisions. second item can
207 '''return pair of nodes, given list of revisions. second item can
208 be None, meaning use working dir.'''
208 be None, meaning use working dir.'''
209 if not revs:
209 if not revs:
210 return repo.dirstate.parents()[0], None
210 return repo.dirstate.parents()[0], None
211 end = None
211 end = None
212 if len(revs) == 1:
212 if len(revs) == 1:
213 start = revs[0]
213 start = revs[0]
214 if revrangesep in start:
214 if revrangesep in start:
215 start, end = start.split(revrangesep, 1)
215 start, end = start.split(revrangesep, 1)
216 start = revfix(repo, start, 0)
216 start = revfix(repo, start, 0)
217 end = revfix(repo, end, repo.changelog.count() - 1)
217 end = revfix(repo, end, repo.changelog.count() - 1)
218 else:
218 else:
219 start = revfix(repo, start, None)
219 start = revfix(repo, start, None)
220 elif len(revs) == 2:
220 elif len(revs) == 2:
221 if revrangesep in revs[0] or revrangesep in revs[1]:
221 if revrangesep in revs[0] or revrangesep in revs[1]:
222 raise util.Abort(_('too many revisions specified'))
222 raise util.Abort(_('too many revisions specified'))
223 start = revfix(repo, revs[0], None)
223 start = revfix(repo, revs[0], None)
224 end = revfix(repo, revs[1], None)
224 end = revfix(repo, revs[1], None)
225 else:
225 else:
226 raise util.Abort(_('too many revisions specified'))
226 raise util.Abort(_('too many revisions specified'))
227 if end is not None: end = repo.lookup(str(end))
227 if end is not None: end = repo.lookup(str(end))
228 return repo.lookup(str(start)), end
228 return repo.lookup(str(start)), end
229
229
230 def revrange(ui, repo, revs):
230 def revrange(ui, repo, revs):
231 """Yield revision as strings from a list of revision specifications."""
231 """Yield revision as strings from a list of revision specifications."""
232 seen = {}
232 seen = {}
233 for spec in revs:
233 for spec in revs:
234 if revrangesep in spec:
234 if revrangesep in spec:
235 start, end = spec.split(revrangesep, 1)
235 start, end = spec.split(revrangesep, 1)
236 start = revfix(repo, start, 0)
236 start = revfix(repo, start, 0)
237 end = revfix(repo, end, repo.changelog.count() - 1)
237 end = revfix(repo, end, repo.changelog.count() - 1)
238 step = start > end and -1 or 1
238 step = start > end and -1 or 1
239 for rev in xrange(start, end+step, step):
239 for rev in xrange(start, end+step, step):
240 if rev in seen:
240 if rev in seen:
241 continue
241 continue
242 seen[rev] = 1
242 seen[rev] = 1
243 yield str(rev)
243 yield str(rev)
244 else:
244 else:
245 rev = revfix(repo, spec, None)
245 rev = revfix(repo, spec, None)
246 if rev in seen:
246 if rev in seen:
247 continue
247 continue
248 seen[rev] = 1
248 seen[rev] = 1
249 yield str(rev)
249 yield str(rev)
250
250
251 def make_filename(repo, pat, node,
251 def make_filename(repo, pat, node,
252 total=None, seqno=None, revwidth=None, pathname=None):
252 total=None, seqno=None, revwidth=None, pathname=None):
253 node_expander = {
253 node_expander = {
254 'H': lambda: hex(node),
254 'H': lambda: hex(node),
255 'R': lambda: str(repo.changelog.rev(node)),
255 'R': lambda: str(repo.changelog.rev(node)),
256 'h': lambda: short(node),
256 'h': lambda: short(node),
257 }
257 }
258 expander = {
258 expander = {
259 '%': lambda: '%',
259 '%': lambda: '%',
260 'b': lambda: os.path.basename(repo.root),
260 'b': lambda: os.path.basename(repo.root),
261 }
261 }
262
262
263 try:
263 try:
264 if node:
264 if node:
265 expander.update(node_expander)
265 expander.update(node_expander)
266 if node and revwidth is not None:
266 if node and revwidth is not None:
267 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
267 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
268 if total is not None:
268 if total is not None:
269 expander['N'] = lambda: str(total)
269 expander['N'] = lambda: str(total)
270 if seqno is not None:
270 if seqno is not None:
271 expander['n'] = lambda: str(seqno)
271 expander['n'] = lambda: str(seqno)
272 if total is not None and seqno is not None:
272 if total is not None and seqno is not None:
273 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
273 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
274 if pathname is not None:
274 if pathname is not None:
275 expander['s'] = lambda: os.path.basename(pathname)
275 expander['s'] = lambda: os.path.basename(pathname)
276 expander['d'] = lambda: os.path.dirname(pathname) or '.'
276 expander['d'] = lambda: os.path.dirname(pathname) or '.'
277 expander['p'] = lambda: pathname
277 expander['p'] = lambda: pathname
278
278
279 newname = []
279 newname = []
280 patlen = len(pat)
280 patlen = len(pat)
281 i = 0
281 i = 0
282 while i < patlen:
282 while i < patlen:
283 c = pat[i]
283 c = pat[i]
284 if c == '%':
284 if c == '%':
285 i += 1
285 i += 1
286 c = pat[i]
286 c = pat[i]
287 c = expander[c]()
287 c = expander[c]()
288 newname.append(c)
288 newname.append(c)
289 i += 1
289 i += 1
290 return ''.join(newname)
290 return ''.join(newname)
291 except KeyError, inst:
291 except KeyError, inst:
292 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
292 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
293 inst.args[0])
293 inst.args[0])
294
294
295 def make_file(repo, pat, node=None,
295 def make_file(repo, pat, node=None,
296 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
296 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
297 if not pat or pat == '-':
297 if not pat or pat == '-':
298 return 'w' in mode and sys.stdout or sys.stdin
298 return 'w' in mode and sys.stdout or sys.stdin
299 if hasattr(pat, 'write') and 'w' in mode:
299 if hasattr(pat, 'write') and 'w' in mode:
300 return pat
300 return pat
301 if hasattr(pat, 'read') and 'r' in mode:
301 if hasattr(pat, 'read') and 'r' in mode:
302 return pat
302 return pat
303 return open(make_filename(repo, pat, node, total, seqno, revwidth,
303 return open(make_filename(repo, pat, node, total, seqno, revwidth,
304 pathname),
304 pathname),
305 mode)
305 mode)
306
306
307 def write_bundle(cg, filename=None, compress=True):
307 def write_bundle(cg, filename=None, compress=True):
308 """Write a bundle file and return its filename.
308 """Write a bundle file and return its filename.
309
309
310 Existing files will not be overwritten.
310 Existing files will not be overwritten.
311 If no filename is specified, a temporary file is created.
311 If no filename is specified, a temporary file is created.
312 bz2 compression can be turned off.
312 bz2 compression can be turned off.
313 The bundle file will be deleted in case of errors.
313 The bundle file will be deleted in case of errors.
314 """
314 """
315 class nocompress(object):
315 class nocompress(object):
316 def compress(self, x):
316 def compress(self, x):
317 return x
317 return x
318 def flush(self):
318 def flush(self):
319 return ""
319 return ""
320
320
321 fh = None
321 fh = None
322 cleanup = None
322 cleanup = None
323 try:
323 try:
324 if filename:
324 if filename:
325 if os.path.exists(filename):
325 if os.path.exists(filename):
326 raise util.Abort(_("file '%s' already exists"), filename)
326 raise util.Abort(_("file '%s' already exists"), filename)
327 fh = open(filename, "wb")
327 fh = open(filename, "wb")
328 else:
328 else:
329 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
329 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
330 fh = os.fdopen(fd, "wb")
330 fh = os.fdopen(fd, "wb")
331 cleanup = filename
331 cleanup = filename
332
332
333 if compress:
333 if compress:
334 fh.write("HG10")
334 fh.write("HG10")
335 z = bz2.BZ2Compressor(9)
335 z = bz2.BZ2Compressor(9)
336 else:
336 else:
337 fh.write("HG10UN")
337 fh.write("HG10UN")
338 z = nocompress()
338 z = nocompress()
339 # parse the changegroup data, otherwise we will block
339 # parse the changegroup data, otherwise we will block
340 # in case of sshrepo because we don't know the end of the stream
340 # in case of sshrepo because we don't know the end of the stream
341
341
342 # an empty chunkiter is the end of the changegroup
342 # an empty chunkiter is the end of the changegroup
343 empty = False
343 empty = False
344 while not empty:
344 while not empty:
345 empty = True
345 empty = True
346 for chunk in changegroup.chunkiter(cg):
346 for chunk in changegroup.chunkiter(cg):
347 empty = False
347 empty = False
348 fh.write(z.compress(changegroup.genchunk(chunk)))
348 fh.write(z.compress(changegroup.genchunk(chunk)))
349 fh.write(z.compress(changegroup.closechunk()))
349 fh.write(z.compress(changegroup.closechunk()))
350 fh.write(z.flush())
350 fh.write(z.flush())
351 cleanup = None
351 cleanup = None
352 return filename
352 return filename
353 finally:
353 finally:
354 if fh is not None:
354 if fh is not None:
355 fh.close()
355 fh.close()
356 if cleanup is not None:
356 if cleanup is not None:
357 os.unlink(cleanup)
357 os.unlink(cleanup)
358
358
359 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
359 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
360 changes=None, text=False, opts={}):
360 changes=None, text=False, opts={}):
361 if not node1:
361 if not node1:
362 node1 = repo.dirstate.parents()[0]
362 node1 = repo.dirstate.parents()[0]
363 # reading the data for node1 early allows it to play nicely
363 # reading the data for node1 early allows it to play nicely
364 # with repo.changes and the revlog cache.
364 # with repo.changes and the revlog cache.
365 change = repo.changelog.read(node1)
365 change = repo.changelog.read(node1)
366 mmap = repo.manifest.read(change[0])
366 mmap = repo.manifest.read(change[0])
367 date1 = util.datestr(change[2])
367 date1 = util.datestr(change[2])
368
368
369 if not changes:
369 if not changes:
370 changes = repo.changes(node1, node2, files, match=match)
370 changes = repo.changes(node1, node2, files, match=match)
371 modified, added, removed, deleted, unknown = changes
371 modified, added, removed, deleted, unknown = changes
372 if files:
372 if files:
373 modified, added, removed = map(lambda x: filterfiles(files, x),
373 modified, added, removed = map(lambda x: filterfiles(files, x),
374 (modified, added, removed))
374 (modified, added, removed))
375
375
376 if not modified and not added and not removed:
376 if not modified and not added and not removed:
377 return
377 return
378
378
379 if node2:
379 if node2:
380 change = repo.changelog.read(node2)
380 change = repo.changelog.read(node2)
381 mmap2 = repo.manifest.read(change[0])
381 mmap2 = repo.manifest.read(change[0])
382 _date2 = util.datestr(change[2])
382 _date2 = util.datestr(change[2])
383 def date2(f):
383 def date2(f):
384 return _date2
384 return _date2
385 def read(f):
385 def read(f):
386 return repo.file(f).read(mmap2[f])
386 return repo.file(f).read(mmap2[f])
387 else:
387 else:
388 tz = util.makedate()[1]
388 tz = util.makedate()[1]
389 _date2 = util.datestr()
389 _date2 = util.datestr()
390 def date2(f):
390 def date2(f):
391 try:
391 try:
392 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
392 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
393 except OSError, err:
393 except OSError, err:
394 if err.errno != errno.ENOENT: raise
394 if err.errno != errno.ENOENT: raise
395 return _date2
395 return _date2
396 def read(f):
396 def read(f):
397 return repo.wread(f)
397 return repo.wread(f)
398
398
399 if ui.quiet:
399 if ui.quiet:
400 r = None
400 r = None
401 else:
401 else:
402 hexfunc = ui.verbose and hex or short
402 hexfunc = ui.verbose and hex or short
403 r = [hexfunc(node) for node in [node1, node2] if node]
403 r = [hexfunc(node) for node in [node1, node2] if node]
404
404
405 diffopts = ui.diffopts()
405 diffopts = ui.diffopts()
406 showfunc = opts.get('show_function') or diffopts['showfunc']
406 showfunc = opts.get('show_function') or diffopts['showfunc']
407 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
407 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
408 ignorewsamount = opts.get('ignore_space_change') or \
408 ignorewsamount = opts.get('ignore_space_change') or \
409 diffopts['ignorewsamount']
409 diffopts['ignorewsamount']
410 ignoreblanklines = opts.get('ignore_blank_lines') or \
410 ignoreblanklines = opts.get('ignore_blank_lines') or \
411 diffopts['ignoreblanklines']
411 diffopts['ignoreblanklines']
412 for f in modified:
412 for f in modified:
413 to = None
413 to = None
414 if f in mmap:
414 if f in mmap:
415 to = repo.file(f).read(mmap[f])
415 to = repo.file(f).read(mmap[f])
416 tn = read(f)
416 tn = read(f)
417 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
417 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
418 showfunc=showfunc, ignorews=ignorews,
418 showfunc=showfunc, ignorews=ignorews,
419 ignorewsamount=ignorewsamount,
419 ignorewsamount=ignorewsamount,
420 ignoreblanklines=ignoreblanklines))
420 ignoreblanklines=ignoreblanklines))
421 for f in added:
421 for f in added:
422 to = None
422 to = None
423 tn = read(f)
423 tn = read(f)
424 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
424 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
425 showfunc=showfunc, ignorews=ignorews,
425 showfunc=showfunc, ignorews=ignorews,
426 ignorewsamount=ignorewsamount,
426 ignorewsamount=ignorewsamount,
427 ignoreblanklines=ignoreblanklines))
427 ignoreblanklines=ignoreblanklines))
428 for f in removed:
428 for f in removed:
429 to = repo.file(f).read(mmap[f])
429 to = repo.file(f).read(mmap[f])
430 tn = None
430 tn = None
431 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
431 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
432 showfunc=showfunc, ignorews=ignorews,
432 showfunc=showfunc, ignorews=ignorews,
433 ignorewsamount=ignorewsamount,
433 ignorewsamount=ignorewsamount,
434 ignoreblanklines=ignoreblanklines))
434 ignoreblanklines=ignoreblanklines))
435
435
436 def trimuser(ui, name, rev, revcache):
436 def trimuser(ui, name, rev, revcache):
437 """trim the name of the user who committed a change"""
437 """trim the name of the user who committed a change"""
438 user = revcache.get(rev)
438 user = revcache.get(rev)
439 if user is None:
439 if user is None:
440 user = revcache[rev] = ui.shortuser(name)
440 user = revcache[rev] = ui.shortuser(name)
441 return user
441 return user
442
442
443 class changeset_printer(object):
443 class changeset_printer(object):
444 '''show changeset information when templating not requested.'''
444 '''show changeset information when templating not requested.'''
445
445
446 def __init__(self, ui, repo):
446 def __init__(self, ui, repo):
447 self.ui = ui
447 self.ui = ui
448 self.repo = repo
448 self.repo = repo
449
449
450 def show(self, rev=0, changenode=None, brinfo=None):
450 def show(self, rev=0, changenode=None, brinfo=None):
451 '''show a single changeset or file revision'''
451 '''show a single changeset or file revision'''
452 log = self.repo.changelog
452 log = self.repo.changelog
453 if changenode is None:
453 if changenode is None:
454 changenode = log.node(rev)
454 changenode = log.node(rev)
455 elif not rev:
455 elif not rev:
456 rev = log.rev(changenode)
456 rev = log.rev(changenode)
457
457
458 if self.ui.quiet:
458 if self.ui.quiet:
459 self.ui.write("%d:%s\n" % (rev, short(changenode)))
459 self.ui.write("%d:%s\n" % (rev, short(changenode)))
460 return
460 return
461
461
462 changes = log.read(changenode)
462 changes = log.read(changenode)
463 date = util.datestr(changes[2])
463 date = util.datestr(changes[2])
464
464
465 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
465 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
466 for p in log.parents(changenode)
466 for p in log.parents(changenode)
467 if self.ui.debugflag or p != nullid]
467 if self.ui.debugflag or p != nullid]
468 if (not self.ui.debugflag and len(parents) == 1 and
468 if (not self.ui.debugflag and len(parents) == 1 and
469 parents[0][0] == rev-1):
469 parents[0][0] == rev-1):
470 parents = []
470 parents = []
471
471
472 if self.ui.verbose:
472 if self.ui.verbose:
473 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
473 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
474 else:
474 else:
475 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
475 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
476
476
477 for tag in self.repo.nodetags(changenode):
477 for tag in self.repo.nodetags(changenode):
478 self.ui.status(_("tag: %s\n") % tag)
478 self.ui.status(_("tag: %s\n") % tag)
479 for parent in parents:
479 for parent in parents:
480 self.ui.write(_("parent: %d:%s\n") % parent)
480 self.ui.write(_("parent: %d:%s\n") % parent)
481
481
482 if brinfo and changenode in brinfo:
482 if brinfo and changenode in brinfo:
483 br = brinfo[changenode]
483 br = brinfo[changenode]
484 self.ui.write(_("branch: %s\n") % " ".join(br))
484 self.ui.write(_("branch: %s\n") % " ".join(br))
485
485
486 self.ui.debug(_("manifest: %d:%s\n") %
486 self.ui.debug(_("manifest: %d:%s\n") %
487 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
487 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
488 self.ui.status(_("user: %s\n") % changes[1])
488 self.ui.status(_("user: %s\n") % changes[1])
489 self.ui.status(_("date: %s\n") % date)
489 self.ui.status(_("date: %s\n") % date)
490
490
491 if self.ui.debugflag:
491 if self.ui.debugflag:
492 files = self.repo.changes(log.parents(changenode)[0], changenode)
492 files = self.repo.changes(log.parents(changenode)[0], changenode)
493 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
493 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
494 files):
494 files):
495 if value:
495 if value:
496 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
496 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
497 else:
497 else:
498 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
498 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
499
499
500 description = changes[4].strip()
500 description = changes[4].strip()
501 if description:
501 if description:
502 if self.ui.verbose:
502 if self.ui.verbose:
503 self.ui.status(_("description:\n"))
503 self.ui.status(_("description:\n"))
504 self.ui.status(description)
504 self.ui.status(description)
505 self.ui.status("\n\n")
505 self.ui.status("\n\n")
506 else:
506 else:
507 self.ui.status(_("summary: %s\n") %
507 self.ui.status(_("summary: %s\n") %
508 description.splitlines()[0])
508 description.splitlines()[0])
509 self.ui.status("\n")
509 self.ui.status("\n")
510
510
511 def show_changeset(ui, repo, opts):
511 def show_changeset(ui, repo, opts):
512 '''show one changeset. uses template or regular display. caller
512 '''show one changeset. uses template or regular display. caller
513 can pass in 'style' and 'template' options in opts.'''
513 can pass in 'style' and 'template' options in opts.'''
514
514
515 tmpl = opts.get('template')
515 tmpl = opts.get('template')
516 if tmpl:
516 if tmpl:
517 tmpl = templater.parsestring(tmpl, quoted=False)
517 tmpl = templater.parsestring(tmpl, quoted=False)
518 else:
518 else:
519 tmpl = ui.config('ui', 'logtemplate')
519 tmpl = ui.config('ui', 'logtemplate')
520 if tmpl: tmpl = templater.parsestring(tmpl)
520 if tmpl: tmpl = templater.parsestring(tmpl)
521 mapfile = opts.get('style') or ui.config('ui', 'style')
521 mapfile = opts.get('style') or ui.config('ui', 'style')
522 if tmpl or mapfile:
522 if tmpl or mapfile:
523 if mapfile:
523 if mapfile:
524 if not os.path.isfile(mapfile):
524 if not os.path.isfile(mapfile):
525 mapname = templater.templatepath('map-cmdline.' + mapfile)
525 mapname = templater.templatepath('map-cmdline.' + mapfile)
526 if not mapname: mapname = templater.templatepath(mapfile)
526 if not mapname: mapname = templater.templatepath(mapfile)
527 if mapname: mapfile = mapname
527 if mapname: mapfile = mapname
528 try:
528 try:
529 t = templater.changeset_templater(ui, repo, mapfile)
529 t = templater.changeset_templater(ui, repo, mapfile)
530 except SyntaxError, inst:
530 except SyntaxError, inst:
531 raise util.Abort(inst.args[0])
531 raise util.Abort(inst.args[0])
532 if tmpl: t.use_template(tmpl)
532 if tmpl: t.use_template(tmpl)
533 return t
533 return t
534 return changeset_printer(ui, repo)
534 return changeset_printer(ui, repo)
535
535
536 def show_version(ui):
536 def show_version(ui):
537 """output version and copyright information"""
537 """output version and copyright information"""
538 ui.write(_("Mercurial Distributed SCM (version %s)\n")
538 ui.write(_("Mercurial Distributed SCM (version %s)\n")
539 % version.get_version())
539 % version.get_version())
540 ui.status(_(
540 ui.status(_(
541 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
541 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
542 "This is free software; see the source for copying conditions. "
542 "This is free software; see the source for copying conditions. "
543 "There is NO\nwarranty; "
543 "There is NO\nwarranty; "
544 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
544 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
545 ))
545 ))
546
546
547 def help_(ui, name=None, with_version=False):
547 def help_(ui, name=None, with_version=False):
548 """show help for a command, extension, or list of commands
548 """show help for a command, extension, or list of commands
549
549
550 With no arguments, print a list of commands and short help.
550 With no arguments, print a list of commands and short help.
551
551
552 Given a command name, print help for that command.
552 Given a command name, print help for that command.
553
553
554 Given an extension name, print help for that extension, and the
554 Given an extension name, print help for that extension, and the
555 commands it provides."""
555 commands it provides."""
556 option_lists = []
556 option_lists = []
557
557
558 def helpcmd(name):
558 def helpcmd(name):
559 if with_version:
559 if with_version:
560 show_version(ui)
560 show_version(ui)
561 ui.write('\n')
561 ui.write('\n')
562 aliases, i = findcmd(name)
562 aliases, i = findcmd(name)
563 # synopsis
563 # synopsis
564 ui.write("%s\n\n" % i[2])
564 ui.write("%s\n\n" % i[2])
565
565
566 # description
566 # description
567 doc = i[0].__doc__
567 doc = i[0].__doc__
568 if not doc:
568 if not doc:
569 doc = _("(No help text available)")
569 doc = _("(No help text available)")
570 if ui.quiet:
570 if ui.quiet:
571 doc = doc.splitlines(0)[0]
571 doc = doc.splitlines(0)[0]
572 ui.write("%s\n" % doc.rstrip())
572 ui.write("%s\n" % doc.rstrip())
573
573
574 if not ui.quiet:
574 if not ui.quiet:
575 # aliases
575 # aliases
576 if len(aliases) > 1:
576 if len(aliases) > 1:
577 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
577 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
578
578
579 # options
579 # options
580 if i[1]:
580 if i[1]:
581 option_lists.append(("options", i[1]))
581 option_lists.append(("options", i[1]))
582
582
583 def helplist(select=None):
583 def helplist(select=None):
584 h = {}
584 h = {}
585 cmds = {}
585 cmds = {}
586 for c, e in table.items():
586 for c, e in table.items():
587 f = c.split("|", 1)[0]
587 f = c.split("|", 1)[0]
588 if select and not select(f):
588 if select and not select(f):
589 continue
589 continue
590 if name == "shortlist" and not f.startswith("^"):
590 if name == "shortlist" and not f.startswith("^"):
591 continue
591 continue
592 f = f.lstrip("^")
592 f = f.lstrip("^")
593 if not ui.debugflag and f.startswith("debug"):
593 if not ui.debugflag and f.startswith("debug"):
594 continue
594 continue
595 doc = e[0].__doc__
595 doc = e[0].__doc__
596 if not doc:
596 if not doc:
597 doc = _("(No help text available)")
597 doc = _("(No help text available)")
598 h[f] = doc.splitlines(0)[0].rstrip()
598 h[f] = doc.splitlines(0)[0].rstrip()
599 cmds[f] = c.lstrip("^")
599 cmds[f] = c.lstrip("^")
600
600
601 fns = h.keys()
601 fns = h.keys()
602 fns.sort()
602 fns.sort()
603 m = max(map(len, fns))
603 m = max(map(len, fns))
604 for f in fns:
604 for f in fns:
605 if ui.verbose:
605 if ui.verbose:
606 commands = cmds[f].replace("|",", ")
606 commands = cmds[f].replace("|",", ")
607 ui.write(" %s:\n %s\n"%(commands, h[f]))
607 ui.write(" %s:\n %s\n"%(commands, h[f]))
608 else:
608 else:
609 ui.write(' %-*s %s\n' % (m, f, h[f]))
609 ui.write(' %-*s %s\n' % (m, f, h[f]))
610
610
611 def helpext(name):
611 def helpext(name):
612 try:
612 try:
613 mod = findext(name)
613 mod = findext(name)
614 except KeyError:
614 except KeyError:
615 raise UnknownCommand(name)
615 raise UnknownCommand(name)
616
616
617 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
617 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
618 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
618 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
619 for d in doc[1:]:
619 for d in doc[1:]:
620 ui.write(d, '\n')
620 ui.write(d, '\n')
621
621
622 ui.status('\n')
622 ui.status('\n')
623 if ui.verbose:
623 if ui.verbose:
624 ui.status(_('list of commands:\n\n'))
624 ui.status(_('list of commands:\n\n'))
625 else:
625 else:
626 ui.status(_('list of commands (use "hg help -v %s" '
626 ui.status(_('list of commands (use "hg help -v %s" '
627 'to show aliases and global options):\n\n') % name)
627 'to show aliases and global options):\n\n') % name)
628
628
629 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
629 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
630 helplist(modcmds.has_key)
630 helplist(modcmds.has_key)
631
631
632 if name and name != 'shortlist':
632 if name and name != 'shortlist':
633 try:
633 try:
634 helpcmd(name)
634 helpcmd(name)
635 except UnknownCommand:
635 except UnknownCommand:
636 helpext(name)
636 helpext(name)
637
637
638 else:
638 else:
639 # program name
639 # program name
640 if ui.verbose or with_version:
640 if ui.verbose or with_version:
641 show_version(ui)
641 show_version(ui)
642 else:
642 else:
643 ui.status(_("Mercurial Distributed SCM\n"))
643 ui.status(_("Mercurial Distributed SCM\n"))
644 ui.status('\n')
644 ui.status('\n')
645
645
646 # list of commands
646 # list of commands
647 if name == "shortlist":
647 if name == "shortlist":
648 ui.status(_('basic commands (use "hg help" '
648 ui.status(_('basic commands (use "hg help" '
649 'for the full list or option "-v" for details):\n\n'))
649 'for the full list or option "-v" for details):\n\n'))
650 elif ui.verbose:
650 elif ui.verbose:
651 ui.status(_('list of commands:\n\n'))
651 ui.status(_('list of commands:\n\n'))
652 else:
652 else:
653 ui.status(_('list of commands (use "hg help -v" '
653 ui.status(_('list of commands (use "hg help -v" '
654 'to show aliases and global options):\n\n'))
654 'to show aliases and global options):\n\n'))
655
655
656 helplist()
656 helplist()
657
657
658 # global options
658 # global options
659 if ui.verbose:
659 if ui.verbose:
660 option_lists.append(("global options", globalopts))
660 option_lists.append(("global options", globalopts))
661
661
662 # list all option lists
662 # list all option lists
663 opt_output = []
663 opt_output = []
664 for title, options in option_lists:
664 for title, options in option_lists:
665 opt_output.append(("\n%s:\n" % title, None))
665 opt_output.append(("\n%s:\n" % title, None))
666 for shortopt, longopt, default, desc in options:
666 for shortopt, longopt, default, desc in options:
667 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
667 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
668 longopt and " --%s" % longopt),
668 longopt and " --%s" % longopt),
669 "%s%s" % (desc,
669 "%s%s" % (desc,
670 default
670 default
671 and _(" (default: %s)") % default
671 and _(" (default: %s)") % default
672 or "")))
672 or "")))
673
673
674 if opt_output:
674 if opt_output:
675 opts_len = max([len(line[0]) for line in opt_output if line[1]])
675 opts_len = max([len(line[0]) for line in opt_output if line[1]])
676 for first, second in opt_output:
676 for first, second in opt_output:
677 if second:
677 if second:
678 ui.write(" %-*s %s\n" % (opts_len, first, second))
678 ui.write(" %-*s %s\n" % (opts_len, first, second))
679 else:
679 else:
680 ui.write("%s\n" % first)
680 ui.write("%s\n" % first)
681
681
682 # Commands start here, listed alphabetically
682 # Commands start here, listed alphabetically
683
683
684 def add(ui, repo, *pats, **opts):
684 def add(ui, repo, *pats, **opts):
685 """add the specified files on the next commit
685 """add the specified files on the next commit
686
686
687 Schedule files to be version controlled and added to the repository.
687 Schedule files to be version controlled and added to the repository.
688
688
689 The files will be added to the repository at the next commit.
689 The files will be added to the repository at the next commit.
690
690
691 If no names are given, add all files in the repository.
691 If no names are given, add all files in the repository.
692 """
692 """
693
693
694 names = []
694 names = []
695 for src, abs, rel, exact in walk(repo, pats, opts):
695 for src, abs, rel, exact in walk(repo, pats, opts):
696 if exact:
696 if exact:
697 if ui.verbose:
697 if ui.verbose:
698 ui.status(_('adding %s\n') % rel)
698 ui.status(_('adding %s\n') % rel)
699 names.append(abs)
699 names.append(abs)
700 elif repo.dirstate.state(abs) == '?':
700 elif repo.dirstate.state(abs) == '?':
701 ui.status(_('adding %s\n') % rel)
701 ui.status(_('adding %s\n') % rel)
702 names.append(abs)
702 names.append(abs)
703 if not opts.get('dry_run'):
703 if not opts.get('dry_run'):
704 repo.add(names)
704 repo.add(names)
705
705
706 def addremove(ui, repo, *pats, **opts):
706 def addremove(ui, repo, *pats, **opts):
707 """add all new files, delete all missing files (DEPRECATED)
707 """add all new files, delete all missing files (DEPRECATED)
708
708
709 (DEPRECATED)
709 (DEPRECATED)
710 Add all new files and remove all missing files from the repository.
710 Add all new files and remove all missing files from the repository.
711
711
712 New files are ignored if they match any of the patterns in .hgignore. As
712 New files are ignored if they match any of the patterns in .hgignore. As
713 with add, these changes take effect at the next commit.
713 with add, these changes take effect at the next commit.
714
714
715 This command is now deprecated and will be removed in a future
715 This command is now deprecated and will be removed in a future
716 release. Please use add and remove --after instead.
716 release. Please use add and remove --after instead.
717 """
717 """
718 ui.warn(_('(the addremove command is deprecated; use add and remove '
718 ui.warn(_('(the addremove command is deprecated; use add and remove '
719 '--after instead)\n'))
719 '--after instead)\n'))
720 return addremove_lock(ui, repo, pats, opts)
720 return addremove_lock(ui, repo, pats, opts)
721
721
722 def addremove_lock(ui, repo, pats, opts, wlock=None):
722 def addremove_lock(ui, repo, pats, opts, wlock=None):
723 add, remove = [], []
723 add, remove = [], []
724 for src, abs, rel, exact in walk(repo, pats, opts):
724 for src, abs, rel, exact in walk(repo, pats, opts):
725 if src == 'f' and repo.dirstate.state(abs) == '?':
725 if src == 'f' and repo.dirstate.state(abs) == '?':
726 add.append(abs)
726 add.append(abs)
727 if ui.verbose or not exact:
727 if ui.verbose or not exact:
728 ui.status(_('adding %s\n') % ((pats and rel) or abs))
728 ui.status(_('adding %s\n') % ((pats and rel) or abs))
729 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
729 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
730 remove.append(abs)
730 remove.append(abs)
731 if ui.verbose or not exact:
731 if ui.verbose or not exact:
732 ui.status(_('removing %s\n') % ((pats and rel) or abs))
732 ui.status(_('removing %s\n') % ((pats and rel) or abs))
733 if not opts.get('dry_run'):
733 if not opts.get('dry_run'):
734 repo.add(add, wlock=wlock)
734 repo.add(add, wlock=wlock)
735 repo.remove(remove, wlock=wlock)
735 repo.remove(remove, wlock=wlock)
736
736
737 def annotate(ui, repo, *pats, **opts):
737 def annotate(ui, repo, *pats, **opts):
738 """show changeset information per file line
738 """show changeset information per file line
739
739
740 List changes in files, showing the revision id responsible for each line
740 List changes in files, showing the revision id responsible for each line
741
741
742 This command is useful to discover who did a change or when a change took
742 This command is useful to discover who did a change or when a change took
743 place.
743 place.
744
744
745 Without the -a option, annotate will avoid processing files it
745 Without the -a option, annotate will avoid processing files it
746 detects as binary. With -a, annotate will generate an annotation
746 detects as binary. With -a, annotate will generate an annotation
747 anyway, probably with undesirable results.
747 anyway, probably with undesirable results.
748 """
748 """
749 def getnode(rev):
749 def getnode(rev):
750 return short(repo.changelog.node(rev))
750 return short(repo.changelog.node(rev))
751
751
752 ucache = {}
752 ucache = {}
753 def getname(rev):
753 def getname(rev):
754 try:
754 try:
755 return ucache[rev]
755 return ucache[rev]
756 except:
756 except:
757 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
757 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
758 ucache[rev] = u
758 ucache[rev] = u
759 return u
759 return u
760
760
761 dcache = {}
761 dcache = {}
762 def getdate(rev):
762 def getdate(rev):
763 datestr = dcache.get(rev)
763 datestr = dcache.get(rev)
764 if datestr is None:
764 if datestr is None:
765 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
765 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
766 return datestr
766 return datestr
767
767
768 if not pats:
768 if not pats:
769 raise util.Abort(_('at least one file name or pattern required'))
769 raise util.Abort(_('at least one file name or pattern required'))
770
770
771 opmap = [['user', getname], ['number', str], ['changeset', getnode],
771 opmap = [['user', getname], ['number', str], ['changeset', getnode],
772 ['date', getdate]]
772 ['date', getdate]]
773 if not opts['user'] and not opts['changeset'] and not opts['date']:
773 if not opts['user'] and not opts['changeset'] and not opts['date']:
774 opts['number'] = 1
774 opts['number'] = 1
775
775
776 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
776 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
777
777
778 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
778 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
779 fctx = ctx.filectx(abs)
779 fctx = ctx.filectx(abs)
780 if not opts['text'] and util.binary(fctx.data()):
780 if not opts['text'] and util.binary(fctx.data()):
781 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
781 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
782 continue
782 continue
783
783
784 lines = fctx.annotate()
784 lines = fctx.annotate()
785 pieces = []
785 pieces = []
786
786
787 for o, f in opmap:
787 for o, f in opmap:
788 if opts[o]:
788 if opts[o]:
789 l = [f(n) for n, dummy in lines]
789 l = [f(n) for n, dummy in lines]
790 if l:
790 if l:
791 m = max(map(len, l))
791 m = max(map(len, l))
792 pieces.append(["%*s" % (m, x) for x in l])
792 pieces.append(["%*s" % (m, x) for x in l])
793
793
794 if pieces:
794 if pieces:
795 for p, l in zip(zip(*pieces), lines):
795 for p, l in zip(zip(*pieces), lines):
796 ui.write("%s: %s" % (" ".join(p), l[1]))
796 ui.write("%s: %s" % (" ".join(p), l[1]))
797
797
798 def archive(ui, repo, dest, **opts):
798 def archive(ui, repo, dest, **opts):
799 '''create unversioned archive of a repository revision
799 '''create unversioned archive of a repository revision
800
800
801 By default, the revision used is the parent of the working
801 By default, the revision used is the parent of the working
802 directory; use "-r" to specify a different revision.
802 directory; use "-r" to specify a different revision.
803
803
804 To specify the type of archive to create, use "-t". Valid
804 To specify the type of archive to create, use "-t". Valid
805 types are:
805 types are:
806
806
807 "files" (default): a directory full of files
807 "files" (default): a directory full of files
808 "tar": tar archive, uncompressed
808 "tar": tar archive, uncompressed
809 "tbz2": tar archive, compressed using bzip2
809 "tbz2": tar archive, compressed using bzip2
810 "tgz": tar archive, compressed using gzip
810 "tgz": tar archive, compressed using gzip
811 "uzip": zip archive, uncompressed
811 "uzip": zip archive, uncompressed
812 "zip": zip archive, compressed using deflate
812 "zip": zip archive, compressed using deflate
813
813
814 The exact name of the destination archive or directory is given
814 The exact name of the destination archive or directory is given
815 using a format string; see "hg help export" for details.
815 using a format string; see "hg help export" for details.
816
816
817 Each member added to an archive file has a directory prefix
817 Each member added to an archive file has a directory prefix
818 prepended. Use "-p" to specify a format string for the prefix.
818 prepended. Use "-p" to specify a format string for the prefix.
819 The default is the basename of the archive, with suffixes removed.
819 The default is the basename of the archive, with suffixes removed.
820 '''
820 '''
821
821
822 if opts['rev']:
822 if opts['rev']:
823 node = repo.lookup(opts['rev'])
823 node = repo.lookup(opts['rev'])
824 else:
824 else:
825 node, p2 = repo.dirstate.parents()
825 node, p2 = repo.dirstate.parents()
826 if p2 != nullid:
826 if p2 != nullid:
827 raise util.Abort(_('uncommitted merge - please provide a '
827 raise util.Abort(_('uncommitted merge - please provide a '
828 'specific revision'))
828 'specific revision'))
829
829
830 dest = make_filename(repo, dest, node)
830 dest = make_filename(repo, dest, node)
831 if os.path.realpath(dest) == repo.root:
831 if os.path.realpath(dest) == repo.root:
832 raise util.Abort(_('repository root cannot be destination'))
832 raise util.Abort(_('repository root cannot be destination'))
833 dummy, matchfn, dummy = matchpats(repo, [], opts)
833 dummy, matchfn, dummy = matchpats(repo, [], opts)
834 kind = opts.get('type') or 'files'
834 kind = opts.get('type') or 'files'
835 prefix = opts['prefix']
835 prefix = opts['prefix']
836 if dest == '-':
836 if dest == '-':
837 if kind == 'files':
837 if kind == 'files':
838 raise util.Abort(_('cannot archive plain files to stdout'))
838 raise util.Abort(_('cannot archive plain files to stdout'))
839 dest = sys.stdout
839 dest = sys.stdout
840 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
840 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
841 prefix = make_filename(repo, prefix, node)
841 prefix = make_filename(repo, prefix, node)
842 archival.archive(repo, dest, node, kind, not opts['no_decode'],
842 archival.archive(repo, dest, node, kind, not opts['no_decode'],
843 matchfn, prefix)
843 matchfn, prefix)
844
844
845 def backout(ui, repo, rev, **opts):
845 def backout(ui, repo, rev, **opts):
846 '''reverse effect of earlier changeset
846 '''reverse effect of earlier changeset
847
847
848 Commit the backed out changes as a new changeset. The new
848 Commit the backed out changes as a new changeset. The new
849 changeset is a child of the backed out changeset.
849 changeset is a child of the backed out changeset.
850
850
851 If you back out a changeset other than the tip, a new head is
851 If you back out a changeset other than the tip, a new head is
852 created. This head is the parent of the working directory. If
852 created. This head is the parent of the working directory. If
853 you back out an old changeset, your working directory will appear
853 you back out an old changeset, your working directory will appear
854 old after the backout. You should merge the backout changeset
854 old after the backout. You should merge the backout changeset
855 with another head.
855 with another head.
856
856
857 The --merge option remembers the parent of the working directory
857 The --merge option remembers the parent of the working directory
858 before starting the backout, then merges the new head with that
858 before starting the backout, then merges the new head with that
859 changeset afterwards. This saves you from doing the merge by
859 changeset afterwards. This saves you from doing the merge by
860 hand. The result of this merge is not committed, as for a normal
860 hand. The result of this merge is not committed, as for a normal
861 merge.'''
861 merge.'''
862
862
863 bail_if_changed(repo)
863 bail_if_changed(repo)
864 op1, op2 = repo.dirstate.parents()
864 op1, op2 = repo.dirstate.parents()
865 if op2 != nullid:
865 if op2 != nullid:
866 raise util.Abort(_('outstanding uncommitted merge'))
866 raise util.Abort(_('outstanding uncommitted merge'))
867 node = repo.lookup(rev)
867 node = repo.lookup(rev)
868 parent, p2 = repo.changelog.parents(node)
868 parent, p2 = repo.changelog.parents(node)
869 if parent == nullid:
869 if parent == nullid:
870 raise util.Abort(_('cannot back out a change with no parents'))
870 raise util.Abort(_('cannot back out a change with no parents'))
871 if p2 != nullid:
871 if p2 != nullid:
872 raise util.Abort(_('cannot back out a merge'))
872 raise util.Abort(_('cannot back out a merge'))
873 repo.update(node, force=True, show_stats=False)
873 repo.update(node, force=True, show_stats=False)
874 revert_opts = opts.copy()
874 revert_opts = opts.copy()
875 revert_opts['rev'] = hex(parent)
875 revert_opts['rev'] = hex(parent)
876 revert(ui, repo, **revert_opts)
876 revert(ui, repo, **revert_opts)
877 commit_opts = opts.copy()
877 commit_opts = opts.copy()
878 commit_opts['addremove'] = False
878 commit_opts['addremove'] = False
879 if not commit_opts['message'] and not commit_opts['logfile']:
879 if not commit_opts['message'] and not commit_opts['logfile']:
880 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
880 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
881 commit_opts['force_editor'] = True
881 commit_opts['force_editor'] = True
882 commit(ui, repo, **commit_opts)
882 commit(ui, repo, **commit_opts)
883 def nice(node):
883 def nice(node):
884 return '%d:%s' % (repo.changelog.rev(node), short(node))
884 return '%d:%s' % (repo.changelog.rev(node), short(node))
885 ui.status(_('changeset %s backs out changeset %s\n') %
885 ui.status(_('changeset %s backs out changeset %s\n') %
886 (nice(repo.changelog.tip()), nice(node)))
886 (nice(repo.changelog.tip()), nice(node)))
887 if op1 != node:
887 if op1 != node:
888 if opts['merge']:
888 if opts['merge']:
889 ui.status(_('merging with changeset %s\n') % nice(op1))
889 ui.status(_('merging with changeset %s\n') % nice(op1))
890 doupdate(ui, repo, hex(op1), **opts)
890 doupdate(ui, repo, hex(op1), **opts)
891 else:
891 else:
892 ui.status(_('the backout changeset is a new head - '
892 ui.status(_('the backout changeset is a new head - '
893 'do not forget to merge\n'))
893 'do not forget to merge\n'))
894 ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
894 ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
895
895
896 def bundle(ui, repo, fname, dest=None, **opts):
896 def bundle(ui, repo, fname, dest=None, **opts):
897 """create a changegroup file
897 """create a changegroup file
898
898
899 Generate a compressed changegroup file collecting all changesets
899 Generate a compressed changegroup file collecting all changesets
900 not found in the other repository.
900 not found in the other repository.
901
901
902 This file can then be transferred using conventional means and
902 This file can then be transferred using conventional means and
903 applied to another repository with the unbundle command. This is
903 applied to another repository with the unbundle command. This is
904 useful when native push and pull are not available or when
904 useful when native push and pull are not available or when
905 exporting an entire repository is undesirable. The standard file
905 exporting an entire repository is undesirable. The standard file
906 extension is ".hg".
906 extension is ".hg".
907
907
908 Unlike import/export, this exactly preserves all changeset
908 Unlike import/export, this exactly preserves all changeset
909 contents including permissions, rename data, and revision history.
909 contents including permissions, rename data, and revision history.
910 """
910 """
911 dest = ui.expandpath(dest or 'default-push', dest or 'default')
911 dest = ui.expandpath(dest or 'default-push', dest or 'default')
912 other = hg.repository(ui, dest)
912 other = hg.repository(ui, dest)
913 o = repo.findoutgoing(other, force=opts['force'])
913 o = repo.findoutgoing(other, force=opts['force'])
914 cg = repo.changegroup(o, 'bundle')
914 cg = repo.changegroup(o, 'bundle')
915 write_bundle(cg, fname)
915 write_bundle(cg, fname)
916
916
917 def cat(ui, repo, file1, *pats, **opts):
917 def cat(ui, repo, file1, *pats, **opts):
918 """output the latest or given revisions of files
918 """output the latest or given revisions of files
919
919
920 Print the specified files as they were at the given revision.
920 Print the specified files as they were at the given revision.
921 If no revision is given then the tip is used.
921 If no revision is given then the tip is used.
922
922
923 Output may be to a file, in which case the name of the file is
923 Output may be to a file, in which case the name of the file is
924 given using a format string. The formatting rules are the same as
924 given using a format string. The formatting rules are the same as
925 for the export command, with the following additions:
925 for the export command, with the following additions:
926
926
927 %s basename of file being printed
927 %s basename of file being printed
928 %d dirname of file being printed, or '.' if in repo root
928 %d dirname of file being printed, or '.' if in repo root
929 %p root-relative path name of file being printed
929 %p root-relative path name of file being printed
930 """
930 """
931 ctx = repo.changectx(opts['rev'] or -1)
931 ctx = repo.changectx(opts['rev'] or -1)
932 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
932 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
933 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
933 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
934 fp.write(ctx.filectx(abs).data())
934 fp.write(ctx.filectx(abs).data())
935
935
936 def clone(ui, source, dest=None, **opts):
936 def clone(ui, source, dest=None, **opts):
937 """make a copy of an existing repository
937 """make a copy of an existing repository
938
938
939 Create a copy of an existing repository in a new directory.
939 Create a copy of an existing repository in a new directory.
940
940
941 If no destination directory name is specified, it defaults to the
941 If no destination directory name is specified, it defaults to the
942 basename of the source.
942 basename of the source.
943
943
944 The location of the source is added to the new repository's
944 The location of the source is added to the new repository's
945 .hg/hgrc file, as the default to be used for future pulls.
945 .hg/hgrc file, as the default to be used for future pulls.
946
946
947 For efficiency, hardlinks are used for cloning whenever the source
947 For efficiency, hardlinks are used for cloning whenever the source
948 and destination are on the same filesystem. Some filesystems,
948 and destination are on the same filesystem. Some filesystems,
949 such as AFS, implement hardlinking incorrectly, but do not report
949 such as AFS, implement hardlinking incorrectly, but do not report
950 errors. In these cases, use the --pull option to avoid
950 errors. In these cases, use the --pull option to avoid
951 hardlinking.
951 hardlinking.
952
952
953 See pull for valid source format details.
953 See pull for valid source format details.
954
954
955 It is possible to specify an ssh:// URL as the destination, but no
955 It is possible to specify an ssh:// URL as the destination, but no
956 .hg/hgrc will be created on the remote side. Look at the help text
956 .hg/hgrc will be created on the remote side. Look at the help text
957 for the pull command for important details about ssh:// URLs.
957 for the pull command for important details about ssh:// URLs.
958 """
958 """
959 ui.setconfig_remoteopts(**opts)
959 ui.setconfig_remoteopts(**opts)
960 hg.clone(ui, ui.expandpath(source), dest,
960 hg.clone(ui, ui.expandpath(source), dest,
961 pull=opts['pull'],
961 pull=opts['pull'],
962 rev=opts['rev'],
962 rev=opts['rev'],
963 update=not opts['noupdate'])
963 update=not opts['noupdate'])
964
964
965 def commit(ui, repo, *pats, **opts):
965 def commit(ui, repo, *pats, **opts):
966 """commit the specified files or all outstanding changes
966 """commit the specified files or all outstanding changes
967
967
968 Commit changes to the given files into the repository.
968 Commit changes to the given files into the repository.
969
969
970 If a list of files is omitted, all changes reported by "hg status"
970 If a list of files is omitted, all changes reported by "hg status"
971 will be committed.
971 will be committed.
972
972
973 If no commit message is specified, the editor configured in your hgrc
973 If no commit message is specified, the editor configured in your hgrc
974 or in the EDITOR environment variable is started to enter a message.
974 or in the EDITOR environment variable is started to enter a message.
975 """
975 """
976 message = opts['message']
976 message = opts['message']
977 logfile = opts['logfile']
977 logfile = opts['logfile']
978
978
979 if message and logfile:
979 if message and logfile:
980 raise util.Abort(_('options --message and --logfile are mutually '
980 raise util.Abort(_('options --message and --logfile are mutually '
981 'exclusive'))
981 'exclusive'))
982 if not message and logfile:
982 if not message and logfile:
983 try:
983 try:
984 if logfile == '-':
984 if logfile == '-':
985 message = sys.stdin.read()
985 message = sys.stdin.read()
986 else:
986 else:
987 message = open(logfile).read()
987 message = open(logfile).read()
988 except IOError, inst:
988 except IOError, inst:
989 raise util.Abort(_("can't read commit message '%s': %s") %
989 raise util.Abort(_("can't read commit message '%s': %s") %
990 (logfile, inst.strerror))
990 (logfile, inst.strerror))
991
991
992 if opts['addremove']:
992 if opts['addremove']:
993 addremove_lock(ui, repo, pats, opts)
993 addremove_lock(ui, repo, pats, opts)
994 fns, match, anypats = matchpats(repo, pats, opts)
994 fns, match, anypats = matchpats(repo, pats, opts)
995 if pats:
995 if pats:
996 modified, added, removed, deleted, unknown = (
996 modified, added, removed, deleted, unknown = (
997 repo.changes(files=fns, match=match))
997 repo.changes(files=fns, match=match))
998 files = modified + added + removed
998 files = modified + added + removed
999 else:
999 else:
1000 files = []
1000 files = []
1001 try:
1001 try:
1002 repo.commit(files, message, opts['user'], opts['date'], match,
1002 repo.commit(files, message, opts['user'], opts['date'], match,
1003 force_editor=opts.get('force_editor'))
1003 force_editor=opts.get('force_editor'))
1004 except ValueError, inst:
1004 except ValueError, inst:
1005 raise util.Abort(str(inst))
1005 raise util.Abort(str(inst))
1006
1006
1007 def docopy(ui, repo, pats, opts, wlock):
1007 def docopy(ui, repo, pats, opts, wlock):
1008 # called with the repo lock held
1008 # called with the repo lock held
1009 cwd = repo.getcwd()
1009 cwd = repo.getcwd()
1010 errors = 0
1010 errors = 0
1011 copied = []
1011 copied = []
1012 targets = {}
1012 targets = {}
1013
1013
1014 def okaytocopy(abs, rel, exact):
1014 def okaytocopy(abs, rel, exact):
1015 reasons = {'?': _('is not managed'),
1015 reasons = {'?': _('is not managed'),
1016 'a': _('has been marked for add'),
1016 'a': _('has been marked for add'),
1017 'r': _('has been marked for remove')}
1017 'r': _('has been marked for remove')}
1018 state = repo.dirstate.state(abs)
1018 state = repo.dirstate.state(abs)
1019 reason = reasons.get(state)
1019 reason = reasons.get(state)
1020 if reason:
1020 if reason:
1021 if state == 'a':
1021 if state == 'a':
1022 origsrc = repo.dirstate.copied(abs)
1022 origsrc = repo.dirstate.copied(abs)
1023 if origsrc is not None:
1023 if origsrc is not None:
1024 return origsrc
1024 return origsrc
1025 if exact:
1025 if exact:
1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1027 else:
1027 else:
1028 return abs
1028 return abs
1029
1029
1030 def copy(origsrc, abssrc, relsrc, target, exact):
1030 def copy(origsrc, abssrc, relsrc, target, exact):
1031 abstarget = util.canonpath(repo.root, cwd, target)
1031 abstarget = util.canonpath(repo.root, cwd, target)
1032 reltarget = util.pathto(cwd, abstarget)
1032 reltarget = util.pathto(cwd, abstarget)
1033 prevsrc = targets.get(abstarget)
1033 prevsrc = targets.get(abstarget)
1034 if prevsrc is not None:
1034 if prevsrc is not None:
1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1036 (reltarget, abssrc, prevsrc))
1036 (reltarget, abssrc, prevsrc))
1037 return
1037 return
1038 if (not opts['after'] and os.path.exists(reltarget) or
1038 if (not opts['after'] and os.path.exists(reltarget) or
1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1040 if not opts['force']:
1040 if not opts['force']:
1041 ui.warn(_('%s: not overwriting - file exists\n') %
1041 ui.warn(_('%s: not overwriting - file exists\n') %
1042 reltarget)
1042 reltarget)
1043 return
1043 return
1044 if not opts['after'] and not opts.get('dry_run'):
1044 if not opts['after'] and not opts.get('dry_run'):
1045 os.unlink(reltarget)
1045 os.unlink(reltarget)
1046 if opts['after']:
1046 if opts['after']:
1047 if not os.path.exists(reltarget):
1047 if not os.path.exists(reltarget):
1048 return
1048 return
1049 else:
1049 else:
1050 targetdir = os.path.dirname(reltarget) or '.'
1050 targetdir = os.path.dirname(reltarget) or '.'
1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1052 os.makedirs(targetdir)
1052 os.makedirs(targetdir)
1053 try:
1053 try:
1054 restore = repo.dirstate.state(abstarget) == 'r'
1054 restore = repo.dirstate.state(abstarget) == 'r'
1055 if restore and not opts.get('dry_run'):
1055 if restore and not opts.get('dry_run'):
1056 repo.undelete([abstarget], wlock)
1056 repo.undelete([abstarget], wlock)
1057 try:
1057 try:
1058 if not opts.get('dry_run'):
1058 if not opts.get('dry_run'):
1059 shutil.copyfile(relsrc, reltarget)
1059 shutil.copyfile(relsrc, reltarget)
1060 shutil.copymode(relsrc, reltarget)
1060 shutil.copymode(relsrc, reltarget)
1061 restore = False
1061 restore = False
1062 finally:
1062 finally:
1063 if restore:
1063 if restore:
1064 repo.remove([abstarget], wlock)
1064 repo.remove([abstarget], wlock)
1065 except shutil.Error, inst:
1065 except shutil.Error, inst:
1066 raise util.Abort(str(inst))
1066 raise util.Abort(str(inst))
1067 except IOError, inst:
1067 except IOError, inst:
1068 if inst.errno == errno.ENOENT:
1068 if inst.errno == errno.ENOENT:
1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1070 else:
1070 else:
1071 ui.warn(_('%s: cannot copy - %s\n') %
1071 ui.warn(_('%s: cannot copy - %s\n') %
1072 (relsrc, inst.strerror))
1072 (relsrc, inst.strerror))
1073 errors += 1
1073 errors += 1
1074 return
1074 return
1075 if ui.verbose or not exact:
1075 if ui.verbose or not exact:
1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1077 targets[abstarget] = abssrc
1077 targets[abstarget] = abssrc
1078 if abstarget != origsrc and not opts.get('dry_run'):
1078 if abstarget != origsrc and not opts.get('dry_run'):
1079 repo.copy(origsrc, abstarget, wlock)
1079 repo.copy(origsrc, abstarget, wlock)
1080 copied.append((abssrc, relsrc, exact))
1080 copied.append((abssrc, relsrc, exact))
1081
1081
1082 def targetpathfn(pat, dest, srcs):
1082 def targetpathfn(pat, dest, srcs):
1083 if os.path.isdir(pat):
1083 if os.path.isdir(pat):
1084 abspfx = util.canonpath(repo.root, cwd, pat)
1084 abspfx = util.canonpath(repo.root, cwd, pat)
1085 if destdirexists:
1085 if destdirexists:
1086 striplen = len(os.path.split(abspfx)[0])
1086 striplen = len(os.path.split(abspfx)[0])
1087 else:
1087 else:
1088 striplen = len(abspfx)
1088 striplen = len(abspfx)
1089 if striplen:
1089 if striplen:
1090 striplen += len(os.sep)
1090 striplen += len(os.sep)
1091 res = lambda p: os.path.join(dest, p[striplen:])
1091 res = lambda p: os.path.join(dest, p[striplen:])
1092 elif destdirexists:
1092 elif destdirexists:
1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1094 else:
1094 else:
1095 res = lambda p: dest
1095 res = lambda p: dest
1096 return res
1096 return res
1097
1097
1098 def targetpathafterfn(pat, dest, srcs):
1098 def targetpathafterfn(pat, dest, srcs):
1099 if util.patkind(pat, None)[0]:
1099 if util.patkind(pat, None)[0]:
1100 # a mercurial pattern
1100 # a mercurial pattern
1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1102 else:
1102 else:
1103 abspfx = util.canonpath(repo.root, cwd, pat)
1103 abspfx = util.canonpath(repo.root, cwd, pat)
1104 if len(abspfx) < len(srcs[0][0]):
1104 if len(abspfx) < len(srcs[0][0]):
1105 # A directory. Either the target path contains the last
1105 # A directory. Either the target path contains the last
1106 # component of the source path or it does not.
1106 # component of the source path or it does not.
1107 def evalpath(striplen):
1107 def evalpath(striplen):
1108 score = 0
1108 score = 0
1109 for s in srcs:
1109 for s in srcs:
1110 t = os.path.join(dest, s[0][striplen:])
1110 t = os.path.join(dest, s[0][striplen:])
1111 if os.path.exists(t):
1111 if os.path.exists(t):
1112 score += 1
1112 score += 1
1113 return score
1113 return score
1114
1114
1115 striplen = len(abspfx)
1115 striplen = len(abspfx)
1116 if striplen:
1116 if striplen:
1117 striplen += len(os.sep)
1117 striplen += len(os.sep)
1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1119 score = evalpath(striplen)
1119 score = evalpath(striplen)
1120 striplen1 = len(os.path.split(abspfx)[0])
1120 striplen1 = len(os.path.split(abspfx)[0])
1121 if striplen1:
1121 if striplen1:
1122 striplen1 += len(os.sep)
1122 striplen1 += len(os.sep)
1123 if evalpath(striplen1) > score:
1123 if evalpath(striplen1) > score:
1124 striplen = striplen1
1124 striplen = striplen1
1125 res = lambda p: os.path.join(dest, p[striplen:])
1125 res = lambda p: os.path.join(dest, p[striplen:])
1126 else:
1126 else:
1127 # a file
1127 # a file
1128 if destdirexists:
1128 if destdirexists:
1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1130 else:
1130 else:
1131 res = lambda p: dest
1131 res = lambda p: dest
1132 return res
1132 return res
1133
1133
1134
1134
1135 pats = list(pats)
1135 pats = list(pats)
1136 if not pats:
1136 if not pats:
1137 raise util.Abort(_('no source or destination specified'))
1137 raise util.Abort(_('no source or destination specified'))
1138 if len(pats) == 1:
1138 if len(pats) == 1:
1139 raise util.Abort(_('no destination specified'))
1139 raise util.Abort(_('no destination specified'))
1140 dest = pats.pop()
1140 dest = pats.pop()
1141 destdirexists = os.path.isdir(dest)
1141 destdirexists = os.path.isdir(dest)
1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1143 raise util.Abort(_('with multiple sources, destination must be an '
1143 raise util.Abort(_('with multiple sources, destination must be an '
1144 'existing directory'))
1144 'existing directory'))
1145 if opts['after']:
1145 if opts['after']:
1146 tfn = targetpathafterfn
1146 tfn = targetpathafterfn
1147 else:
1147 else:
1148 tfn = targetpathfn
1148 tfn = targetpathfn
1149 copylist = []
1149 copylist = []
1150 for pat in pats:
1150 for pat in pats:
1151 srcs = []
1151 srcs = []
1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1154 if origsrc:
1154 if origsrc:
1155 srcs.append((origsrc, abssrc, relsrc, exact))
1155 srcs.append((origsrc, abssrc, relsrc, exact))
1156 if not srcs:
1156 if not srcs:
1157 continue
1157 continue
1158 copylist.append((tfn(pat, dest, srcs), srcs))
1158 copylist.append((tfn(pat, dest, srcs), srcs))
1159 if not copylist:
1159 if not copylist:
1160 raise util.Abort(_('no files to copy'))
1160 raise util.Abort(_('no files to copy'))
1161
1161
1162 for targetpath, srcs in copylist:
1162 for targetpath, srcs in copylist:
1163 for origsrc, abssrc, relsrc, exact in srcs:
1163 for origsrc, abssrc, relsrc, exact in srcs:
1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1165
1165
1166 if errors:
1166 if errors:
1167 ui.warn(_('(consider using --after)\n'))
1167 ui.warn(_('(consider using --after)\n'))
1168 return errors, copied
1168 return errors, copied
1169
1169
1170 def copy(ui, repo, *pats, **opts):
1170 def copy(ui, repo, *pats, **opts):
1171 """mark files as copied for the next commit
1171 """mark files as copied for the next commit
1172
1172
1173 Mark dest as having copies of source files. If dest is a
1173 Mark dest as having copies of source files. If dest is a
1174 directory, copies are put in that directory. If dest is a file,
1174 directory, copies are put in that directory. If dest is a file,
1175 there can only be one source.
1175 there can only be one source.
1176
1176
1177 By default, this command copies the contents of files as they
1177 By default, this command copies the contents of files as they
1178 stand in the working directory. If invoked with --after, the
1178 stand in the working directory. If invoked with --after, the
1179 operation is recorded, but no copying is performed.
1179 operation is recorded, but no copying is performed.
1180
1180
1181 This command takes effect in the next commit.
1181 This command takes effect in the next commit.
1182
1182
1183 NOTE: This command should be treated as experimental. While it
1183 NOTE: This command should be treated as experimental. While it
1184 should properly record copied files, this information is not yet
1184 should properly record copied files, this information is not yet
1185 fully used by merge, nor fully reported by log.
1185 fully used by merge, nor fully reported by log.
1186 """
1186 """
1187 wlock = repo.wlock(0)
1187 wlock = repo.wlock(0)
1188 errs, copied = docopy(ui, repo, pats, opts, wlock)
1188 errs, copied = docopy(ui, repo, pats, opts, wlock)
1189 return errs
1189 return errs
1190
1190
1191 def debugancestor(ui, index, rev1, rev2):
1191 def debugancestor(ui, index, rev1, rev2):
1192 """find the ancestor revision of two revisions in a given index"""
1192 """find the ancestor revision of two revisions in a given index"""
1193 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1193 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1194 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1194 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1195 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1195 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1196
1196
1197 def debugcomplete(ui, cmd='', **opts):
1197 def debugcomplete(ui, cmd='', **opts):
1198 """returns the completion list associated with the given command"""
1198 """returns the completion list associated with the given command"""
1199
1199
1200 if opts['options']:
1200 if opts['options']:
1201 options = []
1201 options = []
1202 otables = [globalopts]
1202 otables = [globalopts]
1203 if cmd:
1203 if cmd:
1204 aliases, entry = findcmd(cmd)
1204 aliases, entry = findcmd(cmd)
1205 otables.append(entry[1])
1205 otables.append(entry[1])
1206 for t in otables:
1206 for t in otables:
1207 for o in t:
1207 for o in t:
1208 if o[0]:
1208 if o[0]:
1209 options.append('-%s' % o[0])
1209 options.append('-%s' % o[0])
1210 options.append('--%s' % o[1])
1210 options.append('--%s' % o[1])
1211 ui.write("%s\n" % "\n".join(options))
1211 ui.write("%s\n" % "\n".join(options))
1212 return
1212 return
1213
1213
1214 clist = findpossible(cmd).keys()
1214 clist = findpossible(cmd).keys()
1215 clist.sort()
1215 clist.sort()
1216 ui.write("%s\n" % "\n".join(clist))
1216 ui.write("%s\n" % "\n".join(clist))
1217
1217
1218 def debugrebuildstate(ui, repo, rev=None):
1218 def debugrebuildstate(ui, repo, rev=None):
1219 """rebuild the dirstate as it would look like for the given revision"""
1219 """rebuild the dirstate as it would look like for the given revision"""
1220 if not rev:
1220 if not rev:
1221 rev = repo.changelog.tip()
1221 rev = repo.changelog.tip()
1222 else:
1222 else:
1223 rev = repo.lookup(rev)
1223 rev = repo.lookup(rev)
1224 change = repo.changelog.read(rev)
1224 change = repo.changelog.read(rev)
1225 n = change[0]
1225 n = change[0]
1226 files = repo.manifest.readflags(n)
1226 files = repo.manifest.readflags(n)
1227 wlock = repo.wlock()
1227 wlock = repo.wlock()
1228 repo.dirstate.rebuild(rev, files.iteritems())
1228 repo.dirstate.rebuild(rev, files.iteritems())
1229
1229
1230 def debugcheckstate(ui, repo):
1230 def debugcheckstate(ui, repo):
1231 """validate the correctness of the current dirstate"""
1231 """validate the correctness of the current dirstate"""
1232 parent1, parent2 = repo.dirstate.parents()
1232 parent1, parent2 = repo.dirstate.parents()
1233 repo.dirstate.read()
1233 repo.dirstate.read()
1234 dc = repo.dirstate.map
1234 dc = repo.dirstate.map
1235 keys = dc.keys()
1235 keys = dc.keys()
1236 keys.sort()
1236 keys.sort()
1237 m1n = repo.changelog.read(parent1)[0]
1237 m1n = repo.changelog.read(parent1)[0]
1238 m2n = repo.changelog.read(parent2)[0]
1238 m2n = repo.changelog.read(parent2)[0]
1239 m1 = repo.manifest.read(m1n)
1239 m1 = repo.manifest.read(m1n)
1240 m2 = repo.manifest.read(m2n)
1240 m2 = repo.manifest.read(m2n)
1241 errors = 0
1241 errors = 0
1242 for f in dc:
1242 for f in dc:
1243 state = repo.dirstate.state(f)
1243 state = repo.dirstate.state(f)
1244 if state in "nr" and f not in m1:
1244 if state in "nr" and f not in m1:
1245 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1245 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1246 errors += 1
1246 errors += 1
1247 if state in "a" and f in m1:
1247 if state in "a" and f in m1:
1248 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1248 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1249 errors += 1
1249 errors += 1
1250 if state in "m" and f not in m1 and f not in m2:
1250 if state in "m" and f not in m1 and f not in m2:
1251 ui.warn(_("%s in state %s, but not in either manifest\n") %
1251 ui.warn(_("%s in state %s, but not in either manifest\n") %
1252 (f, state))
1252 (f, state))
1253 errors += 1
1253 errors += 1
1254 for f in m1:
1254 for f in m1:
1255 state = repo.dirstate.state(f)
1255 state = repo.dirstate.state(f)
1256 if state not in "nrm":
1256 if state not in "nrm":
1257 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1257 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1258 errors += 1
1258 errors += 1
1259 if errors:
1259 if errors:
1260 error = _(".hg/dirstate inconsistent with current parent's manifest")
1260 error = _(".hg/dirstate inconsistent with current parent's manifest")
1261 raise util.Abort(error)
1261 raise util.Abort(error)
1262
1262
1263 def debugconfig(ui, repo, *values):
1263 def debugconfig(ui, repo, *values):
1264 """show combined config settings from all hgrc files
1264 """show combined config settings from all hgrc files
1265
1265
1266 With no args, print names and values of all config items.
1266 With no args, print names and values of all config items.
1267
1267
1268 With one arg of the form section.name, print just the value of
1268 With one arg of the form section.name, print just the value of
1269 that config item.
1269 that config item.
1270
1270
1271 With multiple args, print names and values of all config items
1271 With multiple args, print names and values of all config items
1272 with matching section names."""
1272 with matching section names."""
1273
1273
1274 if values:
1274 if values:
1275 if len([v for v in values if '.' in v]) > 1:
1275 if len([v for v in values if '.' in v]) > 1:
1276 raise util.Abort(_('only one config item permitted'))
1276 raise util.Abort(_('only one config item permitted'))
1277 for section, name, value in ui.walkconfig():
1277 for section, name, value in ui.walkconfig():
1278 sectname = section + '.' + name
1278 sectname = section + '.' + name
1279 if values:
1279 if values:
1280 for v in values:
1280 for v in values:
1281 if v == section:
1281 if v == section:
1282 ui.write('%s=%s\n' % (sectname, value))
1282 ui.write('%s=%s\n' % (sectname, value))
1283 elif v == sectname:
1283 elif v == sectname:
1284 ui.write(value, '\n')
1284 ui.write(value, '\n')
1285 else:
1285 else:
1286 ui.write('%s=%s\n' % (sectname, value))
1286 ui.write('%s=%s\n' % (sectname, value))
1287
1287
1288 def debugsetparents(ui, repo, rev1, rev2=None):
1288 def debugsetparents(ui, repo, rev1, rev2=None):
1289 """manually set the parents of the current working directory
1289 """manually set the parents of the current working directory
1290
1290
1291 This is useful for writing repository conversion tools, but should
1291 This is useful for writing repository conversion tools, but should
1292 be used with care.
1292 be used with care.
1293 """
1293 """
1294
1294
1295 if not rev2:
1295 if not rev2:
1296 rev2 = hex(nullid)
1296 rev2 = hex(nullid)
1297
1297
1298 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1298 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1299
1299
1300 def debugstate(ui, repo):
1300 def debugstate(ui, repo):
1301 """show the contents of the current dirstate"""
1301 """show the contents of the current dirstate"""
1302 repo.dirstate.read()
1302 repo.dirstate.read()
1303 dc = repo.dirstate.map
1303 dc = repo.dirstate.map
1304 keys = dc.keys()
1304 keys = dc.keys()
1305 keys.sort()
1305 keys.sort()
1306 for file_ in keys:
1306 for file_ in keys:
1307 ui.write("%c %3o %10d %s %s\n"
1307 ui.write("%c %3o %10d %s %s\n"
1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1309 time.strftime("%x %X",
1309 time.strftime("%x %X",
1310 time.localtime(dc[file_][3])), file_))
1310 time.localtime(dc[file_][3])), file_))
1311 for f in repo.dirstate.copies:
1311 for f in repo.dirstate.copies:
1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1313
1313
1314 def debugdata(ui, file_, rev):
1314 def debugdata(ui, file_, rev):
1315 """dump the contents of an data file revision"""
1315 """dump the contents of an data file revision"""
1316 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1316 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1317 file_[:-2] + ".i", file_, 0)
1317 file_[:-2] + ".i", file_, 0)
1318 try:
1318 try:
1319 ui.write(r.revision(r.lookup(rev)))
1319 ui.write(r.revision(r.lookup(rev)))
1320 except KeyError:
1320 except KeyError:
1321 raise util.Abort(_('invalid revision identifier %s'), rev)
1321 raise util.Abort(_('invalid revision identifier %s'), rev)
1322
1322
1323 def debugindex(ui, file_):
1323 def debugindex(ui, file_):
1324 """dump the contents of an index file"""
1324 """dump the contents of an index file"""
1325 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1325 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1326 ui.write(" rev offset length base linkrev" +
1326 ui.write(" rev offset length base linkrev" +
1327 " nodeid p1 p2\n")
1327 " nodeid p1 p2\n")
1328 for i in range(r.count()):
1328 for i in range(r.count()):
1329 node = r.node(i)
1329 node = r.node(i)
1330 pp = r.parents(node)
1330 pp = r.parents(node)
1331 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1331 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1332 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1332 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1333 short(node), short(pp[0]), short(pp[1])))
1333 short(node), short(pp[0]), short(pp[1])))
1334
1334
1335 def debugindexdot(ui, file_):
1335 def debugindexdot(ui, file_):
1336 """dump an index DAG as a .dot file"""
1336 """dump an index DAG as a .dot file"""
1337 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1337 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1338 ui.write("digraph G {\n")
1338 ui.write("digraph G {\n")
1339 for i in range(r.count()):
1339 for i in range(r.count()):
1340 node = r.node(i)
1340 node = r.node(i)
1341 pp = r.parents(node)
1341 pp = r.parents(node)
1342 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1342 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1343 if pp[1] != nullid:
1343 if pp[1] != nullid:
1344 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1344 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1345 ui.write("}\n")
1345 ui.write("}\n")
1346
1346
1347 def debugrename(ui, repo, file, rev=None):
1347 def debugrename(ui, repo, file, rev=None):
1348 """dump rename information"""
1348 """dump rename information"""
1349 r = repo.file(relpath(repo, [file])[0])
1349 r = repo.file(relpath(repo, [file])[0])
1350 if rev:
1350 if rev:
1351 try:
1351 try:
1352 # assume all revision numbers are for changesets
1352 # assume all revision numbers are for changesets
1353 n = repo.lookup(rev)
1353 n = repo.lookup(rev)
1354 change = repo.changelog.read(n)
1354 change = repo.changelog.read(n)
1355 m = repo.manifest.read(change[0])
1355 m = repo.manifest.read(change[0])
1356 n = m[relpath(repo, [file])[0]]
1356 n = m[relpath(repo, [file])[0]]
1357 except (hg.RepoError, KeyError):
1357 except (hg.RepoError, KeyError):
1358 n = r.lookup(rev)
1358 n = r.lookup(rev)
1359 else:
1359 else:
1360 n = r.tip()
1360 n = r.tip()
1361 m = r.renamed(n)
1361 m = r.renamed(n)
1362 if m:
1362 if m:
1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1364 else:
1364 else:
1365 ui.write(_("not renamed\n"))
1365 ui.write(_("not renamed\n"))
1366
1366
1367 def debugwalk(ui, repo, *pats, **opts):
1367 def debugwalk(ui, repo, *pats, **opts):
1368 """show how files match on given patterns"""
1368 """show how files match on given patterns"""
1369 items = list(walk(repo, pats, opts))
1369 items = list(walk(repo, pats, opts))
1370 if not items:
1370 if not items:
1371 return
1371 return
1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1373 max([len(abs) for (src, abs, rel, exact) in items]),
1373 max([len(abs) for (src, abs, rel, exact) in items]),
1374 max([len(rel) for (src, abs, rel, exact) in items]))
1374 max([len(rel) for (src, abs, rel, exact) in items]))
1375 for src, abs, rel, exact in items:
1375 for src, abs, rel, exact in items:
1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1377 ui.write("%s\n" % line.rstrip())
1377 ui.write("%s\n" % line.rstrip())
1378
1378
1379 def diff(ui, repo, *pats, **opts):
1379 def diff(ui, repo, *pats, **opts):
1380 """diff repository (or selected files)
1380 """diff repository (or selected files)
1381
1381
1382 Show differences between revisions for the specified files.
1382 Show differences between revisions for the specified files.
1383
1383
1384 Differences between files are shown using the unified diff format.
1384 Differences between files are shown using the unified diff format.
1385
1385
1386 When two revision arguments are given, then changes are shown
1386 When two revision arguments are given, then changes are shown
1387 between those revisions. If only one revision is specified then
1387 between those revisions. If only one revision is specified then
1388 that revision is compared to the working directory, and, when no
1388 that revision is compared to the working directory, and, when no
1389 revisions are specified, the working directory files are compared
1389 revisions are specified, the working directory files are compared
1390 to its parent.
1390 to its parent.
1391
1391
1392 Without the -a option, diff will avoid generating diffs of files
1392 Without the -a option, diff will avoid generating diffs of files
1393 it detects as binary. With -a, diff will generate a diff anyway,
1393 it detects as binary. With -a, diff will generate a diff anyway,
1394 probably with undesirable results.
1394 probably with undesirable results.
1395 """
1395 """
1396 node1, node2 = revpair(ui, repo, opts['rev'])
1396 node1, node2 = revpair(ui, repo, opts['rev'])
1397
1397
1398 fns, matchfn, anypats = matchpats(repo, pats, opts)
1398 fns, matchfn, anypats = matchpats(repo, pats, opts)
1399
1399
1400 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1400 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1401 text=opts['text'], opts=opts)
1401 text=opts['text'], opts=opts)
1402
1402
1403 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1403 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1404 node = repo.lookup(changeset)
1404 node = repo.lookup(changeset)
1405 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1405 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1406 if opts['switch_parent']:
1406 if opts['switch_parent']:
1407 parents.reverse()
1407 parents.reverse()
1408 prev = (parents and parents[0]) or nullid
1408 prev = (parents and parents[0]) or nullid
1409 change = repo.changelog.read(node)
1409 change = repo.changelog.read(node)
1410
1410
1411 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1411 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1412 revwidth=revwidth)
1412 revwidth=revwidth)
1413 if fp != sys.stdout:
1413 if fp != sys.stdout:
1414 ui.note("%s\n" % fp.name)
1414 ui.note("%s\n" % fp.name)
1415
1415
1416 fp.write("# HG changeset patch\n")
1416 fp.write("# HG changeset patch\n")
1417 fp.write("# User %s\n" % change[1])
1417 fp.write("# User %s\n" % change[1])
1418 fp.write("# Date %d %d\n" % change[2])
1418 fp.write("# Date %d %d\n" % change[2])
1419 fp.write("# Node ID %s\n" % hex(node))
1419 fp.write("# Node ID %s\n" % hex(node))
1420 fp.write("# Parent %s\n" % hex(prev))
1420 fp.write("# Parent %s\n" % hex(prev))
1421 if len(parents) > 1:
1421 if len(parents) > 1:
1422 fp.write("# Parent %s\n" % hex(parents[1]))
1422 fp.write("# Parent %s\n" % hex(parents[1]))
1423 fp.write(change[4].rstrip())
1423 fp.write(change[4].rstrip())
1424 fp.write("\n\n")
1424 fp.write("\n\n")
1425
1425
1426 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1426 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1427 if fp != sys.stdout:
1427 if fp != sys.stdout:
1428 fp.close()
1428 fp.close()
1429
1429
1430 def export(ui, repo, *changesets, **opts):
1430 def export(ui, repo, *changesets, **opts):
1431 """dump the header and diffs for one or more changesets
1431 """dump the header and diffs for one or more changesets
1432
1432
1433 Print the changeset header and diffs for one or more revisions.
1433 Print the changeset header and diffs for one or more revisions.
1434
1434
1435 The information shown in the changeset header is: author,
1435 The information shown in the changeset header is: author,
1436 changeset hash, parent and commit comment.
1436 changeset hash, parent and commit comment.
1437
1437
1438 Output may be to a file, in which case the name of the file is
1438 Output may be to a file, in which case the name of the file is
1439 given using a format string. The formatting rules are as follows:
1439 given using a format string. The formatting rules are as follows:
1440
1440
1441 %% literal "%" character
1441 %% literal "%" character
1442 %H changeset hash (40 bytes of hexadecimal)
1442 %H changeset hash (40 bytes of hexadecimal)
1443 %N number of patches being generated
1443 %N number of patches being generated
1444 %R changeset revision number
1444 %R changeset revision number
1445 %b basename of the exporting repository
1445 %b basename of the exporting repository
1446 %h short-form changeset hash (12 bytes of hexadecimal)
1446 %h short-form changeset hash (12 bytes of hexadecimal)
1447 %n zero-padded sequence number, starting at 1
1447 %n zero-padded sequence number, starting at 1
1448 %r zero-padded changeset revision number
1448 %r zero-padded changeset revision number
1449
1449
1450 Without the -a option, export will avoid generating diffs of files
1450 Without the -a option, export will avoid generating diffs of files
1451 it detects as binary. With -a, export will generate a diff anyway,
1451 it detects as binary. With -a, export will generate a diff anyway,
1452 probably with undesirable results.
1452 probably with undesirable results.
1453
1453
1454 With the --switch-parent option, the diff will be against the second
1454 With the --switch-parent option, the diff will be against the second
1455 parent. It can be useful to review a merge.
1455 parent. It can be useful to review a merge.
1456 """
1456 """
1457 if not changesets:
1457 if not changesets:
1458 raise util.Abort(_("export requires at least one changeset"))
1458 raise util.Abort(_("export requires at least one changeset"))
1459 seqno = 0
1459 seqno = 0
1460 revs = list(revrange(ui, repo, changesets))
1460 revs = list(revrange(ui, repo, changesets))
1461 total = len(revs)
1461 total = len(revs)
1462 revwidth = max(map(len, revs))
1462 revwidth = max(map(len, revs))
1463 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1463 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1464 ui.note(msg)
1464 ui.note(msg)
1465 for cset in revs:
1465 for cset in revs:
1466 seqno += 1
1466 seqno += 1
1467 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1467 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1468
1468
1469 def forget(ui, repo, *pats, **opts):
1469 def forget(ui, repo, *pats, **opts):
1470 """don't add the specified files on the next commit (DEPRECATED)
1470 """don't add the specified files on the next commit (DEPRECATED)
1471
1471
1472 (DEPRECATED)
1472 (DEPRECATED)
1473 Undo an 'hg add' scheduled for the next commit.
1473 Undo an 'hg add' scheduled for the next commit.
1474
1474
1475 This command is now deprecated and will be removed in a future
1475 This command is now deprecated and will be removed in a future
1476 release. Please use revert instead.
1476 release. Please use revert instead.
1477 """
1477 """
1478 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1478 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1479 forget = []
1479 forget = []
1480 for src, abs, rel, exact in walk(repo, pats, opts):
1480 for src, abs, rel, exact in walk(repo, pats, opts):
1481 if repo.dirstate.state(abs) == 'a':
1481 if repo.dirstate.state(abs) == 'a':
1482 forget.append(abs)
1482 forget.append(abs)
1483 if ui.verbose or not exact:
1483 if ui.verbose or not exact:
1484 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1484 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1485 repo.forget(forget)
1485 repo.forget(forget)
1486
1486
1487 def grep(ui, repo, pattern, *pats, **opts):
1487 def grep(ui, repo, pattern, *pats, **opts):
1488 """search for a pattern in specified files and revisions
1488 """search for a pattern in specified files and revisions
1489
1489
1490 Search revisions of files for a regular expression.
1490 Search revisions of files for a regular expression.
1491
1491
1492 This command behaves differently than Unix grep. It only accepts
1492 This command behaves differently than Unix grep. It only accepts
1493 Python/Perl regexps. It searches repository history, not the
1493 Python/Perl regexps. It searches repository history, not the
1494 working directory. It always prints the revision number in which
1494 working directory. It always prints the revision number in which
1495 a match appears.
1495 a match appears.
1496
1496
1497 By default, grep only prints output for the first revision of a
1497 By default, grep only prints output for the first revision of a
1498 file in which it finds a match. To get it to print every revision
1498 file in which it finds a match. To get it to print every revision
1499 that contains a change in match status ("-" for a match that
1499 that contains a change in match status ("-" for a match that
1500 becomes a non-match, or "+" for a non-match that becomes a match),
1500 becomes a non-match, or "+" for a non-match that becomes a match),
1501 use the --all flag.
1501 use the --all flag.
1502 """
1502 """
1503 reflags = 0
1503 reflags = 0
1504 if opts['ignore_case']:
1504 if opts['ignore_case']:
1505 reflags |= re.I
1505 reflags |= re.I
1506 regexp = re.compile(pattern, reflags)
1506 regexp = re.compile(pattern, reflags)
1507 sep, eol = ':', '\n'
1507 sep, eol = ':', '\n'
1508 if opts['print0']:
1508 if opts['print0']:
1509 sep = eol = '\0'
1509 sep = eol = '\0'
1510
1510
1511 fcache = {}
1511 fcache = {}
1512 def getfile(fn):
1512 def getfile(fn):
1513 if fn not in fcache:
1513 if fn not in fcache:
1514 fcache[fn] = repo.file(fn)
1514 fcache[fn] = repo.file(fn)
1515 return fcache[fn]
1515 return fcache[fn]
1516
1516
1517 def matchlines(body):
1517 def matchlines(body):
1518 begin = 0
1518 begin = 0
1519 linenum = 0
1519 linenum = 0
1520 while True:
1520 while True:
1521 match = regexp.search(body, begin)
1521 match = regexp.search(body, begin)
1522 if not match:
1522 if not match:
1523 break
1523 break
1524 mstart, mend = match.span()
1524 mstart, mend = match.span()
1525 linenum += body.count('\n', begin, mstart) + 1
1525 linenum += body.count('\n', begin, mstart) + 1
1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1527 lend = body.find('\n', mend)
1527 lend = body.find('\n', mend)
1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1529 begin = lend + 1
1529 begin = lend + 1
1530
1530
1531 class linestate(object):
1531 class linestate(object):
1532 def __init__(self, line, linenum, colstart, colend):
1532 def __init__(self, line, linenum, colstart, colend):
1533 self.line = line
1533 self.line = line
1534 self.linenum = linenum
1534 self.linenum = linenum
1535 self.colstart = colstart
1535 self.colstart = colstart
1536 self.colend = colend
1536 self.colend = colend
1537 def __eq__(self, other):
1537 def __eq__(self, other):
1538 return self.line == other.line
1538 return self.line == other.line
1539 def __hash__(self):
1539 def __hash__(self):
1540 return hash(self.line)
1540 return hash(self.line)
1541
1541
1542 matches = {}
1542 matches = {}
1543 def grepbody(fn, rev, body):
1543 def grepbody(fn, rev, body):
1544 matches[rev].setdefault(fn, {})
1544 matches[rev].setdefault(fn, {})
1545 m = matches[rev][fn]
1545 m = matches[rev][fn]
1546 for lnum, cstart, cend, line in matchlines(body):
1546 for lnum, cstart, cend, line in matchlines(body):
1547 s = linestate(line, lnum, cstart, cend)
1547 s = linestate(line, lnum, cstart, cend)
1548 m[s] = s
1548 m[s] = s
1549
1549
1550 # FIXME: prev isn't used, why ?
1550 # FIXME: prev isn't used, why ?
1551 prev = {}
1551 prev = {}
1552 ucache = {}
1552 ucache = {}
1553 def display(fn, rev, states, prevstates):
1553 def display(fn, rev, states, prevstates):
1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1556 counts = {'-': 0, '+': 0}
1556 counts = {'-': 0, '+': 0}
1557 filerevmatches = {}
1557 filerevmatches = {}
1558 for l in diff:
1558 for l in diff:
1559 if incrementing or not opts['all']:
1559 if incrementing or not opts['all']:
1560 change = ((l in prevstates) and '-') or '+'
1560 change = ((l in prevstates) and '-') or '+'
1561 r = rev
1561 r = rev
1562 else:
1562 else:
1563 change = ((l in states) and '-') or '+'
1563 change = ((l in states) and '-') or '+'
1564 r = prev[fn]
1564 r = prev[fn]
1565 cols = [fn, str(rev)]
1565 cols = [fn, str(rev)]
1566 if opts['line_number']:
1566 if opts['line_number']:
1567 cols.append(str(l.linenum))
1567 cols.append(str(l.linenum))
1568 if opts['all']:
1568 if opts['all']:
1569 cols.append(change)
1569 cols.append(change)
1570 if opts['user']:
1570 if opts['user']:
1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1572 ucache))
1572 ucache))
1573 if opts['files_with_matches']:
1573 if opts['files_with_matches']:
1574 c = (fn, rev)
1574 c = (fn, rev)
1575 if c in filerevmatches:
1575 if c in filerevmatches:
1576 continue
1576 continue
1577 filerevmatches[c] = 1
1577 filerevmatches[c] = 1
1578 else:
1578 else:
1579 cols.append(l.line)
1579 cols.append(l.line)
1580 ui.write(sep.join(cols), eol)
1580 ui.write(sep.join(cols), eol)
1581 counts[change] += 1
1581 counts[change] += 1
1582 return counts['+'], counts['-']
1582 return counts['+'], counts['-']
1583
1583
1584 fstate = {}
1584 fstate = {}
1585 skip = {}
1585 skip = {}
1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1587 count = 0
1587 count = 0
1588 incrementing = False
1588 incrementing = False
1589 for st, rev, fns in changeiter:
1589 for st, rev, fns in changeiter:
1590 if st == 'window':
1590 if st == 'window':
1591 incrementing = rev
1591 incrementing = rev
1592 matches.clear()
1592 matches.clear()
1593 elif st == 'add':
1593 elif st == 'add':
1594 change = repo.changelog.read(repo.lookup(str(rev)))
1594 change = repo.changelog.read(repo.lookup(str(rev)))
1595 mf = repo.manifest.read(change[0])
1595 mf = repo.manifest.read(change[0])
1596 matches[rev] = {}
1596 matches[rev] = {}
1597 for fn in fns:
1597 for fn in fns:
1598 if fn in skip:
1598 if fn in skip:
1599 continue
1599 continue
1600 fstate.setdefault(fn, {})
1600 fstate.setdefault(fn, {})
1601 try:
1601 try:
1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1603 except KeyError:
1603 except KeyError:
1604 pass
1604 pass
1605 elif st == 'iter':
1605 elif st == 'iter':
1606 states = matches[rev].items()
1606 states = matches[rev].items()
1607 states.sort()
1607 states.sort()
1608 for fn, m in states:
1608 for fn, m in states:
1609 if fn in skip:
1609 if fn in skip:
1610 continue
1610 continue
1611 if incrementing or not opts['all'] or fstate[fn]:
1611 if incrementing or not opts['all'] or fstate[fn]:
1612 pos, neg = display(fn, rev, m, fstate[fn])
1612 pos, neg = display(fn, rev, m, fstate[fn])
1613 count += pos + neg
1613 count += pos + neg
1614 if pos and not opts['all']:
1614 if pos and not opts['all']:
1615 skip[fn] = True
1615 skip[fn] = True
1616 fstate[fn] = m
1616 fstate[fn] = m
1617 prev[fn] = rev
1617 prev[fn] = rev
1618
1618
1619 if not incrementing:
1619 if not incrementing:
1620 fstate = fstate.items()
1620 fstate = fstate.items()
1621 fstate.sort()
1621 fstate.sort()
1622 for fn, state in fstate:
1622 for fn, state in fstate:
1623 if fn in skip:
1623 if fn in skip:
1624 continue
1624 continue
1625 display(fn, rev, {}, state)
1625 display(fn, rev, {}, state)
1626 return (count == 0 and 1) or 0
1626 return (count == 0 and 1) or 0
1627
1627
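# Editor's sketch (not part of the original changeset): the nested matchlines()
# helper in grep() above walks a file body with regexp.search(), tracking the
# running line number and the column span of each hit.  A minimal standalone
# version of the same idea, using only the standard "re" module; the sample
# pattern and text below are invented for illustration:

import re

def scan(pattern, body):
    """Yield (linenum, colstart, colend, line) for each match in body."""
    regexp = re.compile(pattern)
    begin, linenum = 0, 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1

# "bar baz" is on line 2 and the match starts at column 0:
assert list(scan('b.r', 'foo\nbar baz\n')) == [(2, 0, 3, 'bar baz')]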
1628 def heads(ui, repo, **opts):
1628 def heads(ui, repo, **opts):
1629 """show current repository heads
1629 """show current repository heads
1630
1630
1631 Show all repository head changesets.
1631 Show all repository head changesets.
1632
1632
1633 Repository "heads" are changesets that don't have child
1633 Repository "heads" are changesets that don't have child
1634 changesets. They are where development generally takes place and
1634 changesets. They are where development generally takes place and
1635 are the usual targets for update and merge operations.
1635 are the usual targets for update and merge operations.
1636 """
1636 """
1637 if opts['rev']:
1637 if opts['rev']:
1638 heads = repo.heads(repo.lookup(opts['rev']))
1638 heads = repo.heads(repo.lookup(opts['rev']))
1639 else:
1639 else:
1640 heads = repo.heads()
1640 heads = repo.heads()
1641 br = None
1641 br = None
1642 if opts['branches']:
1642 if opts['branches']:
1643 br = repo.branchlookup(heads)
1643 br = repo.branchlookup(heads)
1644 displayer = show_changeset(ui, repo, opts)
1644 displayer = show_changeset(ui, repo, opts)
1645 for n in heads:
1645 for n in heads:
1646 displayer.show(changenode=n, brinfo=br)
1646 displayer.show(changenode=n, brinfo=br)
1647
1647
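# Editor's sketch (not from the original source): "heads" as described in the
# docstring above are simply changesets that no other changeset names as a
# parent.  Illustrated on a toy history encoded as a child -> parents mapping;
# the node names are invented for the example:

def find_heads(parents):
    """Return nodes that are not a parent of any other node."""
    referenced = set()
    for ps in parents.values():
        referenced.update(ps)
    return sorted(n for n in parents if n not in referenced)

history = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['b']}   # 'b' has two children
assert find_heads(history) == ['c', 'd']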
1648 def identify(ui, repo):
1648 def identify(ui, repo):
1649 """print information about the working copy
1649 """print information about the working copy
1650
1650
1651 Print a short summary of the current state of the repo.
1651 Print a short summary of the current state of the repo.
1652
1652
1653 This summary identifies the repository state using one or two parent
1653 This summary identifies the repository state using one or two parent
1654 hash identifiers, followed by a "+" if there are uncommitted changes
1654 hash identifiers, followed by a "+" if there are uncommitted changes
1655 in the working directory, followed by a list of tags for this revision.
1655 in the working directory, followed by a list of tags for this revision.
1656 """
1656 """
1657 parents = [p for p in repo.dirstate.parents() if p != nullid]
1657 parents = [p for p in repo.dirstate.parents() if p != nullid]
1658 if not parents:
1658 if not parents:
1659 ui.write(_("unknown\n"))
1659 ui.write(_("unknown\n"))
1660 return
1660 return
1661
1661
1662 hexfunc = ui.verbose and hex or short
1662 hexfunc = ui.verbose and hex or short
1663 modified, added, removed, deleted, unknown = repo.changes()
1663 modified, added, removed, deleted, unknown = repo.changes()
1664 output = ["%s%s" %
1664 output = ["%s%s" %
1665 ('+'.join([hexfunc(parent) for parent in parents]),
1665 ('+'.join([hexfunc(parent) for parent in parents]),
1666 (modified or added or removed or deleted) and "+" or "")]
1666 (modified or added or removed or deleted) and "+" or "")]
1667
1667
1668 if not ui.quiet:
1668 if not ui.quiet:
1669 # multiple tags for a single parent separated by '/'
1669 # multiple tags for a single parent separated by '/'
1670 parenttags = ['/'.join(tags)
1670 parenttags = ['/'.join(tags)
1671 for tags in map(repo.nodetags, parents) if tags]
1671 for tags in map(repo.nodetags, parents) if tags]
1672 # tags for multiple parents separated by ' + '
1672 # tags for multiple parents separated by ' + '
1673 if parenttags:
1673 if parenttags:
1674 output.append(' + '.join(parenttags))
1674 output.append(' + '.join(parenttags))
1675
1675
1676 ui.write("%s\n" % ' '.join(output))
1676 ui.write("%s\n" % ' '.join(output))
1677
1677
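# Editor's sketch (not from the original source): identify() above builds its
# one-line summary by joining the parent hashes with "+", appending a trailing
# "+" when the working directory has local changes, and then listing the tags.
# A simplified imitation of that formatting; the hashes and tag are invented:

def summary(parent_hashes, dirty, tags):
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if tags:
        out.append('/'.join(tags))
    return ' '.join(out)

assert summary(['abc123de'], True, ['tip']) == 'abc123de+ tip'
assert summary(['aaaa', 'bbbb'], False, []) == 'aaaa+bbbb'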
1678 def import_(ui, repo, patch1, *patches, **opts):
1678 def import_(ui, repo, patch1, *patches, **opts):
1679 """import an ordered set of patches
1679 """import an ordered set of patches
1680
1680
1681 Import a list of patches and commit them individually.
1681 Import a list of patches and commit them individually.
1682
1682
1683 If there are outstanding changes in the working directory, import
1683 If there are outstanding changes in the working directory, import
1684 will abort unless given the -f flag.
1684 will abort unless given the -f flag.
1685
1685
1686 You can import a patch straight from a mail message. Even patches
1686 You can import a patch straight from a mail message. Even patches
1687 as attachments work (body part must be type text/plain or
1687 as attachments work (body part must be type text/plain or
1688 text/x-patch to be used). The From and Subject headers of the email
1688 text/x-patch to be used). The From and Subject headers of the email
1689 message are used as the default committer and commit message. All
1689 message are used as the default committer and commit message. All
1690 text/plain body parts before the first diff are added to the commit
1690 text/plain body parts before the first diff are added to the commit
1691 message.
1691 message.
1692
1692
1693 If the imported patch was generated by hg export, the user and description
1693 If the imported patch was generated by hg export, the user and description
1694 from the patch override the values from the message headers and body. Values
1694 from the patch override the values from the message headers and body. Values
1695 given on the command line with -m and -u override these.
1695 given on the command line with -m and -u override these.
1696
1696
1697 To read a patch from standard input, use patch name "-".
1697 To read a patch from standard input, use patch name "-".
1698 """
1698 """
1699 patches = (patch1,) + patches
1699 patches = (patch1,) + patches
1700
1700
1701 if not opts['force']:
1701 if not opts['force']:
1702 bail_if_changed(repo)
1702 bail_if_changed(repo)
1703
1703
1704 d = opts["base"]
1704 d = opts["base"]
1705 strip = opts["strip"]
1705 strip = opts["strip"]
1706
1706
1707 mailre = re.compile(r'(?:From |[\w-]+:)')
1707 mailre = re.compile(r'(?:From |[\w-]+:)')
1708
1708
1709 # attempt to detect the start of a patch
1709 # attempt to detect the start of a patch
1710 # (this heuristic is borrowed from quilt)
1710 # (this heuristic is borrowed from quilt)
1711 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1711 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1712 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1712 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1713 '(---|\*\*\*)[ \t])', re.MULTILINE)
1713 '(---|\*\*\*)[ \t])', re.MULTILINE)
1714
1714
1715 for patch in patches:
1715 for patch in patches:
1716 pf = os.path.join(d, patch)
1716 pf = os.path.join(d, patch)
1717
1717
1718 message = None
1718 message = None
1719 user = None
1719 user = None
1720 date = None
1720 date = None
1721 hgpatch = False
1721 hgpatch = False
1722
1722
1723 p = email.Parser.Parser()
1723 p = email.Parser.Parser()
1724 if pf == '-':
1724 if pf == '-':
1725 msg = p.parse(sys.stdin)
1725 msg = p.parse(sys.stdin)
1726 ui.status(_("applying patch from stdin\n"))
1726 ui.status(_("applying patch from stdin\n"))
1727 else:
1727 else:
1728 msg = p.parse(file(pf))
1728 msg = p.parse(file(pf))
1729 ui.status(_("applying %s\n") % patch)
1729 ui.status(_("applying %s\n") % patch)
1730
1730
1731 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1731 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1732 tmpfp = os.fdopen(fd, 'w')
1732 tmpfp = os.fdopen(fd, 'w')
1733 try:
1733 try:
1734 message = msg['Subject']
1734 message = msg['Subject']
1735 if message:
1735 if message:
1736 message = message.replace('\n\t', ' ')
1736 message = message.replace('\n\t', ' ')
1737 ui.debug('Subject: %s\n' % message)
1737 ui.debug('Subject: %s\n' % message)
1738 user = msg['From']
1738 user = msg['From']
1739 if user:
1739 if user:
1740 ui.debug('From: %s\n' % user)
1740 ui.debug('From: %s\n' % user)
1741 diffs_seen = 0
1741 diffs_seen = 0
1742 ok_types = ('text/plain', 'text/x-patch')
1742 ok_types = ('text/plain', 'text/x-patch')
1743 for part in msg.walk():
1743 for part in msg.walk():
1744 content_type = part.get_content_type()
1744 content_type = part.get_content_type()
1745 ui.debug('Content-Type: %s\n' % content_type)
1745 ui.debug('Content-Type: %s\n' % content_type)
1746 if content_type not in ok_types:
1746 if content_type not in ok_types:
1747 continue
1747 continue
1748 payload = part.get_payload(decode=True)
1748 payload = part.get_payload(decode=True)
1749 m = diffre.search(payload)
1749 m = diffre.search(payload)
1750 if m:
1750 if m:
1751 ui.debug(_('found patch at byte %d\n') % m.start(0))
1751 ui.debug(_('found patch at byte %d\n') % m.start(0))
1752 diffs_seen += 1
1752 diffs_seen += 1
1753 hgpatch = False
1753 hgpatch = False
1754 fp = cStringIO.StringIO()
1754 fp = cStringIO.StringIO()
1755 if message:
1755 if message:
1756 fp.write(message)
1756 fp.write(message)
1757 fp.write('\n')
1757 fp.write('\n')
1758 for line in payload[:m.start(0)].splitlines():
1758 for line in payload[:m.start(0)].splitlines():
1759 if line.startswith('# HG changeset patch'):
1759 if line.startswith('# HG changeset patch'):
1760 ui.debug(_('patch generated by hg export\n'))
1760 ui.debug(_('patch generated by hg export\n'))
1761 hgpatch = True
1761 hgpatch = True
1762 # drop earlier commit message content
1762 # drop earlier commit message content
1763 fp.seek(0)
1763 fp.seek(0)
1764 fp.truncate()
1764 fp.truncate()
1765 elif hgpatch:
1765 elif hgpatch:
1766 if line.startswith('# User '):
1766 if line.startswith('# User '):
1767 user = line[7:]
1767 user = line[7:]
1768 ui.debug('From: %s\n' % user)
1768 ui.debug('From: %s\n' % user)
1769 elif line.startswith("# Date "):
1769 elif line.startswith("# Date "):
1770 date = line[7:]
1770 date = line[7:]
1771 if not line.startswith('# '):
1771 if not line.startswith('# '):
1772 fp.write(line)
1772 fp.write(line)
1773 fp.write('\n')
1773 fp.write('\n')
1774 message = fp.getvalue()
1774 message = fp.getvalue()
1775 if tmpfp:
1775 if tmpfp:
1776 tmpfp.write(payload)
1776 tmpfp.write(payload)
1777 if not payload.endswith('\n'):
1777 if not payload.endswith('\n'):
1778 tmpfp.write('\n')
1778 tmpfp.write('\n')
1779 elif not diffs_seen and message and content_type == 'text/plain':
1779 elif not diffs_seen and message and content_type == 'text/plain':
1780 message += '\n' + payload
1780 message += '\n' + payload
1781
1781
1782 if opts['message']:
1782 if opts['message']:
1783 # pickup the cmdline msg
1783 # pickup the cmdline msg
1784 message = opts['message']
1784 message = opts['message']
1785 elif message:
1785 elif message:
1786 # pickup the patch msg
1786 # pickup the patch msg
1787 message = message.strip()
1787 message = message.strip()
1788 else:
1788 else:
1789 # launch the editor
1789 # launch the editor
1790 message = None
1790 message = None
1791 ui.debug(_('message:\n%s\n') % message)
1791 ui.debug(_('message:\n%s\n') % message)
1792
1792
1793 tmpfp.close()
1793 tmpfp.close()
1794 if not diffs_seen:
1794 if not diffs_seen:
1795 raise util.Abort(_('no diffs found'))
1795 raise util.Abort(_('no diffs found'))
1796
1796
1797 files = util.patch(strip, tmpname, ui)
1797 files = util.patch(strip, tmpname, ui)
1798 if len(files) > 0:
1798 if len(files) > 0:
1799 addremove_lock(ui, repo, files, {})
1799 addremove_lock(ui, repo, files, {})
1800 repo.commit(files, message, user, date)
1800 repo.commit(files, message, user, date)
1801 finally:
1801 finally:
1802 os.unlink(tmpname)
1802 os.unlink(tmpname)
1803
1803
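# Editor's sketch (not from the original changeset): import_() above feeds the
# patch through the standard email parser, walks the MIME parts, and uses a
# quilt-style regular expression to find where the diff starts so that any
# leading text can become the commit message.  A reduced standalone version of
# that flow; the message text below is an invented example:

import email
import re

DIFF_START = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'(---|\*\*\*)[ \t])', re.MULTILINE)

sample = (
    "From: Example Hacker <hacker@example.com>\n"
    "Subject: fix the frobnicator\n"
    "\n"
    "Some explanation of the change.\n"
    "diff -r 000000000000 frob.py\n"
    "--- a/frob.py\n"
    "+++ b/frob.py\n"
)

msg = email.message_from_string(sample)
for part in msg.walk():
    if part.get_content_type() != 'text/plain':
        continue
    payload = part.get_payload(decode=False)
    m = DIFF_START.search(payload)
    if m:
        # everything before the first diff line is commit-message material
        description = payload[:m.start(0)].strip()
        assert description == 'Some explanation of the change.'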
1804 def incoming(ui, repo, source="default", **opts):
1804 def incoming(ui, repo, source="default", **opts):
1805 """show new changesets found in source
1805 """show new changesets found in source
1806
1806
1807 Show new changesets found in the specified path/URL or the default
1807 Show new changesets found in the specified path/URL or the default
1808 pull location. These are the changesets that would be pulled if a pull
1808 pull location. These are the changesets that would be pulled if a pull
1809 was requested.
1809 was requested.
1810
1810
1811 For remote repositories, using --bundle avoids downloading the changesets
1811 For remote repositories, using --bundle avoids downloading the changesets
1812 twice if incoming is followed by a pull.
1812 twice if incoming is followed by a pull.
1813
1813
1814 See pull for valid source format details.
1814 See pull for valid source format details.
1815 """
1815 """
1816 source = ui.expandpath(source)
1816 source = ui.expandpath(source)
1817 ui.setconfig_remoteopts(**opts)
1817 ui.setconfig_remoteopts(**opts)
1818
1818
1819 other = hg.repository(ui, source)
1819 other = hg.repository(ui, source)
1820 incoming = repo.findincoming(other, force=opts["force"])
1820 incoming = repo.findincoming(other, force=opts["force"])
1821 if not incoming:
1821 if not incoming:
1822 ui.status(_("no changes found\n"))
1822 ui.status(_("no changes found\n"))
1823 return
1823 return
1824
1824
1825 cleanup = None
1825 cleanup = None
1826 try:
1826 try:
1827 fname = opts["bundle"]
1827 fname = opts["bundle"]
1828 if fname or not other.local():
1828 if fname or not other.local():
1829 # create a bundle (uncompressed if other repo is not local)
1829 # create a bundle (uncompressed if other repo is not local)
1830 cg = other.changegroup(incoming, "incoming")
1830 cg = other.changegroup(incoming, "incoming")
1831 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1831 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1832 # keep written bundle?
1832 # keep written bundle?
1833 if opts["bundle"]:
1833 if opts["bundle"]:
1834 cleanup = None
1834 cleanup = None
1835 if not other.local():
1835 if not other.local():
1836 # use the created uncompressed bundlerepo
1836 # use the created uncompressed bundlerepo
1837 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1837 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1838
1838
1839 revs = None
1839 revs = None
1840 if opts['rev']:
1840 if opts['rev']:
1841 revs = [other.lookup(rev) for rev in opts['rev']]
1841 revs = [other.lookup(rev) for rev in opts['rev']]
1842 o = other.changelog.nodesbetween(incoming, revs)[0]
1842 o = other.changelog.nodesbetween(incoming, revs)[0]
1843 if opts['newest_first']:
1843 if opts['newest_first']:
1844 o.reverse()
1844 o.reverse()
1845 displayer = show_changeset(ui, other, opts)
1845 displayer = show_changeset(ui, other, opts)
1846 for n in o:
1846 for n in o:
1847 parents = [p for p in other.changelog.parents(n) if p != nullid]
1847 parents = [p for p in other.changelog.parents(n) if p != nullid]
1848 if opts['no_merges'] and len(parents) == 2:
1848 if opts['no_merges'] and len(parents) == 2:
1849 continue
1849 continue
1850 displayer.show(changenode=n)
1850 displayer.show(changenode=n)
1851 if opts['patch']:
1851 if opts['patch']:
1852 prev = (parents and parents[0]) or nullid
1852 prev = (parents and parents[0]) or nullid
1853 dodiff(ui, ui, other, prev, n)
1853 dodiff(ui, ui, other, prev, n)
1854 ui.write("\n")
1854 ui.write("\n")
1855 finally:
1855 finally:
1856 if hasattr(other, 'close'):
1856 if hasattr(other, 'close'):
1857 other.close()
1857 other.close()
1858 if cleanup:
1858 if cleanup:
1859 os.unlink(cleanup)
1859 os.unlink(cleanup)
1860
1860
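# Editor's sketch (not from the original changeset): incoming() above writes
# the fetched changegroup to a bundle file, reads its preview from that file,
# and removes the file in a finally: block unless the caller asked to keep it
# with --bundle.  The same keep-or-clean-up shape in miniature, using only
# tempfile and os; the data written is a placeholder, not a real bundle:

import os
import tempfile

def preview(data, keepfile=None):
    cleanup = None
    try:
        if keepfile:
            fname = keepfile          # caller wants the file kept afterwards
        else:
            fd, fname = tempfile.mkstemp(prefix='example-bundle-')
            os.close(fd)
            cleanup = fname           # temporary: remove when done
        f = open(fname, 'wb')
        f.write(data)
        f.close()
        return os.path.getsize(fname)
    finally:
        if cleanup:
            os.unlink(cleanup)

assert preview(b'not a real bundle') == len(b'not a real bundle')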
1861 def init(ui, dest=".", **opts):
1861 def init(ui, dest=".", **opts):
1862 """create a new repository in the given directory
1862 """create a new repository in the given directory
1863
1863
1864 Initialize a new repository in the given directory. If the given
1864 Initialize a new repository in the given directory. If the given
1865 directory does not exist, it is created.
1865 directory does not exist, it is created.
1866
1866
1867 If no directory is given, the current directory is used.
1867 If no directory is given, the current directory is used.
1868
1868
1869 It is possible to specify an ssh:// URL as the destination.
1869 It is possible to specify an ssh:// URL as the destination.
1870 Look at the help text for the pull command for important details
1870 Look at the help text for the pull command for important details
1871 about ssh:// URLs.
1871 about ssh:// URLs.
1872 """
1872 """
1873 ui.setconfig_remoteopts(**opts)
1873 ui.setconfig_remoteopts(**opts)
1874 hg.repository(ui, dest, create=1)
1874 hg.repository(ui, dest, create=1)
1875
1875
1876 def locate(ui, repo, *pats, **opts):
1876 def locate(ui, repo, *pats, **opts):
1877 """locate files matching specific patterns
1877 """locate files matching specific patterns
1878
1878
1879 Print all files under Mercurial control whose names match the
1879 Print all files under Mercurial control whose names match the
1880 given patterns.
1880 given patterns.
1881
1881
1882 This command searches the current directory and its
1882 This command searches the current directory and its
1883 subdirectories. To search an entire repository, move to the root
1883 subdirectories. To search an entire repository, move to the root
1884 of the repository.
1884 of the repository.
1885
1885
1886 If no patterns are given to match, this command prints all file
1886 If no patterns are given to match, this command prints all file
1887 names.
1887 names.
1888
1888
1889 If you want to feed the output of this command into the "xargs"
1889 If you want to feed the output of this command into the "xargs"
1890 command, use the "-0" option to both this command and "xargs".
1890 command, use the "-0" option to both this command and "xargs".
1891 This will avoid the problem of "xargs" treating single filenames
1891 This will avoid the problem of "xargs" treating single filenames
1892 that contain white space as multiple filenames.
1892 that contain white space as multiple filenames.
1893 """
1893 """
1894 end = opts['print0'] and '\0' or '\n'
1894 end = opts['print0'] and '\0' or '\n'
1895 rev = opts['rev']
1895 rev = opts['rev']
1896 if rev:
1896 if rev:
1897 node = repo.lookup(rev)
1897 node = repo.lookup(rev)
1898 else:
1898 else:
1899 node = None
1899 node = None
1900
1900
1901 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1901 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1902 head='(?:.*/|)'):
1902 head='(?:.*/|)'):
1903 if not node and repo.dirstate.state(abs) == '?':
1903 if not node and repo.dirstate.state(abs) == '?':
1904 continue
1904 continue
1905 if opts['fullpath']:
1905 if opts['fullpath']:
1906 ui.write(os.path.join(repo.root, abs), end)
1906 ui.write(os.path.join(repo.root, abs), end)
1907 else:
1907 else:
1908 ui.write(((pats and rel) or abs), end)
1908 ui.write(((pats and rel) or abs), end)
1909
1909
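# Editor's sketch (not from the original source): with -0/--print0, locate()
# above terminates each name with a NUL byte instead of a newline, which is
# what "xargs -0" expects and what keeps names containing spaces intact.  A
# tiny illustration of the same output convention; the file names are invented.
# Typical command-line pairing (per the docstring above), for example:
#   hg locate -0 | xargs -0 wc -l

import sys

def write_names(names, print0=False):
    end = print0 and '\0' or '\n'
    for name in names:
        sys.stdout.write(name + end)

write_names(['plain.txt', 'name with spaces.txt'], print0=True)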
1910 def log(ui, repo, *pats, **opts):
1910 def log(ui, repo, *pats, **opts):
1911 """show revision history of entire repository or files
1911 """show revision history of entire repository or files
1912
1912
1913 Print the revision history of the specified files or the entire project.
1913 Print the revision history of the specified files or the entire project.
1914
1914
1915 By default this command outputs: changeset id and hash, tags,
1915 By default this command outputs: changeset id and hash, tags,
1916 non-trivial parents, user, date and time, and a summary for each
1916 non-trivial parents, user, date and time, and a summary for each
1917 commit. When the -v/--verbose switch is used, the list of changed
1917 commit. When the -v/--verbose switch is used, the list of changed
1918 files and full commit message is shown.
1918 files and full commit message is shown.
1919 """
1919 """
1920 class dui(object):
1920 class dui(object):
1921 # Implement and delegate some ui protocol. Save hunks of
1921 # Implement and delegate some ui protocol. Save hunks of
1922 # output for later display in the desired order.
1922 # output for later display in the desired order.
1923 def __init__(self, ui):
1923 def __init__(self, ui):
1924 self.ui = ui
1924 self.ui = ui
1925 self.hunk = {}
1925 self.hunk = {}
1926 self.header = {}
1926 self.header = {}
1927 def bump(self, rev):
1927 def bump(self, rev):
1928 self.rev = rev
1928 self.rev = rev
1929 self.hunk[rev] = []
1929 self.hunk[rev] = []
1930 self.header[rev] = []
1930 self.header[rev] = []
1931 def note(self, *args):
1931 def note(self, *args):
1932 if self.verbose:
1932 if self.verbose:
1933 self.write(*args)
1933 self.write(*args)
1934 def status(self, *args):
1934 def status(self, *args):
1935 if not self.quiet:
1935 if not self.quiet:
1936 self.write(*args)
1936 self.write(*args)
1937 def write(self, *args):
1937 def write(self, *args):
1938 self.hunk[self.rev].append(args)
1938 self.hunk[self.rev].append(args)
1939 def write_header(self, *args):
1939 def write_header(self, *args):
1940 self.header[self.rev].append(args)
1940 self.header[self.rev].append(args)
1941 def debug(self, *args):
1941 def debug(self, *args):
1942 if self.debugflag:
1942 if self.debugflag:
1943 self.write(*args)
1943 self.write(*args)
1944 def __getattr__(self, key):
1944 def __getattr__(self, key):
1945 return getattr(self.ui, key)
1945 return getattr(self.ui, key)
1946
1946
1947 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1947 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1948
1948
1949 if opts['limit']:
1949 if opts['limit']:
1950 try:
1950 try:
1951 limit = int(opts['limit'])
1951 limit = int(opts['limit'])
1952 except ValueError:
1952 except ValueError:
1953 raise util.Abort(_('limit must be a positive integer'))
1953 raise util.Abort(_('limit must be a positive integer'))
1954 if limit <= 0: raise util.Abort(_('limit must be positive'))
1954 if limit <= 0: raise util.Abort(_('limit must be positive'))
1955 else:
1955 else:
1956 limit = sys.maxint
1956 limit = sys.maxint
1957 count = 0
1957 count = 0
1958
1958
1959 displayer = show_changeset(ui, repo, opts)
1959 displayer = show_changeset(ui, repo, opts)
1960 for st, rev, fns in changeiter:
1960 for st, rev, fns in changeiter:
1961 if st == 'window':
1961 if st == 'window':
1962 du = dui(ui)
1962 du = dui(ui)
1963 displayer.ui = du
1963 displayer.ui = du
1964 elif st == 'add':
1964 elif st == 'add':
1965 du.bump(rev)
1965 du.bump(rev)
1966 changenode = repo.changelog.node(rev)
1966 changenode = repo.changelog.node(rev)
1967 parents = [p for p in repo.changelog.parents(changenode)
1967 parents = [p for p in repo.changelog.parents(changenode)
1968 if p != nullid]
1968 if p != nullid]
1969 if opts['no_merges'] and len(parents) == 2:
1969 if opts['no_merges'] and len(parents) == 2:
1970 continue
1970 continue
1971 if opts['only_merges'] and len(parents) != 2:
1971 if opts['only_merges'] and len(parents) != 2:
1972 continue
1972 continue
1973
1973
1974 if opts['keyword']:
1974 if opts['keyword']:
1975 changes = getchange(rev)
1975 changes = getchange(rev)
1976 miss = 0
1976 miss = 0
1977 for k in [kw.lower() for kw in opts['keyword']]:
1977 for k in [kw.lower() for kw in opts['keyword']]:
1978 if not (k in changes[1].lower() or
1978 if not (k in changes[1].lower() or
1979 k in changes[4].lower() or
1979 k in changes[4].lower() or
1980 k in " ".join(changes[3][:20]).lower()):
1980 k in " ".join(changes[3][:20]).lower()):
1981 miss = 1
1981 miss = 1
1982 break
1982 break
1983 if miss:
1983 if miss:
1984 continue
1984 continue
1985
1985
1986 br = None
1986 br = None
1987 if opts['branches']:
1987 if opts['branches']:
1988 br = repo.branchlookup([repo.changelog.node(rev)])
1988 br = repo.branchlookup([repo.changelog.node(rev)])
1989
1989
1990 displayer.show(rev, brinfo=br)
1990 displayer.show(rev, brinfo=br)
1991 if opts['patch']:
1991 if opts['patch']:
1992 prev = (parents and parents[0]) or nullid
1992 prev = (parents and parents[0]) or nullid
1993 dodiff(du, du, repo, prev, changenode, match=matchfn)
1993 dodiff(du, du, repo, prev, changenode, match=matchfn)
1994 du.write("\n\n")
1994 du.write("\n\n")
1995 elif st == 'iter':
1995 elif st == 'iter':
1996 if count == limit: break
1996 if count == limit: break
1997 if du.header[rev]:
1997 if du.header[rev]:
1998 for args in du.header[rev]:
1998 for args in du.header[rev]:
1999 ui.write_header(*args)
1999 ui.write_header(*args)
2000 if du.hunk[rev]:
2000 if du.hunk[rev]:
2001 count += 1
2001 count += 1
2002 for args in du.hunk[rev]:
2002 for args in du.hunk[rev]:
2003 ui.write(*args)
2003 ui.write(*args)
2004
2004
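# Editor's sketch (not from the original source): the dui helper class in
# log() above buffers everything written for a revision and replays it later,
# so output comes out in the desired order even though revisions are visited
# in window batches.  The buffering idea in isolation, with invented revision
# numbers:

class buffered(object):
    def __init__(self):
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, text):
        self.hunk[self.rev].append(text)
    def replay(self, revs):
        return ''.join(''.join(self.hunk[r]) for r in revs)

buf = buffered()
for r in (3, 1, 2):                 # visited out of the desired order
    buf.bump(r)
    buf.write('rev %d\n' % r)
assert buf.replay([1, 2, 3]) == 'rev 1\nrev 2\nrev 3\n'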
2005 def manifest(ui, repo, rev=None):
2005 def manifest(ui, repo, rev=None):
2006 """output the latest or given revision of the project manifest
2006 """output the latest or given revision of the project manifest
2007
2007
2008 Print a list of version controlled files for the given revision.
2008 Print a list of version controlled files for the given revision.
2009
2009
2010 The manifest is the list of files being version controlled. If no revision
2010 The manifest is the list of files being version controlled. If no revision
2011 is given then the tip is used.
2011 is given then the tip is used.
2012 """
2012 """
2013 if rev:
2013 if rev:
2014 try:
2014 try:
2015 # assume all revision numbers are for changesets
2015 # assume all revision numbers are for changesets
2016 n = repo.lookup(rev)
2016 n = repo.lookup(rev)
2017 change = repo.changelog.read(n)
2017 change = repo.changelog.read(n)
2018 n = change[0]
2018 n = change[0]
2019 except hg.RepoError:
2019 except hg.RepoError:
2020 n = repo.manifest.lookup(rev)
2020 n = repo.manifest.lookup(rev)
2021 else:
2021 else:
2022 n = repo.manifest.tip()
2022 n = repo.manifest.tip()
2023 m = repo.manifest.read(n)
2023 m = repo.manifest.read(n)
2024 mf = repo.manifest.readflags(n)
2024 mf = repo.manifest.readflags(n)
2025 files = m.keys()
2025 files = m.keys()
2026 files.sort()
2026 files.sort()
2027
2027
2028 for f in files:
2028 for f in files:
2029 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2029 ui.write("%40s %3s %s\n" % (hex(m[f]),
2030 mf.execf(f) and "755" or "644", f))
2030
2031
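# Editor's sketch (not part of the original changeset): after this commit,
# manifest() above asks the flags object whether a file is executable
# (mf.execf(f)) instead of treating the flag entry as a truth value, and
# prints "755" or "644" accordingly.  The formatting itself, with a plain
# dictionary standing in for the manifest and a set standing in for the
# executable flags (both invented):

import sys

def show_manifest(nodes, execfiles, write=sys.stdout.write):
    for f in sorted(nodes):
        mode = f in execfiles and "755" or "644"
        write("%40s %3s %s\n" % (nodes[f], mode, f))

show_manifest({'hg': '0123abcd' * 5, 'README': '89abcdef' * 5},
              execfiles=set(['hg']))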
2031 def merge(ui, repo, node=None, **opts):
2032 def merge(ui, repo, node=None, **opts):
2032 """Merge working directory with another revision
2033 """Merge working directory with another revision
2033
2034
2034 Merge the contents of the current working directory and the
2035 Merge the contents of the current working directory and the
2035 requested revision. Files that changed between either parent are
2036 requested revision. Files that changed between either parent are
2036 marked as changed for the next commit and a commit must be
2037 marked as changed for the next commit and a commit must be
2037 performed before any further updates are allowed.
2038 performed before any further updates are allowed.
2038 """
2039 """
2039 return doupdate(ui, repo, node=node, merge=True, **opts)
2040 return doupdate(ui, repo, node=node, merge=True, **opts)
2040
2041
2041 def outgoing(ui, repo, dest=None, **opts):
2042 def outgoing(ui, repo, dest=None, **opts):
2042 """show changesets not found in destination
2043 """show changesets not found in destination
2043
2044
2044 Show changesets not found in the specified destination repository or
2045 Show changesets not found in the specified destination repository or
2045 the default push location. These are the changesets that would be pushed
2046 the default push location. These are the changesets that would be pushed
2046 if a push was requested.
2047 if a push was requested.
2047
2048
2048 See pull for valid destination format details.
2049 See pull for valid destination format details.
2049 """
2050 """
2050 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2051 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2051 ui.setconfig_remoteopts(**opts)
2052 ui.setconfig_remoteopts(**opts)
2052 revs = None
2053 revs = None
2053 if opts['rev']:
2054 if opts['rev']:
2054 revs = [repo.lookup(rev) for rev in opts['rev']]
2055 revs = [repo.lookup(rev) for rev in opts['rev']]
2055
2056
2056 other = hg.repository(ui, dest)
2057 other = hg.repository(ui, dest)
2057 o = repo.findoutgoing(other, force=opts['force'])
2058 o = repo.findoutgoing(other, force=opts['force'])
2058 if not o:
2059 if not o:
2059 ui.status(_("no changes found\n"))
2060 ui.status(_("no changes found\n"))
2060 return
2061 return
2061 o = repo.changelog.nodesbetween(o, revs)[0]
2062 o = repo.changelog.nodesbetween(o, revs)[0]
2062 if opts['newest_first']:
2063 if opts['newest_first']:
2063 o.reverse()
2064 o.reverse()
2064 displayer = show_changeset(ui, repo, opts)
2065 displayer = show_changeset(ui, repo, opts)
2065 for n in o:
2066 for n in o:
2066 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2067 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2067 if opts['no_merges'] and len(parents) == 2:
2068 if opts['no_merges'] and len(parents) == 2:
2068 continue
2069 continue
2069 displayer.show(changenode=n)
2070 displayer.show(changenode=n)
2070 if opts['patch']:
2071 if opts['patch']:
2071 prev = (parents and parents[0]) or nullid
2072 prev = (parents and parents[0]) or nullid
2072 dodiff(ui, ui, repo, prev, n)
2073 dodiff(ui, ui, repo, prev, n)
2073 ui.write("\n")
2074 ui.write("\n")
2074
2075
2075 def parents(ui, repo, rev=None, branches=None, **opts):
2076 def parents(ui, repo, rev=None, branches=None, **opts):
2076 """show the parents of the working dir or revision
2077 """show the parents of the working dir or revision
2077
2078
2078 Print the working directory's parent revisions.
2079 Print the working directory's parent revisions.
2079 """
2080 """
2080 if rev:
2081 if rev:
2081 p = repo.changelog.parents(repo.lookup(rev))
2082 p = repo.changelog.parents(repo.lookup(rev))
2082 else:
2083 else:
2083 p = repo.dirstate.parents()
2084 p = repo.dirstate.parents()
2084
2085
2085 br = None
2086 br = None
2086 if branches is not None:
2087 if branches is not None:
2087 br = repo.branchlookup(p)
2088 br = repo.branchlookup(p)
2088 displayer = show_changeset(ui, repo, opts)
2089 displayer = show_changeset(ui, repo, opts)
2089 for n in p:
2090 for n in p:
2090 if n != nullid:
2091 if n != nullid:
2091 displayer.show(changenode=n, brinfo=br)
2092 displayer.show(changenode=n, brinfo=br)
2092
2093
2093 def paths(ui, repo, search=None):
2094 def paths(ui, repo, search=None):
2094 """show definition of symbolic path names
2095 """show definition of symbolic path names
2095
2096
2096 Show definition of symbolic path name NAME. If no name is given, show
2097 Show definition of symbolic path name NAME. If no name is given, show
2097 definition of available names.
2098 definition of available names.
2098
2099
2099 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2100 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2100 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2101 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2101 """
2102 """
2102 if search:
2103 if search:
2103 for name, path in ui.configitems("paths"):
2104 for name, path in ui.configitems("paths"):
2104 if name == search:
2105 if name == search:
2105 ui.write("%s\n" % path)
2106 ui.write("%s\n" % path)
2106 return
2107 return
2107 ui.warn(_("not found!\n"))
2108 ui.warn(_("not found!\n"))
2108 return 1
2109 return 1
2109 else:
2110 else:
2110 for name, path in ui.configitems("paths"):
2111 for name, path in ui.configitems("paths"):
2111 ui.write("%s = %s\n" % (name, path))
2112 ui.write("%s = %s\n" % (name, path))
2112
2113
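# Editor's sketch (not from the original source): ui.configitems("paths") in
# paths() above yields (name, value) pairs from the [paths] section of the
# hgrc files; the command then either lists them all or prints the one whose
# name matches.  The same selection logic over an invented pair list:

def show_paths(items, search=None):
    if search is not None:
        return [path for name, path in items if name == search]
    return ['%s = %s' % (name, path) for name, path in items]

pairs = [('default', 'http://example.com/repo'), ('backup', '/srv/repo')]
assert show_paths(pairs, 'default') == ['http://example.com/repo']
assert show_paths(pairs) == ['default = http://example.com/repo',
                             'backup = /srv/repo']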
2113 def postincoming(ui, repo, modheads, optupdate):
2114 def postincoming(ui, repo, modheads, optupdate):
2114 if modheads == 0:
2115 if modheads == 0:
2115 return
2116 return
2116 if optupdate:
2117 if optupdate:
2117 if modheads == 1:
2118 if modheads == 1:
2118 return doupdate(ui, repo)
2119 return doupdate(ui, repo)
2119 else:
2120 else:
2120 ui.status(_("not updating, since new heads added\n"))
2121 ui.status(_("not updating, since new heads added\n"))
2121 if modheads > 1:
2122 if modheads > 1:
2122 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2123 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2123 else:
2124 else:
2124 ui.status(_("(run 'hg update' to get a working copy)\n"))
2125 ui.status(_("(run 'hg update' to get a working copy)\n"))
2125
2126
2126 def pull(ui, repo, source="default", **opts):
2127 def pull(ui, repo, source="default", **opts):
2127 """pull changes from the specified source
2128 """pull changes from the specified source
2128
2129
2129 Pull changes from a remote repository to a local one.
2130 Pull changes from a remote repository to a local one.
2130
2131
2131 This finds all changes from the repository at the specified path
2132 This finds all changes from the repository at the specified path
2132 or URL and adds them to the local repository. By default, this
2133 or URL and adds them to the local repository. By default, this
2133 does not update the copy of the project in the working directory.
2134 does not update the copy of the project in the working directory.
2134
2135
2135 Valid URLs are of the form:
2136 Valid URLs are of the form:
2136
2137
2137 local/filesystem/path
2138 local/filesystem/path
2138 http://[user@]host[:port]/[path]
2139 http://[user@]host[:port]/[path]
2139 https://[user@]host[:port]/[path]
2140 https://[user@]host[:port]/[path]
2140 ssh://[user@]host[:port]/[path]
2141 ssh://[user@]host[:port]/[path]
2141
2142
2142 Some notes about using SSH with Mercurial:
2143 Some notes about using SSH with Mercurial:
2143 - SSH requires an accessible shell account on the destination machine
2144 - SSH requires an accessible shell account on the destination machine
2144 and a copy of hg in the remote path or specified as remotecmd.
2145 and a copy of hg in the remote path or specified as remotecmd.
2145 - path is relative to the remote user's home directory by default.
2146 - path is relative to the remote user's home directory by default.
2146 Use an extra slash at the start of a path to specify an absolute path:
2147 Use an extra slash at the start of a path to specify an absolute path:
2147 ssh://example.com//tmp/repository
2148 ssh://example.com//tmp/repository
2148 - Mercurial doesn't use its own compression via SSH; the right thing
2149 - Mercurial doesn't use its own compression via SSH; the right thing
2149 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2150 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2150 Host *.mylocalnetwork.example.com
2151 Host *.mylocalnetwork.example.com
2151 Compression off
2152 Compression off
2152 Host *
2153 Host *
2153 Compression on
2154 Compression on
2154 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2155 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2155 with the --ssh command line option.
2156 with the --ssh command line option.
2156 """
2157 """
2157 source = ui.expandpath(source)
2158 source = ui.expandpath(source)
2158 ui.setconfig_remoteopts(**opts)
2159 ui.setconfig_remoteopts(**opts)
2159
2160
2160 other = hg.repository(ui, source)
2161 other = hg.repository(ui, source)
2161 ui.status(_('pulling from %s\n') % (source))
2162 ui.status(_('pulling from %s\n') % (source))
2162 revs = None
2163 revs = None
2163 if opts['rev'] and not other.local():
2164 if opts['rev'] and not other.local():
2164 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2165 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2165 elif opts['rev']:
2166 elif opts['rev']:
2166 revs = [other.lookup(rev) for rev in opts['rev']]
2167 revs = [other.lookup(rev) for rev in opts['rev']]
2167 modheads = repo.pull(other, heads=revs, force=opts['force'])
2168 modheads = repo.pull(other, heads=revs, force=opts['force'])
2168 return postincoming(ui, repo, modheads, opts['update'])
2169 return postincoming(ui, repo, modheads, opts['update'])
2169
2170
2170 def push(ui, repo, dest=None, **opts):
2171 def push(ui, repo, dest=None, **opts):
2171 """push changes to the specified destination
2172 """push changes to the specified destination
2172
2173
2173 Push changes from the local repository to the given destination.
2174 Push changes from the local repository to the given destination.
2174
2175
2175 This is the symmetrical operation for pull. It helps to move
2176 This is the symmetrical operation for pull. It helps to move
2176 changes from the current repository to a different one. If the
2177 changes from the current repository to a different one. If the
2177 destination is local this is identical to a pull in that directory
2178 destination is local this is identical to a pull in that directory
2178 from the current one.
2179 from the current one.
2179
2180
2180 By default, push will refuse to run if it detects the result would
2181 By default, push will refuse to run if it detects the result would
2181 increase the number of remote heads. This generally indicates that
2182 increase the number of remote heads. This generally indicates that
2182 the client has forgotten to sync and merge before pushing.
2183 the client has forgotten to sync and merge before pushing.
2183
2184
2184 Valid URLs are of the form:
2185 Valid URLs are of the form:
2185
2186
2186 local/filesystem/path
2187 local/filesystem/path
2187 ssh://[user@]host[:port]/[path]
2188 ssh://[user@]host[:port]/[path]
2188
2189
2189 Look at the help text for the pull command for important details
2190 Look at the help text for the pull command for important details
2190 about ssh:// URLs.
2191 about ssh:// URLs.
2191
2192
2192 Pushing to http:// and https:// URLs is possible, too, if this
2193 Pushing to http:// and https:// URLs is possible, too, if this
2193 feature is enabled on the remote Mercurial server.
2194 feature is enabled on the remote Mercurial server.
2194 """
2195 """
2195 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2196 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2196 ui.setconfig_remoteopts(**opts)
2197 ui.setconfig_remoteopts(**opts)
2197
2198
2198 other = hg.repository(ui, dest)
2199 other = hg.repository(ui, dest)
2199 ui.status('pushing to %s\n' % (dest))
2200 ui.status('pushing to %s\n' % (dest))
2200 revs = None
2201 revs = None
2201 if opts['rev']:
2202 if opts['rev']:
2202 revs = [repo.lookup(rev) for rev in opts['rev']]
2203 revs = [repo.lookup(rev) for rev in opts['rev']]
2203 r = repo.push(other, opts['force'], revs=revs)
2204 r = repo.push(other, opts['force'], revs=revs)
2204 return r == 0
2205 return r == 0
2205
2206
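# Editor's sketch (not from the original source): the "refuse to add remote
# heads" behaviour described in the push docstring above boils down to a head
# count comparison: if applying the outgoing changesets would leave the remote
# repository with more heads than before, the push is rejected unless forced.
# The decision in isolation, with invented head sets:

def allow_push(remote_heads_before, remote_heads_after, force=False):
    if force:
        return True
    return len(remote_heads_after) <= len(remote_heads_before)

assert allow_push(set(['a']), set(['b']))                  # one head stays one head
assert not allow_push(set(['a']), set(['a', 'c']))         # would create a new head
assert allow_push(set(['a']), set(['a', 'c']), force=True)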
2206 def rawcommit(ui, repo, *flist, **rc):
2207 def rawcommit(ui, repo, *flist, **rc):
2207 """raw commit interface (DEPRECATED)
2208 """raw commit interface (DEPRECATED)
2208
2209
2209 (DEPRECATED)
2210 (DEPRECATED)
2210 Low-level commit, for use in helper scripts.
2211 Low-level commit, for use in helper scripts.
2211
2212
2212 This command is not intended to be used by normal users, as it is
2213 This command is not intended to be used by normal users, as it is
2213 primarily useful for importing from other SCMs.
2214 primarily useful for importing from other SCMs.
2214
2215
2215 This command is now deprecated and will be removed in a future
2216 This command is now deprecated and will be removed in a future
2216 release; please use debugsetparents and commit instead.
2217 release; please use debugsetparents and commit instead.
2217 """
2218 """
2218
2219
2219 ui.warn(_("(the rawcommit command is deprecated)\n"))
2220 ui.warn(_("(the rawcommit command is deprecated)\n"))
2220
2221
2221 message = rc['message']
2222 message = rc['message']
2222 if not message and rc['logfile']:
2223 if not message and rc['logfile']:
2223 try:
2224 try:
2224 message = open(rc['logfile']).read()
2225 message = open(rc['logfile']).read()
2225 except IOError:
2226 except IOError:
2226 pass
2227 pass
2227 if not message and not rc['logfile']:
2228 if not message and not rc['logfile']:
2228 raise util.Abort(_("missing commit message"))
2229 raise util.Abort(_("missing commit message"))
2229
2230
2230 files = relpath(repo, list(flist))
2231 files = relpath(repo, list(flist))
2231 if rc['files']:
2232 if rc['files']:
2232 files += open(rc['files']).read().splitlines()
2233 files += open(rc['files']).read().splitlines()
2233
2234
2234 rc['parent'] = map(repo.lookup, rc['parent'])
2235 rc['parent'] = map(repo.lookup, rc['parent'])
2235
2236
2236 try:
2237 try:
2237 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2238 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2238 except ValueError, inst:
2239 except ValueError, inst:
2239 raise util.Abort(str(inst))
2240 raise util.Abort(str(inst))
2240
2241
2241 def recover(ui, repo):
2242 def recover(ui, repo):
2242 """roll back an interrupted transaction
2243 """roll back an interrupted transaction
2243
2244
2244 Recover from an interrupted commit or pull.
2245 Recover from an interrupted commit or pull.
2245
2246
2246 This command tries to fix the repository status after an interrupted
2247 This command tries to fix the repository status after an interrupted
2247 operation. It should only be necessary when Mercurial suggests it.
2248 operation. It should only be necessary when Mercurial suggests it.
2248 """
2249 """
2249 if repo.recover():
2250 if repo.recover():
2250 return repo.verify()
2251 return repo.verify()
2251 return 1
2252 return 1
2252
2253
2253 def remove(ui, repo, *pats, **opts):
2254 def remove(ui, repo, *pats, **opts):
2254 """remove the specified files on the next commit
2255 """remove the specified files on the next commit
2255
2256
2256 Schedule the indicated files for removal from the repository.
2257 Schedule the indicated files for removal from the repository.
2257
2258
2258 This command schedules the files to be removed at the next commit.
2259 This command schedules the files to be removed at the next commit.
2259 This only removes files from the current branch, not from the
2260 This only removes files from the current branch, not from the
2260 entire project history. If the files still exist in the working
2261 entire project history. If the files still exist in the working
2261 directory, they will be deleted from it. If invoked with --after,
2262 directory, they will be deleted from it. If invoked with --after,
2262 files that have been manually deleted are marked as removed.
2263 files that have been manually deleted are marked as removed.
2263
2264
2264 Modified files and added files are not removed by default. To
2265 Modified files and added files are not removed by default. To
2265 remove them, use the -f/--force option.
2266 remove them, use the -f/--force option.
2266 """
2267 """
2267 names = []
2268 names = []
2268 if not opts['after'] and not pats:
2269 if not opts['after'] and not pats:
2269 raise util.Abort(_('no files specified'))
2270 raise util.Abort(_('no files specified'))
2270 files, matchfn, anypats = matchpats(repo, pats, opts)
2271 files, matchfn, anypats = matchpats(repo, pats, opts)
2271 exact = dict.fromkeys(files)
2272 exact = dict.fromkeys(files)
2272 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2273 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2273 modified, added, removed, deleted, unknown = mardu
2274 modified, added, removed, deleted, unknown = mardu
2274 remove, forget = [], []
2275 remove, forget = [], []
2275 for src, abs, rel, exact in walk(repo, pats, opts):
2276 for src, abs, rel, exact in walk(repo, pats, opts):
2276 reason = None
2277 reason = None
2277 if abs not in deleted and opts['after']:
2278 if abs not in deleted and opts['after']:
2278 reason = _('is still present')
2279 reason = _('is still present')
2279 elif abs in modified and not opts['force']:
2280 elif abs in modified and not opts['force']:
2280 reason = _('is modified (use -f to force removal)')
2281 reason = _('is modified (use -f to force removal)')
2281 elif abs in added:
2282 elif abs in added:
2282 if opts['force']:
2283 if opts['force']:
2283 forget.append(abs)
2284 forget.append(abs)
2284 continue
2285 continue
2285 reason = _('has been marked for add (use -f to force removal)')
2286 reason = _('has been marked for add (use -f to force removal)')
2286 elif abs in unknown:
2287 elif abs in unknown:
2287 reason = _('is not managed')
2288 reason = _('is not managed')
2288 elif abs in removed:
2289 elif abs in removed:
2289 continue
2290 continue
2290 if reason:
2291 if reason:
2291 if exact:
2292 if exact:
2292 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2293 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2293 else:
2294 else:
2294 if ui.verbose or not exact:
2295 if ui.verbose or not exact:
2295 ui.status(_('removing %s\n') % rel)
2296 ui.status(_('removing %s\n') % rel)
2296 remove.append(abs)
2297 remove.append(abs)
2297 repo.forget(forget)
2298 repo.forget(forget)
2298 repo.remove(remove, unlink=not opts['after'])
2299 repo.remove(remove, unlink=not opts['after'])
2299
2300
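# Editor's sketch (not from the original source): remove() above walks the
# matched files and refuses (with a reason) the ones that are modified or
# added unless -f/--force is given, and the ones it does not manage at all;
# the rest are scheduled for removal.  The same classification over invented
# status sets (the real command additionally "forgets" added files when
# forced; that branch is omitted here):

def removal_reason(name, modified, added, unknown, force=False):
    if name in modified and not force:
        return 'is modified (use -f to force removal)'
    if name in added and not force:
        return 'has been marked for add (use -f to force removal)'
    if name in unknown:
        return 'is not managed'
    return None                      # no objection: schedule for removal

mod, add_, unk = set(['a.txt']), set(['b.txt']), set(['c.txt'])
assert removal_reason('a.txt', mod, add_, unk) is not None
assert removal_reason('a.txt', mod, add_, unk, force=True) is None
assert removal_reason('clean.txt', mod, add_, unk) is None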
2300 def rename(ui, repo, *pats, **opts):
2301 def rename(ui, repo, *pats, **opts):
2301 """rename files; equivalent of copy + remove
2302 """rename files; equivalent of copy + remove
2302
2303
2303 Mark dest as copies of sources; mark sources for deletion. If
2304 Mark dest as copies of sources; mark sources for deletion. If
2304 dest is a directory, copies are put in that directory. If dest is
2305 dest is a directory, copies are put in that directory. If dest is
2305 a file, there can only be one source.
2306 a file, there can only be one source.
2306
2307
2307 By default, this command copies the contents of files as they
2308 By default, this command copies the contents of files as they
2308 stand in the working directory. If invoked with --after, the
2309 stand in the working directory. If invoked with --after, the
2309 operation is recorded, but no copying is performed.
2310 operation is recorded, but no copying is performed.
2310
2311
2311 This command takes effect in the next commit.
2312 This command takes effect in the next commit.
2312
2313
2313 NOTE: This command should be treated as experimental. While it
2314 NOTE: This command should be treated as experimental. While it
2314 should properly record renamed files, this information is not yet
2315 should properly record renamed files, this information is not yet
2315 fully used by merge, nor fully reported by log.
2316 fully used by merge, nor fully reported by log.
2316 """
2317 """
2317 wlock = repo.wlock(0)
2318 wlock = repo.wlock(0)
2318 errs, copied = docopy(ui, repo, pats, opts, wlock)
2319 errs, copied = docopy(ui, repo, pats, opts, wlock)
2319 names = []
2320 names = []
2320 for abs, rel, exact in copied:
2321 for abs, rel, exact in copied:
2321 if ui.verbose or not exact:
2322 if ui.verbose or not exact:
2322 ui.status(_('removing %s\n') % rel)
2323 ui.status(_('removing %s\n') % rel)
2323 names.append(abs)
2324 names.append(abs)
2324 if not opts.get('dry_run'):
2325 if not opts.get('dry_run'):
2325 repo.remove(names, True, wlock)
2326 repo.remove(names, True, wlock)
2326 return errs
2327 return errs
2327
2328
2328 def revert(ui, repo, *pats, **opts):
2329 def revert(ui, repo, *pats, **opts):
2329 """revert files or dirs to their states as of some revision
2330 """revert files or dirs to their states as of some revision
2330
2331
2331 With no revision specified, revert the named files or directories
2332 With no revision specified, revert the named files or directories
2332 to the contents they had in the parent of the working directory.
2333 to the contents they had in the parent of the working directory.
2333 This restores the contents of the affected files to an unmodified
2334 This restores the contents of the affected files to an unmodified
2334 state. If the working directory has two parents, you must
2335 state. If the working directory has two parents, you must
2335 explicitly specify the revision to revert to.
2336 explicitly specify the revision to revert to.
2336
2337
2337 Modified files are saved with a .orig suffix before reverting.
2338 Modified files are saved with a .orig suffix before reverting.
2338 To disable these backups, use --no-backup.
2339 To disable these backups, use --no-backup.
2339
2340
2340 Using the -r option, revert the given files or directories to
2341 Using the -r option, revert the given files or directories to
2341 their contents as of a specific revision. This can be helpful to "roll
2342 their contents as of a specific revision. This can be helpful to "roll
2342 back" some or all of a change that should not have been committed.
2343 back" some or all of a change that should not have been committed.
2343
2344
2344 Revert modifies the working directory. It does not commit any
2345 Revert modifies the working directory. It does not commit any
2345 changes, or change the parent of the working directory. If you
2346 changes, or change the parent of the working directory. If you
2346 revert to a revision other than the parent of the working
2347 revert to a revision other than the parent of the working
2347 directory, the reverted files will thus appear modified
2348 directory, the reverted files will thus appear modified
2348 afterwards.
2349 afterwards.
2349
2350
2350 If a file has been deleted, it is recreated. If the executable
2351 If a file has been deleted, it is recreated. If the executable
2351 mode of a file was changed, it is reset.
2352 mode of a file was changed, it is reset.
2352
2353
2353 If names are given, all files matching the names are reverted.
2354 If names are given, all files matching the names are reverted.
2354
2355
2355 If no arguments are given, all files in the repository are reverted.
2356 If no arguments are given, all files in the repository are reverted.
2356 """
2357 """
2357 parent, p2 = repo.dirstate.parents()
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )
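
    # How the dispatch table is consumed below: each file name is looked up
    # in the state sets above and the first set containing it selects a row.
    # The second column is the action taken when the file exists in the
    # target manifest, the third when it does not, and the two flags say
    # whether to save a .orig backup in each case.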

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
                        show_stats=False)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

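    # When --daemon is used, the re-spawned child arrives here with
    # --daemon-pipefds set: once the server socket has been created below it
    # writes one byte to the inherited pipe, letting the parent (blocked in
    # os.read above) exit, and then detaches its stdio from the terminal.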
    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

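# Illustrative invocations only (not part of the original module):
# "hg serve -p 8000 -n myrepo" runs the browser in the foreground, while
# "hg serve -d --pid-file hg.pid -A access.log -E error.log" daemonizes and
# logs to files, per the options defined in the command table below.
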
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    """

    show_ignored = opts['ignored'] and True or False
    files, matchfn, anypats = matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.changes(files=files, match=matchfn,
                              show_ignored=show_ignored)]

    changetypes = [('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored)]

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            # "%%s" survives this interpolation as a literal "%s" slot,
            # which is filled with the filename below
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)

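# Examples (illustrative only): "hg status -m -a" limits output to modified
# and added files, and "hg status -0 -u" ends each name with NUL so the list
# can be piped to "xargs -0", as the -0/--print0 help text suggests.
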
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the tip is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name == "tip":
        raise util.Abort(_("the name 'tip' is reserved"))
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        r = hex(repo.changelog.tip())

    repo.tag(name, r, opts['local'], opts['message'], opts['user'],
             opts['date'])

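# Example (illustrative only): "hg tag -m 'tagging release' -r 42 v1.0"
# records tag v1.0 for revision 42 in .hgtags, while "hg tag -l scratch"
# writes a purely local tag to .hg/localtags instead.
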
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            r = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
    if opts['patch']:
        dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    elif not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    elif header == "HG10BZ":
        def generator(f):
            zd = bz2.BZ2Decompressor()
            # the 6-byte header read above swallowed the leading "BZ" of the
            # bzip2 stream, so feed it back to the decompressor first
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
    return postincoming(ui, repo, modheads, opts['update'])

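# Example (illustrative only): "hg unbundle changes.hg" applies a bundle from
# the local filesystem, and because the file is opened with urllib a URL such
# as "hg unbundle http://example.com/changes.hg" works as well; -u updates
# the working directory afterwards.
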
def undo(ui, repo):
    """undo the last commit or pull (DEPRECATED)

    (DEPRECATED)
    This command is now deprecated and will be removed in a future
    release. Please use the rollback command instead. For usage
    instructions, see the rollback command.
    """
    ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
    repo.rollback()

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None, **opts):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
    return doupdate(ui, repo, node, merge, clean, force, branch, **opts)

def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
             branch=None, **opts):
    if branch:
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
        else:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return repo.verify()

# Command options and aliases are listed here, alphabetically

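# Each table entry maps a command name to a (function, options, synopsis)
# tuple. A leading "^" marks commands shown in the short help listing and
# "|" separates aliases; every option is a (short flag, long flag, default,
# help text) tuple handed to the option parser.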
table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg add [OPTION]... [FILE]...')),
    "debugaddremove|addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg backout [OPTION]... REV')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated'))],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "debugforget|forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
2972 "^init":
2973 "^init":
2973 (init,
2974 (init,
2974 [('e', 'ssh', '', _('specify ssh command to use')),
2975 [('e', 'ssh', '', _('specify ssh command to use')),
2975 ('', 'remotecmd', '',
2976 ('', 'remotecmd', '',
2976 _('specify hg command to run on the remote side'))],
2977 _('specify hg command to run on the remote side'))],
2977 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2978 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2978 "locate":
2979 "locate":
2979 (locate,
2980 (locate,
2980 [('r', 'rev', '', _('search the repository as it stood at rev')),
2981 [('r', 'rev', '', _('search the repository as it stood at rev')),
2981 ('0', 'print0', None,
2982 ('0', 'print0', None,
2982 _('end filenames with NUL, for use with xargs')),
2983 _('end filenames with NUL, for use with xargs')),
2983 ('f', 'fullpath', None,
2984 ('f', 'fullpath', None,
2984 _('print complete paths from the filesystem root')),
2985 _('print complete paths from the filesystem root')),
2985 ('I', 'include', [], _('include names matching the given patterns')),
2986 ('I', 'include', [], _('include names matching the given patterns')),
2986 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2987 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2987 _('hg locate [OPTION]... [PATTERN]...')),
2988 _('hg locate [OPTION]... [PATTERN]...')),
2988 "^log|history":
2989 "^log|history":
2989 (log,
2990 (log,
2990 [('b', 'branches', None, _('show branches')),
2991 [('b', 'branches', None, _('show branches')),
2991 ('k', 'keyword', [], _('search for a keyword')),
2992 ('k', 'keyword', [], _('search for a keyword')),
2992 ('l', 'limit', '', _('limit number of changes displayed')),
2993 ('l', 'limit', '', _('limit number of changes displayed')),
2993 ('r', 'rev', [], _('show the specified revision or range')),
2994 ('r', 'rev', [], _('show the specified revision or range')),
2994 ('M', 'no-merges', None, _('do not show merges')),
2995 ('M', 'no-merges', None, _('do not show merges')),
2995 ('', 'style', '', _('display using template map file')),
2996 ('', 'style', '', _('display using template map file')),
2996 ('m', 'only-merges', None, _('show only merges')),
2997 ('m', 'only-merges', None, _('show only merges')),
2997 ('p', 'patch', None, _('show patch')),
2998 ('p', 'patch', None, _('show patch')),
2998 ('', 'template', '', _('display with template')),
2999 ('', 'template', '', _('display with template')),
2999 ('I', 'include', [], _('include names matching the given patterns')),
3000 ('I', 'include', [], _('include names matching the given patterns')),
3000 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3001 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3001 _('hg log [OPTION]... [FILE]')),
3002 _('hg log [OPTION]... [FILE]')),
3002 "manifest": (manifest, [], _('hg manifest [REV]')),
3003 "manifest": (manifest, [], _('hg manifest [REV]')),
3003 "merge":
3004 "merge":
3004 (merge,
3005 (merge,
3005 [('b', 'branch', '', _('merge with head of a specific branch')),
3006 [('b', 'branch', '', _('merge with head of a specific branch')),
3006 ('f', 'force', None, _('force a merge with outstanding changes'))],
3007 ('f', 'force', None, _('force a merge with outstanding changes'))],
3007 _('hg merge [-b TAG] [-f] [REV]')),
3008 _('hg merge [-b TAG] [-f] [REV]')),
3008 "outgoing|out": (outgoing,
3009 "outgoing|out": (outgoing,
3009 [('M', 'no-merges', None, _('do not show merges')),
3010 [('M', 'no-merges', None, _('do not show merges')),
3010 ('f', 'force', None,
3011 ('f', 'force', None,
3011 _('run even when remote repository is unrelated')),
3012 _('run even when remote repository is unrelated')),
3012 ('p', 'patch', None, _('show patch')),
3013 ('p', 'patch', None, _('show patch')),
3013 ('', 'style', '', _('display using template map file')),
3014 ('', 'style', '', _('display using template map file')),
3014 ('r', 'rev', [], _('a specific revision you would like to push')),
3015 ('r', 'rev', [], _('a specific revision you would like to push')),
3015 ('n', 'newest-first', None, _('show newest record first')),
3016 ('n', 'newest-first', None, _('show newest record first')),
3016 ('', 'template', '', _('display with template')),
3017 ('', 'template', '', _('display with template')),
3017 ('e', 'ssh', '', _('specify ssh command to use')),
3018 ('e', 'ssh', '', _('specify ssh command to use')),
3018 ('', 'remotecmd', '',
3019 ('', 'remotecmd', '',
3019 _('specify hg command to run on the remote side'))],
3020 _('specify hg command to run on the remote side'))],
3020 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3021 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3021 "^parents":
3022 "^parents":
3022 (parents,
3023 (parents,
3023 [('b', 'branches', None, _('show branches')),
3024 [('b', 'branches', None, _('show branches')),
3024 ('', 'style', '', _('display using template map file')),
3025 ('', 'style', '', _('display using template map file')),
3025 ('', 'template', '', _('display with template'))],
3026 ('', 'template', '', _('display with template'))],
3026 _('hg parents [-b] [REV]')),
3027 _('hg parents [-b] [REV]')),
3027 "paths": (paths, [], _('hg paths [NAME]')),
3028 "paths": (paths, [], _('hg paths [NAME]')),
3028 "^pull":
3029 "^pull":
3029 (pull,
3030 (pull,
3030 [('u', 'update', None,
3031 [('u', 'update', None,
3031 _('update the working directory to tip after pull')),
3032 _('update the working directory to tip after pull')),
3032 ('e', 'ssh', '', _('specify ssh command to use')),
3033 ('e', 'ssh', '', _('specify ssh command to use')),
3033 ('f', 'force', None,
3034 ('f', 'force', None,
3034 _('run even when remote repository is unrelated')),
3035 _('run even when remote repository is unrelated')),
3035 ('r', 'rev', [], _('a specific revision you would like to pull')),
3036 ('r', 'rev', [], _('a specific revision you would like to pull')),
3036 ('', 'remotecmd', '',
3037 ('', 'remotecmd', '',
3037 _('specify hg command to run on the remote side'))],
3038 _('specify hg command to run on the remote side'))],
3038 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3039 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3039 "^push":
3040 "^push":
3040 (push,
3041 (push,
3041 [('f', 'force', None, _('force push')),
3042 [('f', 'force', None, _('force push')),
3042 ('e', 'ssh', '', _('specify ssh command to use')),
3043 ('e', 'ssh', '', _('specify ssh command to use')),
3043 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 ('', 'remotecmd', '',
3045 ('', 'remotecmd', '',
3045 _('specify hg command to run on the remote side'))],
3046 _('specify hg command to run on the remote side'))],
3046 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3047 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3047 "debugrawcommit|rawcommit":
3048 "debugrawcommit|rawcommit":
3048 (rawcommit,
3049 (rawcommit,
3049 [('p', 'parent', [], _('parent')),
3050 [('p', 'parent', [], _('parent')),
3050 ('d', 'date', '', _('date code')),
3051 ('d', 'date', '', _('date code')),
3051 ('u', 'user', '', _('user')),
3052 ('u', 'user', '', _('user')),
3052 ('F', 'files', '', _('file list')),
3053 ('F', 'files', '', _('file list')),
3053 ('m', 'message', '', _('commit message')),
3054 ('m', 'message', '', _('commit message')),
3054 ('l', 'logfile', '', _('commit message file'))],
3055 ('l', 'logfile', '', _('commit message file'))],
3055 _('hg debugrawcommit [OPTION]... [FILE]...')),
3056 _('hg debugrawcommit [OPTION]... [FILE]...')),
3056 "recover": (recover, [], _('hg recover')),
3057 "recover": (recover, [], _('hg recover')),
3057 "^remove|rm":
3058 "^remove|rm":
3058 (remove,
3059 (remove,
3059 [('A', 'after', None, _('record remove that has already occurred')),
3060 [('A', 'after', None, _('record remove that has already occurred')),
3060 ('f', 'force', None, _('remove file even if modified')),
3061 ('f', 'force', None, _('remove file even if modified')),
3061 ('I', 'include', [], _('include names matching the given patterns')),
3062 ('I', 'include', [], _('include names matching the given patterns')),
3062 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3063 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3063 _('hg remove [OPTION]... FILE...')),
3064 _('hg remove [OPTION]... FILE...')),
3064 "rename|mv":
3065 "rename|mv":
3065 (rename,
3066 (rename,
3066 [('A', 'after', None, _('record a rename that has already occurred')),
3067 [('A', 'after', None, _('record a rename that has already occurred')),
3067 ('f', 'force', None,
3068 ('f', 'force', None,
3068 _('forcibly copy over an existing managed file')),
3069 _('forcibly copy over an existing managed file')),
3069 ('I', 'include', [], _('include names matching the given patterns')),
3070 ('I', 'include', [], _('include names matching the given patterns')),
3070 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3071 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3071 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3072 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3072 _('hg rename [OPTION]... SOURCE... DEST')),
3073 _('hg rename [OPTION]... SOURCE... DEST')),
3073 "^revert":
3074 "^revert":
3074 (revert,
3075 (revert,
3075 [('r', 'rev', '', _('revision to revert to')),
3076 [('r', 'rev', '', _('revision to revert to')),
3076 ('', 'no-backup', None, _('do not save backup copies of files')),
3077 ('', 'no-backup', None, _('do not save backup copies of files')),
3077 ('I', 'include', [], _('include names matching given patterns')),
3078 ('I', 'include', [], _('include names matching given patterns')),
3078 ('X', 'exclude', [], _('exclude names matching given patterns')),
3079 ('X', 'exclude', [], _('exclude names matching given patterns')),
3079 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3080 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3080 _('hg revert [-r REV] [NAME]...')),
3081 _('hg revert [-r REV] [NAME]...')),
3081 "rollback": (rollback, [], _('hg rollback')),
3082 "rollback": (rollback, [], _('hg rollback')),
3082 "root": (root, [], _('hg root')),
3083 "root": (root, [], _('hg root')),
3083 "^serve":
3084 "^serve":
3084 (serve,
3085 (serve,
3085 [('A', 'accesslog', '', _('name of access log file to write to')),
3086 [('A', 'accesslog', '', _('name of access log file to write to')),
3086 ('d', 'daemon', None, _('run server in background')),
3087 ('d', 'daemon', None, _('run server in background')),
3087 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3088 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3088 ('E', 'errorlog', '', _('name of error log file to write to')),
3089 ('E', 'errorlog', '', _('name of error log file to write to')),
3089 ('p', 'port', 0, _('port to use (default: 8000)')),
3090 ('p', 'port', 0, _('port to use (default: 8000)')),
3090 ('a', 'address', '', _('address to use')),
3091 ('a', 'address', '', _('address to use')),
3091 ('n', 'name', '',
3092 ('n', 'name', '',
3092 _('name to show in web pages (default: working dir)')),
3093 _('name to show in web pages (default: working dir)')),
3093 ('', 'webdir-conf', '', _('name of the webdir config file'
3094 ('', 'webdir-conf', '', _('name of the webdir config file'
3094 ' (serve more than one repo)')),
3095 ' (serve more than one repo)')),
3095 ('', 'pid-file', '', _('name of file to write process ID to')),
3096 ('', 'pid-file', '', _('name of file to write process ID to')),
3096 ('', 'stdio', None, _('for remote clients')),
3097 ('', 'stdio', None, _('for remote clients')),
3097 ('t', 'templates', '', _('web templates to use')),
3098 ('t', 'templates', '', _('web templates to use')),
3098 ('', 'style', '', _('template style to use')),
3099 ('', 'style', '', _('template style to use')),
3099 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3100 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3100 _('hg serve [OPTION]...')),
3101 _('hg serve [OPTION]...')),
3101 "^status|st":
3102 "^status|st":
3102 (status,
3103 (status,
3103 [('m', 'modified', None, _('show only modified files')),
3104 [('m', 'modified', None, _('show only modified files')),
3104 ('a', 'added', None, _('show only added files')),
3105 ('a', 'added', None, _('show only added files')),
3105 ('r', 'removed', None, _('show only removed files')),
3106 ('r', 'removed', None, _('show only removed files')),
3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3107 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 ('i', 'ignored', None, _('show ignored files')),
3109 ('i', 'ignored', None, _('show ignored files')),
3109 ('n', 'no-status', None, _('hide status prefix')),
3110 ('n', 'no-status', None, _('hide status prefix')),
3110 ('0', 'print0', None,
3111 ('0', 'print0', None,
3111 _('end filenames with NUL, for use with xargs')),
3112 _('end filenames with NUL, for use with xargs')),
3112 ('I', 'include', [], _('include names matching the given patterns')),
3113 ('I', 'include', [], _('include names matching the given patterns')),
3113 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3114 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3114 _('hg status [OPTION]... [FILE]...')),
3115 _('hg status [OPTION]... [FILE]...')),
3115 "tag":
3116 "tag":
3116 (tag,
3117 (tag,
3117 [('l', 'local', None, _('make the tag local')),
3118 [('l', 'local', None, _('make the tag local')),
3118 ('m', 'message', '', _('message for tag commit log entry')),
3119 ('m', 'message', '', _('message for tag commit log entry')),
3119 ('d', 'date', '', _('record datecode as commit date')),
3120 ('d', 'date', '', _('record datecode as commit date')),
3120 ('u', 'user', '', _('record user as commiter')),
3121 ('u', 'user', '', _('record user as commiter')),
3121 ('r', 'rev', '', _('revision to tag'))],
3122 ('r', 'rev', '', _('revision to tag'))],
3122 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3123 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3123 "tags": (tags, [], _('hg tags')),
3124 "tags": (tags, [], _('hg tags')),
3124 "tip":
3125 "tip":
3125 (tip,
3126 (tip,
3126 [('b', 'branches', None, _('show branches')),
3127 [('b', 'branches', None, _('show branches')),
3127 ('', 'style', '', _('display using template map file')),
3128 ('', 'style', '', _('display using template map file')),
3128 ('p', 'patch', None, _('show patch')),
3129 ('p', 'patch', None, _('show patch')),
3129 ('', 'template', '', _('display with template'))],
3130 ('', 'template', '', _('display with template'))],
3130 _('hg tip [-b] [-p]')),
3131 _('hg tip [-b] [-p]')),
3131 "unbundle":
3132 "unbundle":
3132 (unbundle,
3133 (unbundle,
3133 [('u', 'update', None,
3134 [('u', 'update', None,
3134 _('update the working directory to tip after unbundle'))],
3135 _('update the working directory to tip after unbundle'))],
3135 _('hg unbundle [-u] FILE')),
3136 _('hg unbundle [-u] FILE')),
3136 "debugundo|undo": (undo, [], _('hg undo')),
3137 "debugundo|undo": (undo, [], _('hg undo')),
3137 "^update|up|checkout|co":
3138 "^update|up|checkout|co":
3138 (update,
3139 (update,
3139 [('b', 'branch', '', _('checkout the head of a specific branch')),
3140 [('b', 'branch', '', _('checkout the head of a specific branch')),
3140 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3141 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3141 ('C', 'clean', None, _('overwrite locally modified files')),
3142 ('C', 'clean', None, _('overwrite locally modified files')),
3142 ('f', 'force', None, _('force a merge with outstanding changes'))],
3143 ('f', 'force', None, _('force a merge with outstanding changes'))],
3143 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3144 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3144 "verify": (verify, [], _('hg verify')),
3145 "verify": (verify, [], _('hg verify')),
3145 "version": (show_version, [], _('hg version')),
3146 "version": (show_version, [], _('hg version')),
3146 }
3147 }
3147
3148
3148 globalopts = [
3149 globalopts = [
3149 ('R', 'repository', '',
3150 ('R', 'repository', '',
3150 _('repository root directory or symbolic path name')),
3151 _('repository root directory or symbolic path name')),
3151 ('', 'cwd', '', _('change working directory')),
3152 ('', 'cwd', '', _('change working directory')),
3152 ('y', 'noninteractive', None,
3153 ('y', 'noninteractive', None,
3153 _('do not prompt, assume \'yes\' for any required answers')),
3154 _('do not prompt, assume \'yes\' for any required answers')),
3154 ('q', 'quiet', None, _('suppress output')),
3155 ('q', 'quiet', None, _('suppress output')),
3155 ('v', 'verbose', None, _('enable additional output')),
3156 ('v', 'verbose', None, _('enable additional output')),
3156 ('', 'config', [], _('set/override config option')),
3157 ('', 'config', [], _('set/override config option')),
3157 ('', 'debug', None, _('enable debugging output')),
3158 ('', 'debug', None, _('enable debugging output')),
3158 ('', 'debugger', None, _('start debugger')),
3159 ('', 'debugger', None, _('start debugger')),
3159 ('', 'lsprof', None, _('print improved command execution profile')),
3160 ('', 'lsprof', None, _('print improved command execution profile')),
3160 ('', 'traceback', None, _('print traceback on exception')),
3161 ('', 'traceback', None, _('print traceback on exception')),
3161 ('', 'time', None, _('time how long the command takes')),
3162 ('', 'time', None, _('time how long the command takes')),
3162 ('', 'profile', None, _('print command execution profile')),
3163 ('', 'profile', None, _('print command execution profile')),
3163 ('', 'version', None, _('output version information and exit')),
3164 ('', 'version', None, _('output version information and exit')),
3164 ('h', 'help', None, _('display help and exit')),
3165 ('h', 'help', None, _('display help and exit')),
3165 ]
3166 ]
3166
3167
3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3168 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3168 " debugindex debugindexdot")
3169 " debugindex debugindexdot")
3169 optionalrepo = ("paths serve debugconfig")
3170 optionalrepo = ("paths serve debugconfig")
3170
3171
3171 def findpossible(cmd):
3172 def findpossible(cmd):
3172 """
3173 """
3173 Return cmd -> (aliases, command table entry)
3174 Return cmd -> (aliases, command table entry)
3174 for each matching command.
3175 for each matching command.
3175 Return debug commands (or their aliases) only if no normal command matches.
3176 Return debug commands (or their aliases) only if no normal command matches.
3176 """
3177 """
3177 choice = {}
3178 choice = {}
3178 debugchoice = {}
3179 debugchoice = {}
3179 for e in table.keys():
3180 for e in table.keys():
3180 aliases = e.lstrip("^").split("|")
3181 aliases = e.lstrip("^").split("|")
3181 found = None
3182 found = None
3182 if cmd in aliases:
3183 if cmd in aliases:
3183 found = cmd
3184 found = cmd
3184 else:
3185 else:
3185 for a in aliases:
3186 for a in aliases:
3186 if a.startswith(cmd):
3187 if a.startswith(cmd):
3187 found = a
3188 found = a
3188 break
3189 break
3189 if found is not None:
3190 if found is not None:
3190 if aliases[0].startswith("debug"):
3191 if aliases[0].startswith("debug"):
3191 debugchoice[found] = (aliases, table[e])
3192 debugchoice[found] = (aliases, table[e])
3192 else:
3193 else:
3193 choice[found] = (aliases, table[e])
3194 choice[found] = (aliases, table[e])
3194
3195
3195 if not choice and debugchoice:
3196 if not choice and debugchoice:
3196 choice = debugchoice
3197 choice = debugchoice
3197
3198
3198 return choice
3199 return choice
3199
3200
3200 def findcmd(cmd):
3201 def findcmd(cmd):
3201 """Return (aliases, command table entry) for command string."""
3202 """Return (aliases, command table entry) for command string."""
3202 choice = findpossible(cmd)
3203 choice = findpossible(cmd)
3203
3204
3204 if choice.has_key(cmd):
3205 if choice.has_key(cmd):
3205 return choice[cmd]
3206 return choice[cmd]
3206
3207
3207 if len(choice) > 1:
3208 if len(choice) > 1:
3208 clist = choice.keys()
3209 clist = choice.keys()
3209 clist.sort()
3210 clist.sort()
3210 raise AmbiguousCommand(cmd, clist)
3211 raise AmbiguousCommand(cmd, clist)
3211
3212
3212 if choice:
3213 if choice:
3213 return choice.values()[0]
3214 return choice.values()[0]
3214
3215
3215 raise UnknownCommand(cmd)
3216 raise UnknownCommand(cmd)
3216
3217
3217 def catchterm(*args):
3218 def catchterm(*args):
3218 raise util.SignalInterrupt
3219 raise util.SignalInterrupt
3219
3220
3220 def run():
3221 def run():
3221 sys.exit(dispatch(sys.argv[1:]))
3222 sys.exit(dispatch(sys.argv[1:]))
3222
3223
3223 class ParseError(Exception):
3224 class ParseError(Exception):
3224 """Exception raised on errors in parsing the command line."""
3225 """Exception raised on errors in parsing the command line."""
3225
3226
3226 def parse(ui, args):
3227 def parse(ui, args):
3227 options = {}
3228 options = {}
3228 cmdoptions = {}
3229 cmdoptions = {}
3229
3230
3230 try:
3231 try:
3231 args = fancyopts.fancyopts(args, globalopts, options)
3232 args = fancyopts.fancyopts(args, globalopts, options)
3232 except fancyopts.getopt.GetoptError, inst:
3233 except fancyopts.getopt.GetoptError, inst:
3233 raise ParseError(None, inst)
3234 raise ParseError(None, inst)
3234
3235
3235 if args:
3236 if args:
3236 cmd, args = args[0], args[1:]
3237 cmd, args = args[0], args[1:]
3237 aliases, i = findcmd(cmd)
3238 aliases, i = findcmd(cmd)
3238 cmd = aliases[0]
3239 cmd = aliases[0]
3239 defaults = ui.config("defaults", cmd)
3240 defaults = ui.config("defaults", cmd)
3240 if defaults:
3241 if defaults:
3241 args = defaults.split() + args
3242 args = defaults.split() + args
3242 c = list(i[1])
3243 c = list(i[1])
3243 else:
3244 else:
3244 cmd = None
3245 cmd = None
3245 c = []
3246 c = []
3246
3247
3247 # combine global options into local
3248 # combine global options into local
3248 for o in globalopts:
3249 for o in globalopts:
3249 c.append((o[0], o[1], options[o[1]], o[3]))
3250 c.append((o[0], o[1], options[o[1]], o[3]))
3250
3251
3251 try:
3252 try:
3252 args = fancyopts.fancyopts(args, c, cmdoptions)
3253 args = fancyopts.fancyopts(args, c, cmdoptions)
3253 except fancyopts.getopt.GetoptError, inst:
3254 except fancyopts.getopt.GetoptError, inst:
3254 raise ParseError(cmd, inst)
3255 raise ParseError(cmd, inst)
3255
3256
3256 # separate global options back out
3257 # separate global options back out
3257 for o in globalopts:
3258 for o in globalopts:
3258 n = o[1]
3259 n = o[1]
3259 options[n] = cmdoptions[n]
3260 options[n] = cmdoptions[n]
3260 del cmdoptions[n]
3261 del cmdoptions[n]
3261
3262
3262 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3263 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3263
3264
3264 external = {}
3265 external = {}
3265
3266
3266 def findext(name):
3267 def findext(name):
3267 '''return module with given extension name'''
3268 '''return module with given extension name'''
3268 try:
3269 try:
3269 return sys.modules[external[name]]
3270 return sys.modules[external[name]]
3270 except KeyError:
3271 except KeyError:
3271 dotname = '.' + name
3272 dotname = '.' + name
3272 for k, v in external.iteritems():
3273 for k, v in external.iteritems():
3273 if k.endswith('.' + name) or v == name:
3274 if k.endswith('.' + name) or v == name:
3274 return sys.modules[v]
3275 return sys.modules[v]
3275 raise KeyError(name)
3276 raise KeyError(name)
3276
3277
3277 def dispatch(args):
3278 def dispatch(args):
3278 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3279 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3279 num = getattr(signal, name, None)
3280 num = getattr(signal, name, None)
3280 if num: signal.signal(num, catchterm)
3281 if num: signal.signal(num, catchterm)
3281
3282
3282 try:
3283 try:
3283 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3284 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3284 except util.Abort, inst:
3285 except util.Abort, inst:
3285 sys.stderr.write(_("abort: %s\n") % inst)
3286 sys.stderr.write(_("abort: %s\n") % inst)
3286 return -1
3287 return -1
3287
3288
3288 for ext_name, load_from_name in u.extensions():
3289 for ext_name, load_from_name in u.extensions():
3289 try:
3290 try:
3290 if load_from_name:
3291 if load_from_name:
3291 # the module will be loaded in sys.modules
3292 # the module will be loaded in sys.modules
3292 # choose a unique name so that it doesn't
3293 # choose a unique name so that it doesn't
3293 # conflict with other modules
3294 # conflict with other modules
3294 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 mod = imp.load_source(module_name, load_from_name)
3296 mod = imp.load_source(module_name, load_from_name)
3296 else:
3297 else:
3297 def importh(name):
3298 def importh(name):
3298 mod = __import__(name)
3299 mod = __import__(name)
3299 components = name.split('.')
3300 components = name.split('.')
3300 for comp in components[1:]:
3301 for comp in components[1:]:
3301 mod = getattr(mod, comp)
3302 mod = getattr(mod, comp)
3302 return mod
3303 return mod
3303 try:
3304 try:
3304 mod = importh("hgext.%s" % ext_name)
3305 mod = importh("hgext.%s" % ext_name)
3305 except ImportError:
3306 except ImportError:
3306 mod = importh(ext_name)
3307 mod = importh(ext_name)
3307 external[ext_name] = mod.__name__
3308 external[ext_name] = mod.__name__
3308 except (util.SignalInterrupt, KeyboardInterrupt):
3309 except (util.SignalInterrupt, KeyboardInterrupt):
3309 raise
3310 raise
3310 except Exception, inst:
3311 except Exception, inst:
3311 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3312 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3312 if u.print_exc():
3313 if u.print_exc():
3313 return 1
3314 return 1
3314
3315
3315 for name in external.itervalues():
3316 for name in external.itervalues():
3316 mod = sys.modules[name]
3317 mod = sys.modules[name]
3317 uisetup = getattr(mod, 'uisetup', None)
3318 uisetup = getattr(mod, 'uisetup', None)
3318 if uisetup:
3319 if uisetup:
3319 uisetup(u)
3320 uisetup(u)
3320 cmdtable = getattr(mod, 'cmdtable', {})
3321 cmdtable = getattr(mod, 'cmdtable', {})
3321 for t in cmdtable:
3322 for t in cmdtable:
3322 if t in table:
3323 if t in table:
3323 u.warn(_("module %s overrides %s\n") % (name, t))
3324 u.warn(_("module %s overrides %s\n") % (name, t))
3324 table.update(cmdtable)
3325 table.update(cmdtable)
3325
3326
3326 try:
3327 try:
3327 cmd, func, args, options, cmdoptions = parse(u, args)
3328 cmd, func, args, options, cmdoptions = parse(u, args)
3328 if options["time"]:
3329 if options["time"]:
3329 def get_times():
3330 def get_times():
3330 t = os.times()
3331 t = os.times()
3331 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3332 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3332 t = (t[0], t[1], t[2], t[3], time.clock())
3333 t = (t[0], t[1], t[2], t[3], time.clock())
3333 return t
3334 return t
3334 s = get_times()
3335 s = get_times()
3335 def print_time():
3336 def print_time():
3336 t = get_times()
3337 t = get_times()
3337 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3338 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3338 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3339 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3339 atexit.register(print_time)
3340 atexit.register(print_time)
3340
3341
3341 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3342 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3342 not options["noninteractive"], options["traceback"],
3343 not options["noninteractive"], options["traceback"],
3343 options["config"])
3344 options["config"])
3344
3345
3345 # enter the debugger before command execution
3346 # enter the debugger before command execution
3346 if options['debugger']:
3347 if options['debugger']:
3347 pdb.set_trace()
3348 pdb.set_trace()
3348
3349
3349 try:
3350 try:
3350 if options['cwd']:
3351 if options['cwd']:
3351 try:
3352 try:
3352 os.chdir(options['cwd'])
3353 os.chdir(options['cwd'])
3353 except OSError, inst:
3354 except OSError, inst:
3354 raise util.Abort('%s: %s' %
3355 raise util.Abort('%s: %s' %
3355 (options['cwd'], inst.strerror))
3356 (options['cwd'], inst.strerror))
3356
3357
3357 path = u.expandpath(options["repository"]) or ""
3358 path = u.expandpath(options["repository"]) or ""
3358 repo = path and hg.repository(u, path=path) or None
3359 repo = path and hg.repository(u, path=path) or None
3359
3360
3360 if options['help']:
3361 if options['help']:
3361 return help_(u, cmd, options['version'])
3362 return help_(u, cmd, options['version'])
3362 elif options['version']:
3363 elif options['version']:
3363 return show_version(u)
3364 return show_version(u)
3364 elif not cmd:
3365 elif not cmd:
3365 return help_(u, 'shortlist')
3366 return help_(u, 'shortlist')
3366
3367
3367 if cmd not in norepo.split():
3368 if cmd not in norepo.split():
3368 try:
3369 try:
3369 if not repo:
3370 if not repo:
3370 repo = hg.repository(u, path=path)
3371 repo = hg.repository(u, path=path)
3371 u = repo.ui
3372 u = repo.ui
3372 for name in external.itervalues():
3373 for name in external.itervalues():
3373 mod = sys.modules[name]
3374 mod = sys.modules[name]
3374 if hasattr(mod, 'reposetup'):
3375 if hasattr(mod, 'reposetup'):
3375 mod.reposetup(u, repo)
3376 mod.reposetup(u, repo)
3376 except hg.RepoError:
3377 except hg.RepoError:
3377 if cmd not in optionalrepo.split():
3378 if cmd not in optionalrepo.split():
3378 raise
3379 raise
3379 d = lambda: func(u, repo, *args, **cmdoptions)
3380 d = lambda: func(u, repo, *args, **cmdoptions)
3380 else:
3381 else:
3381 d = lambda: func(u, *args, **cmdoptions)
3382 d = lambda: func(u, *args, **cmdoptions)
3382
3383
3383 try:
3384 try:
3384 if options['profile']:
3385 if options['profile']:
3385 import hotshot, hotshot.stats
3386 import hotshot, hotshot.stats
3386 prof = hotshot.Profile("hg.prof")
3387 prof = hotshot.Profile("hg.prof")
3387 try:
3388 try:
3388 try:
3389 try:
3389 return prof.runcall(d)
3390 return prof.runcall(d)
3390 except:
3391 except:
3391 try:
3392 try:
3392 u.warn(_('exception raised - generating '
3393 u.warn(_('exception raised - generating '
3393 'profile anyway\n'))
3394 'profile anyway\n'))
3394 except:
3395 except:
3395 pass
3396 pass
3396 raise
3397 raise
3397 finally:
3398 finally:
3398 prof.close()
3399 prof.close()
3399 stats = hotshot.stats.load("hg.prof")
3400 stats = hotshot.stats.load("hg.prof")
3400 stats.strip_dirs()
3401 stats.strip_dirs()
3401 stats.sort_stats('time', 'calls')
3402 stats.sort_stats('time', 'calls')
3402 stats.print_stats(40)
3403 stats.print_stats(40)
3403 elif options['lsprof']:
3404 elif options['lsprof']:
3404 try:
3405 try:
3405 from mercurial import lsprof
3406 from mercurial import lsprof
3406 except ImportError:
3407 except ImportError:
3407 raise util.Abort(_(
3408 raise util.Abort(_(
3408 'lsprof not available - install from '
3409 'lsprof not available - install from '
3409 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3410 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3410 p = lsprof.Profiler()
3411 p = lsprof.Profiler()
3411 p.enable(subcalls=True)
3412 p.enable(subcalls=True)
3412 try:
3413 try:
3413 return d()
3414 return d()
3414 finally:
3415 finally:
3415 p.disable()
3416 p.disable()
3416 stats = lsprof.Stats(p.getstats())
3417 stats = lsprof.Stats(p.getstats())
3417 stats.sort()
3418 stats.sort()
3418 stats.pprint(top=10, file=sys.stderr, climit=5)
3419 stats.pprint(top=10, file=sys.stderr, climit=5)
3419 else:
3420 else:
3420 return d()
3421 return d()
3421 finally:
3422 finally:
3422 u.flush()
3423 u.flush()
3423 except:
3424 except:
3424 # enter the debugger when we hit an exception
3425 # enter the debugger when we hit an exception
3425 if options['debugger']:
3426 if options['debugger']:
3426 pdb.post_mortem(sys.exc_info()[2])
3427 pdb.post_mortem(sys.exc_info()[2])
3427 u.print_exc()
3428 u.print_exc()
3428 raise
3429 raise
3429 except ParseError, inst:
3430 except ParseError, inst:
3430 if inst.args[0]:
3431 if inst.args[0]:
3431 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3432 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3432 help_(u, inst.args[0])
3433 help_(u, inst.args[0])
3433 else:
3434 else:
3434 u.warn(_("hg: %s\n") % inst.args[1])
3435 u.warn(_("hg: %s\n") % inst.args[1])
3435 help_(u, 'shortlist')
3436 help_(u, 'shortlist')
3436 except AmbiguousCommand, inst:
3437 except AmbiguousCommand, inst:
3437 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3438 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3438 (inst.args[0], " ".join(inst.args[1])))
3439 (inst.args[0], " ".join(inst.args[1])))
3439 except UnknownCommand, inst:
3440 except UnknownCommand, inst:
3440 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3441 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3441 help_(u, 'shortlist')
3442 help_(u, 'shortlist')
3442 except hg.RepoError, inst:
3443 except hg.RepoError, inst:
3443 u.warn(_("abort: %s!\n") % inst)
3444 u.warn(_("abort: %s!\n") % inst)
3444 except lock.LockHeld, inst:
3445 except lock.LockHeld, inst:
3445 if inst.errno == errno.ETIMEDOUT:
3446 if inst.errno == errno.ETIMEDOUT:
3446 reason = _('timed out waiting for lock held by %s') % inst.locker
3447 reason = _('timed out waiting for lock held by %s') % inst.locker
3447 else:
3448 else:
3448 reason = _('lock held by %s') % inst.locker
3449 reason = _('lock held by %s') % inst.locker
3449 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3450 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3450 except lock.LockUnavailable, inst:
3451 except lock.LockUnavailable, inst:
3451 u.warn(_("abort: could not lock %s: %s\n") %
3452 u.warn(_("abort: could not lock %s: %s\n") %
3452 (inst.desc or inst.filename, inst.strerror))
3453 (inst.desc or inst.filename, inst.strerror))
3453 except revlog.RevlogError, inst:
3454 except revlog.RevlogError, inst:
3454 u.warn(_("abort: "), inst, "!\n")
3455 u.warn(_("abort: "), inst, "!\n")
3455 except util.SignalInterrupt:
3456 except util.SignalInterrupt:
3456 u.warn(_("killed!\n"))
3457 u.warn(_("killed!\n"))
3457 except KeyboardInterrupt:
3458 except KeyboardInterrupt:
3458 try:
3459 try:
3459 u.warn(_("interrupted!\n"))
3460 u.warn(_("interrupted!\n"))
3460 except IOError, inst:
3461 except IOError, inst:
3461 if inst.errno == errno.EPIPE:
3462 if inst.errno == errno.EPIPE:
3462 if u.debugflag:
3463 if u.debugflag:
3463 u.warn(_("\nbroken pipe\n"))
3464 u.warn(_("\nbroken pipe\n"))
3464 else:
3465 else:
3465 raise
3466 raise
3466 except IOError, inst:
3467 except IOError, inst:
3467 if hasattr(inst, "code"):
3468 if hasattr(inst, "code"):
3468 u.warn(_("abort: %s\n") % inst)
3469 u.warn(_("abort: %s\n") % inst)
3469 elif hasattr(inst, "reason"):
3470 elif hasattr(inst, "reason"):
3470 u.warn(_("abort: error: %s\n") % inst.reason[1])
3471 u.warn(_("abort: error: %s\n") % inst.reason[1])
3471 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3472 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3472 if u.debugflag:
3473 if u.debugflag:
3473 u.warn(_("broken pipe\n"))
3474 u.warn(_("broken pipe\n"))
3474 elif getattr(inst, "strerror", None):
3475 elif getattr(inst, "strerror", None):
3475 if getattr(inst, "filename", None):
3476 if getattr(inst, "filename", None):
3476 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3477 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3477 else:
3478 else:
3478 u.warn(_("abort: %s\n") % inst.strerror)
3479 u.warn(_("abort: %s\n") % inst.strerror)
3479 else:
3480 else:
3480 raise
3481 raise
3481 except OSError, inst:
3482 except OSError, inst:
3482 if hasattr(inst, "filename"):
3483 if hasattr(inst, "filename"):
3483 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3484 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3484 else:
3485 else:
3485 u.warn(_("abort: %s\n") % inst.strerror)
3486 u.warn(_("abort: %s\n") % inst.strerror)
3486 except util.Abort, inst:
3487 except util.Abort, inst:
3487 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3488 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3488 except TypeError, inst:
3489 except TypeError, inst:
3489 # was this an argument error?
3490 # was this an argument error?
3490 tb = traceback.extract_tb(sys.exc_info()[2])
3491 tb = traceback.extract_tb(sys.exc_info()[2])
3491 if len(tb) > 2: # no
3492 if len(tb) > 2: # no
3492 raise
3493 raise
3493 u.debug(inst, "\n")
3494 u.debug(inst, "\n")
3494 u.warn(_("%s: invalid arguments\n") % cmd)
3495 u.warn(_("%s: invalid arguments\n") % cmd)
3495 help_(u, cmd)
3496 help_(u, cmd)
3496 except SystemExit, inst:
3497 except SystemExit, inst:
3497 # Commands shouldn't sys.exit directly, but give a return code.
3498 # Commands shouldn't sys.exit directly, but give a return code.
3498 # Just in case, catch this and pass the exit code to the caller.
3499 # Just in case, catch this and pass the exit code to the caller.
3499 return inst.code
3500 return inst.code
3500 except:
3501 except:
3501 u.warn(_("** unknown exception encountered, details follow\n"))
3502 u.warn(_("** unknown exception encountered, details follow\n"))
3502 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3503 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 % version.get_version())
3505 % version.get_version())
3505 raise
3506 raise
3506
3507
3507 return -1
3508 return -1
@@ -1,483 +1,483 @@
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "struct os time bisect stat util re errno")
13 demandload(globals(), "struct os time bisect stat util re errno")
14
14
15 class dirstate(object):
15 class dirstate(object):
16 format = ">cllll"
16 format = ">cllll"
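# Editor's note (illustrative, not part of the original changeset): each
# on-disk dirstate entry is
#     struct.pack(">cllll", state, mode, size, mtime, len(name))
# followed by the name itself (with an embedded '\0' and the copy source
# appended when the file was copied); parse() and write() below read and
# emit exactly this layout.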
17
17
18 def __init__(self, opener, ui, root):
18 def __init__(self, opener, ui, root):
19 self.opener = opener
19 self.opener = opener
20 self.root = root
20 self.root = root
21 self.dirty = 0
21 self.dirty = 0
22 self.ui = ui
22 self.ui = ui
23 self.map = None
23 self.map = None
24 self.pl = None
24 self.pl = None
25 self.copies = {}
25 self.copies = {}
26 self.ignorefunc = None
26 self.ignorefunc = None
27 self.blockignore = False
27 self.blockignore = False
28
28
29 def wjoin(self, f):
29 def wjoin(self, f):
30 return os.path.join(self.root, f)
30 return os.path.join(self.root, f)
31
31
32 def getcwd(self):
32 def getcwd(self):
33 cwd = os.getcwd()
33 cwd = os.getcwd()
34 if cwd == self.root: return ''
34 if cwd == self.root: return ''
35 return cwd[len(self.root) + 1:]
35 return cwd[len(self.root) + 1:]
36
36
37 def hgignore(self):
37 def hgignore(self):
38 '''return the contents of .hgignore files as a list of patterns.
38 '''return the contents of .hgignore files as a list of patterns.
39
39
40 the files parsed for patterns include:
40 the files parsed for patterns include:
41 .hgignore in the repository root
41 .hgignore in the repository root
42 any additional files specified in the [ui] section of ~/.hgrc
42 any additional files specified in the [ui] section of ~/.hgrc
43
43
44 trailing white space is dropped.
44 trailing white space is dropped.
45 the escape character is backslash.
45 the escape character is backslash.
46 comments start with #.
46 comments start with #.
47 empty lines are skipped.
47 empty lines are skipped.
48
48
49 lines can be of the following formats:
49 lines can be of the following formats:
50
50
51 syntax: regexp # defaults following lines to non-rooted regexps
51 syntax: regexp # defaults following lines to non-rooted regexps
52 syntax: glob # defaults following lines to non-rooted globs
52 syntax: glob # defaults following lines to non-rooted globs
53 re:pattern # non-rooted regular expression
53 re:pattern # non-rooted regular expression
54 glob:pattern # non-rooted glob
54 glob:pattern # non-rooted glob
55 pattern # pattern of the current default type'''
55 pattern # pattern of the current default type'''
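# Editor's illustrative example (not part of the original changeset): a
# .hgignore file using the syntax described above might look like:
#
#     syntax: glob
#     *.orig
#     *.rej
#     syntax: regexp
#     \.py[co]$
#
# with the default syntax (regexp) applying to any patterns listed before
# the first "syntax:" line.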
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
57 def parselines(fp):
57 def parselines(fp):
58 for line in fp:
58 for line in fp:
59 escape = False
59 escape = False
60 for i in xrange(len(line)):
60 for i in xrange(len(line)):
61 if escape: escape = False
61 if escape: escape = False
62 elif line[i] == '\\': escape = True
62 elif line[i] == '\\': escape = True
63 elif line[i] == '#': break
63 elif line[i] == '#': break
64 line = line[:i].rstrip()
64 line = line[:i].rstrip()
65 if line: yield line
65 if line: yield line
66 repoignore = self.wjoin('.hgignore')
66 repoignore = self.wjoin('.hgignore')
67 files = [repoignore]
67 files = [repoignore]
68 files.extend(self.ui.hgignorefiles())
68 files.extend(self.ui.hgignorefiles())
69 pats = {}
69 pats = {}
70 for f in files:
70 for f in files:
71 try:
71 try:
72 pats[f] = []
72 pats[f] = []
73 fp = open(f)
73 fp = open(f)
74 syntax = 'relre:'
74 syntax = 'relre:'
75 for line in parselines(fp):
75 for line in parselines(fp):
76 if line.startswith('syntax:'):
76 if line.startswith('syntax:'):
77 s = line[7:].strip()
77 s = line[7:].strip()
78 try:
78 try:
79 syntax = syntaxes[s]
79 syntax = syntaxes[s]
80 except KeyError:
80 except KeyError:
81 self.ui.warn(_("%s: ignoring invalid "
81 self.ui.warn(_("%s: ignoring invalid "
82 "syntax '%s'\n") % (f, s))
82 "syntax '%s'\n") % (f, s))
83 continue
83 continue
84 pat = syntax + line
84 pat = syntax + line
85 for s in syntaxes.values():
85 for s in syntaxes.values():
86 if line.startswith(s):
86 if line.startswith(s):
87 pat = line
87 pat = line
88 break
88 break
89 pats[f].append(pat)
89 pats[f].append(pat)
90 except IOError, inst:
90 except IOError, inst:
91 if f != repoignore:
91 if f != repoignore:
92 self.ui.warn(_("skipping unreadable ignore file"
92 self.ui.warn(_("skipping unreadable ignore file"
93 " '%s': %s\n") % (f, inst.strerror))
93 " '%s': %s\n") % (f, inst.strerror))
94 return pats
94 return pats
95
95
96 def ignore(self, fn):
96 def ignore(self, fn):
97 '''default match function used by dirstate and
97 '''default match function used by dirstate and
98 localrepository. this honours the repository .hgignore file
98 localrepository. this honours the repository .hgignore file
99 and any other files specified in the [ui] section of .hgrc.'''
99 and any other files specified in the [ui] section of .hgrc.'''
100 if self.blockignore:
100 if self.blockignore:
101 return False
101 return False
102 if not self.ignorefunc:
102 if not self.ignorefunc:
103 ignore = self.hgignore()
103 ignore = self.hgignore()
104 allpats = []
104 allpats = []
105 [allpats.extend(patlist) for patlist in ignore.values()]
105 [allpats.extend(patlist) for patlist in ignore.values()]
106 if allpats:
106 if allpats:
107 try:
107 try:
108 files, self.ignorefunc, anypats = (
108 files, self.ignorefunc, anypats = (
109 util.matcher(self.root, inc=allpats, src='.hgignore'))
109 util.matcher(self.root, inc=allpats, src='.hgignore'))
110 except util.Abort:
110 except util.Abort:
111 # Re-raise an exception where the src is the right file
111 # Re-raise an exception where the src is the right file
112 for f, patlist in ignore.items():
112 for f, patlist in ignore.items():
113 files, self.ignorefunc, anypats = (
113 files, self.ignorefunc, anypats = (
114 util.matcher(self.root, inc=patlist, src=f))
114 util.matcher(self.root, inc=patlist, src=f))
115 else:
115 else:
116 self.ignorefunc = util.never
116 self.ignorefunc = util.never
117 return self.ignorefunc(fn)
117 return self.ignorefunc(fn)
118
118
119 def __del__(self):
119 def __del__(self):
120 if self.dirty:
120 if self.dirty:
121 self.write()
121 self.write()
122
122
123 def __getitem__(self, key):
123 def __getitem__(self, key):
124 try:
124 try:
125 return self.map[key]
125 return self.map[key]
126 except TypeError:
126 except TypeError:
127 self.lazyread()
127 self.lazyread()
128 return self[key]
128 return self[key]
129
129
130 def __contains__(self, key):
130 def __contains__(self, key):
131 self.lazyread()
131 self.lazyread()
132 return key in self.map
132 return key in self.map
133
133
134 def parents(self):
134 def parents(self):
135 self.lazyread()
135 self.lazyread()
136 return self.pl
136 return self.pl
137
137
138 def markdirty(self):
138 def markdirty(self):
139 if not self.dirty:
139 if not self.dirty:
140 self.dirty = 1
140 self.dirty = 1
141
141
142 def setparents(self, p1, p2=nullid):
142 def setparents(self, p1, p2=nullid):
143 self.lazyread()
143 self.lazyread()
144 self.markdirty()
144 self.markdirty()
145 self.pl = p1, p2
145 self.pl = p1, p2
146
146
147 def state(self, key):
147 def state(self, key):
148 try:
148 try:
149 return self[key][0]
149 return self[key][0]
150 except KeyError:
150 except KeyError:
151 return "?"
151 return "?"
152
152
153 def lazyread(self):
153 def lazyread(self):
154 if self.map is None:
154 if self.map is None:
155 self.read()
155 self.read()
156
156
157 def parse(self, st):
157 def parse(self, st):
158 self.pl = [st[:20], st[20: 40]]
158 self.pl = [st[:20], st[20: 40]]
159
159
160 # deref fields so they will be local in loop
160 # deref fields so they will be local in loop
161 map = self.map
161 map = self.map
162 copies = self.copies
162 copies = self.copies
163 format = self.format
163 format = self.format
164 unpack = struct.unpack
164 unpack = struct.unpack
165
165
166 pos = 40
166 pos = 40
167 e_size = struct.calcsize(format)
167 e_size = struct.calcsize(format)
168
168
169 while pos < len(st):
169 while pos < len(st):
170 newpos = pos + e_size
170 newpos = pos + e_size
171 e = unpack(format, st[pos:newpos])
171 e = unpack(format, st[pos:newpos])
172 l = e[4]
172 l = e[4]
173 pos = newpos
173 pos = newpos
174 newpos = pos + l
174 newpos = pos + l
175 f = st[pos:newpos]
175 f = st[pos:newpos]
176 if '\0' in f:
176 if '\0' in f:
177 f, c = f.split('\0')
177 f, c = f.split('\0')
178 copies[f] = c
178 copies[f] = c
179 map[f] = e[:4]
179 map[f] = e[:4]
180 pos = newpos
180 pos = newpos
181
181
182 def read(self):
182 def read(self):
183 self.map = {}
183 self.map = {}
184 self.pl = [nullid, nullid]
184 self.pl = [nullid, nullid]
185 try:
185 try:
186 st = self.opener("dirstate").read()
186 st = self.opener("dirstate").read()
187 if st:
187 if st:
188 self.parse(st)
188 self.parse(st)
189 except IOError, err:
189 except IOError, err:
190 if err.errno != errno.ENOENT: raise
190 if err.errno != errno.ENOENT: raise
191
191
192 def copy(self, source, dest):
192 def copy(self, source, dest):
193 self.lazyread()
193 self.lazyread()
194 self.markdirty()
194 self.markdirty()
195 self.copies[dest] = source
195 self.copies[dest] = source
196
196
197 def copied(self, file):
197 def copied(self, file):
198 return self.copies.get(file, None)
198 return self.copies.get(file, None)
199
199
200 def update(self, files, state, **kw):
200 def update(self, files, state, **kw):
201 ''' current states:
201 ''' current states:
202 n normal
202 n normal
203 m needs merging
203 m needs merging
204 r marked for removal
204 r marked for removal
205 a marked for addition'''
205 a marked for addition'''
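# Editor's illustrative example (not part of the original changeset), for a
# hypothetical dirstate instance ds:
#     ds.update(['a.txt'], 'a')   # records ('a', st_mode, st_size, st_mtime)
#     ds.update(['b.txt'], 'r')   # records the fixed tuple ('r', 0, 0, 0)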
206
206
207 if not files: return
207 if not files: return
208 self.lazyread()
208 self.lazyread()
209 self.markdirty()
209 self.markdirty()
210 for f in files:
210 for f in files:
211 if state == "r":
211 if state == "r":
212 self.map[f] = ('r', 0, 0, 0)
212 self.map[f] = ('r', 0, 0, 0)
213 else:
213 else:
214 s = os.lstat(self.wjoin(f))
214 s = os.lstat(self.wjoin(f))
215 st_size = kw.get('st_size', s.st_size)
215 st_size = kw.get('st_size', s.st_size)
216 st_mtime = kw.get('st_mtime', s.st_mtime)
216 st_mtime = kw.get('st_mtime', s.st_mtime)
217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
217 self.map[f] = (state, s.st_mode, st_size, st_mtime)
218 if self.copies.has_key(f):
218 if self.copies.has_key(f):
219 del self.copies[f]
219 del self.copies[f]
220
220
221 def forget(self, files):
221 def forget(self, files):
222 if not files: return
222 if not files: return
223 self.lazyread()
223 self.lazyread()
224 self.markdirty()
224 self.markdirty()
225 for f in files:
225 for f in files:
226 try:
226 try:
227 del self.map[f]
227 del self.map[f]
228 except KeyError:
228 except KeyError:
229 self.ui.warn(_("not in dirstate: %s!\n") % f)
229 self.ui.warn(_("not in dirstate: %s!\n") % f)
230 pass
230 pass
231
231
232 def clear(self):
232 def clear(self):
233 self.map = {}
233 self.map = {}
234 self.copies = {}
234 self.copies = {}
235 self.markdirty()
235 self.markdirty()
236
236
237 def rebuild(self, parent, files):
237 def rebuild(self, parent, files):
238 self.clear()
238 self.clear()
239 umask = os.umask(0)
239 umask = os.umask(0)
240 os.umask(umask)
240 os.umask(umask)
241 for f, mode in files:
241 for f in files:
242 if mode:
242 if files.execf(f):
243 self.map[f] = ('n', ~umask, -1, 0)
243 self.map[f] = ('n', ~umask, -1, 0)
244 else:
244 else:
245 self.map[f] = ('n', ~umask & 0666, -1, 0)
245 self.map[f] = ('n', ~umask & 0666, -1, 0)
246 self.pl = (parent, nullid)
246 self.pl = (parent, nullid)
247 self.markdirty()
247 self.markdirty()
248
248
249 def write(self):
249 def write(self):
250 if not self.dirty:
250 if not self.dirty:
251 return
251 return
252 st = self.opener("dirstate", "w", atomic=True)
252 st = self.opener("dirstate", "w", atomic=True)
253 st.write("".join(self.pl))
253 st.write("".join(self.pl))
254 for f, e in self.map.items():
254 for f, e in self.map.items():
255 c = self.copied(f)
255 c = self.copied(f)
256 if c:
256 if c:
257 f = f + "\0" + c
257 f = f + "\0" + c
258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
258 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
259 st.write(e + f)
259 st.write(e + f)
260 self.dirty = 0
260 self.dirty = 0
261
261
262 def filterfiles(self, files):
262 def filterfiles(self, files):
263 ret = {}
263 ret = {}
264 unknown = []
264 unknown = []
265
265
266 for x in files:
266 for x in files:
267 if x == '.':
267 if x == '.':
268 return self.map.copy()
268 return self.map.copy()
269 if x not in self.map:
269 if x not in self.map:
270 unknown.append(x)
270 unknown.append(x)
271 else:
271 else:
272 ret[x] = self.map[x]
272 ret[x] = self.map[x]
273
273
274 if not unknown:
274 if not unknown:
275 return ret
275 return ret
276
276
277 b = self.map.keys()
277 b = self.map.keys()
278 b.sort()
278 b.sort()
279 blen = len(b)
279 blen = len(b)
280
280
281 for x in unknown:
281 for x in unknown:
282 bs = bisect.bisect(b, "%s%s" % (x, '/'))
282 bs = bisect.bisect(b, "%s%s" % (x, '/'))
283 while bs < blen:
283 while bs < blen:
284 s = b[bs]
284 s = b[bs]
285 if len(s) > len(x) and s.startswith(x):
285 if len(s) > len(x) and s.startswith(x):
286 ret[s] = self.map[s]
286 ret[s] = self.map[s]
287 else:
287 else:
288 break
288 break
289 bs += 1
289 bs += 1
290 return ret
290 return ret
291
291
292 def supported_type(self, f, st, verbose=False):
292 def supported_type(self, f, st, verbose=False):
293 if stat.S_ISREG(st.st_mode):
293 if stat.S_ISREG(st.st_mode):
294 return True
294 return True
295 if verbose:
295 if verbose:
296 kind = 'unknown'
296 kind = 'unknown'
297 if stat.S_ISCHR(st.st_mode): kind = _('character device')
297 if stat.S_ISCHR(st.st_mode): kind = _('character device')
298 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
298 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
299 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
299 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
300 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
300 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
301 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
302 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
303 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
303 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
304 util.pathto(self.getcwd(), f),
304 util.pathto(self.getcwd(), f),
305 kind))
305 kind))
306 return False
306 return False
307
307
308 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
308 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
309 badmatch=None):
309 badmatch=None):
310 self.lazyread()
310 self.lazyread()
311
311
312 # walk all files by default
312 # walk all files by default
313 if not files:
313 if not files:
314 files = [self.root]
314 files = [self.root]
315 if not dc:
315 if not dc:
316 dc = self.map.copy()
316 dc = self.map.copy()
317 elif not dc:
317 elif not dc:
318 dc = self.filterfiles(files)
318 dc = self.filterfiles(files)
319
319
320 def statmatch(file_, stat):
320 def statmatch(file_, stat):
321 file_ = util.pconvert(file_)
321 file_ = util.pconvert(file_)
322 if not ignored and file_ not in dc and self.ignore(file_):
322 if not ignored and file_ not in dc and self.ignore(file_):
323 return False
323 return False
324 return match(file_)
324 return match(file_)
325
325
326 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
326 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
327 badmatch=badmatch)
327 badmatch=badmatch)
328
328
329 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
329 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
330 # filter out the stat
330 # filter out the stat
331 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
331 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
332 yield src, f
332 yield src, f
333
333
334 # walk recursively through the directory tree, finding all files
334 # walk recursively through the directory tree, finding all files
335 # matched by the statmatch function
335 # matched by the statmatch function
336 #
336 #
337 # results are yielded in a tuple (src, filename, st), where src
337 # results are yielded in a tuple (src, filename, st), where src
338 # is one of:
338 # is one of:
339 # 'f' the file was found in the directory tree
339 # 'f' the file was found in the directory tree
340 # 'm' the file was only in the dirstate and not in the tree
340 # 'm' the file was only in the dirstate and not in the tree
341 # and st is the stat result if the file was found in the directory.
341 # and st is the stat result if the file was found in the directory.
342 #
342 #
343 # dc is an optional arg for the current dirstate. dc is not modified
343 # dc is an optional arg for the current dirstate. dc is not modified
344 # directly by this function, but might be modified by your statmatch call.
344 # directly by this function, but might be modified by your statmatch call.
345 #
345 #
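# Editor's illustrative example (not part of the original changeset), for a
# hypothetical dirstate instance ds (callers normally go through the
# statwalk()/walk() wrappers above):
#     for src, fn, st in ds.statwalk(['src'], match=util.always):
#         ...   # src is 'f', 'm' or 'b'; st is None when the file is missing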
346 def walkhelper(self, files, statmatch, dc, badmatch=None):
346 def walkhelper(self, files, statmatch, dc, badmatch=None):
347 # recursion free walker, faster than os.walk.
347 # recursion free walker, faster than os.walk.
348 def findfiles(s):
348 def findfiles(s):
349 work = [s]
349 work = [s]
350 while work:
350 while work:
351 top = work.pop()
351 top = work.pop()
352 names = os.listdir(top)
352 names = os.listdir(top)
353 names.sort()
353 names.sort()
354 # nd is the top of the repository dir tree
354 # nd is the top of the repository dir tree
355 nd = util.normpath(top[len(self.root) + 1:])
355 nd = util.normpath(top[len(self.root) + 1:])
356 if nd == '.':
356 if nd == '.':
357 nd = ''
357 nd = ''
358 else:
358 else:
359 # do not recurse into a repo contained in this
359 # do not recurse into a repo contained in this
360 # one. use bisect to find the .hg directory so the
360 # one. use bisect to find the .hg directory so the
361 # lookup stays fast in big directories.
361 # lookup stays fast in big directories.
362 hg = bisect.bisect_left(names, '.hg')
362 hg = bisect.bisect_left(names, '.hg')
363 if hg < len(names) and names[hg] == '.hg':
363 if hg < len(names) and names[hg] == '.hg':
364 if os.path.isdir(os.path.join(top, '.hg')):
364 if os.path.isdir(os.path.join(top, '.hg')):
365 continue
365 continue
366 for f in names:
366 for f in names:
367 np = util.pconvert(os.path.join(nd, f))
367 np = util.pconvert(os.path.join(nd, f))
368 if seen(np):
368 if seen(np):
369 continue
369 continue
370 p = os.path.join(top, f)
370 p = os.path.join(top, f)
371 # don't trip over symlinks
371 # don't trip over symlinks
372 st = os.lstat(p)
372 st = os.lstat(p)
373 if stat.S_ISDIR(st.st_mode):
373 if stat.S_ISDIR(st.st_mode):
374 ds = os.path.join(nd, f +'/')
374 ds = os.path.join(nd, f +'/')
375 if statmatch(ds, st):
375 if statmatch(ds, st):
376 work.append(p)
376 work.append(p)
377 if statmatch(np, st) and np in dc:
377 if statmatch(np, st) and np in dc:
378 yield 'm', np, st
378 yield 'm', np, st
379 elif statmatch(np, st):
379 elif statmatch(np, st):
380 if self.supported_type(np, st):
380 if self.supported_type(np, st):
381 yield 'f', np, st
381 yield 'f', np, st
382 elif np in dc:
382 elif np in dc:
383 yield 'm', np, st
383 yield 'm', np, st
384
384
385 known = {'.hg': 1}
385 known = {'.hg': 1}
386 def seen(fn):
386 def seen(fn):
387 if fn in known: return True
387 if fn in known: return True
388 known[fn] = 1
388 known[fn] = 1
389
389
390 # step one, find all files that match our criteria
390 # step one, find all files that match our criteria
391 files.sort()
391 files.sort()
392 for ff in util.unique(files):
392 for ff in util.unique(files):
393 f = self.wjoin(ff)
393 f = self.wjoin(ff)
394 try:
394 try:
395 st = os.lstat(f)
395 st = os.lstat(f)
396 except OSError, inst:
396 except OSError, inst:
397 nf = util.normpath(ff)
397 nf = util.normpath(ff)
398 found = False
398 found = False
399 for fn in dc:
399 for fn in dc:
400 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
400 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
401 found = True
401 found = True
402 break
402 break
403 if not found:
403 if not found:
404 if inst.errno != errno.ENOENT or not badmatch:
404 if inst.errno != errno.ENOENT or not badmatch:
405 self.ui.warn('%s: %s\n' % (
405 self.ui.warn('%s: %s\n' % (
406 util.pathto(self.getcwd(), ff),
406 util.pathto(self.getcwd(), ff),
407 inst.strerror))
407 inst.strerror))
408 elif badmatch and badmatch(ff) and statmatch(ff, None):
408 elif badmatch and badmatch(ff) and statmatch(ff, None):
409 yield 'b', ff, None
409 yield 'b', ff, None
410 continue
410 continue
411 if stat.S_ISDIR(st.st_mode):
411 if stat.S_ISDIR(st.st_mode):
412 cmp1 = (lambda x, y: cmp(x[1], y[1]))
412 cmp1 = (lambda x, y: cmp(x[1], y[1]))
413 sorted_ = [ x for x in findfiles(f) ]
413 sorted_ = [ x for x in findfiles(f) ]
414 sorted_.sort(cmp1)
414 sorted_.sort(cmp1)
415 for e in sorted_:
415 for e in sorted_:
416 yield e
416 yield e
417 else:
417 else:
418 ff = util.normpath(ff)
418 ff = util.normpath(ff)
419 if seen(ff):
419 if seen(ff):
420 continue
420 continue
421 self.blockignore = True
421 self.blockignore = True
422 if statmatch(ff, st):
422 if statmatch(ff, st):
423 if self.supported_type(ff, st, verbose=True):
423 if self.supported_type(ff, st, verbose=True):
424 yield 'f', ff, st
424 yield 'f', ff, st
425 elif ff in dc:
425 elif ff in dc:
426 yield 'm', ff, st
426 yield 'm', ff, st
427 self.blockignore = False
427 self.blockignore = False
428
428
429 # step two: run through anything left in the dc hash and yield it
429 # step two: run through anything left in the dc hash and yield it
430 # if we haven't already seen it
430 # if we haven't already seen it
431 ks = dc.keys()
431 ks = dc.keys()
432 ks.sort()
432 ks.sort()
433 for k in ks:
433 for k in ks:
434 if not seen(k) and (statmatch(k, None)):
434 if not seen(k) and (statmatch(k, None)):
435 yield 'm', k, None
435 yield 'm', k, None
436
436
437 def changes(self, files=None, match=util.always, show_ignored=None):
437 def changes(self, files=None, match=util.always, show_ignored=None):
438 lookup, modified, added, unknown, ignored = [], [], [], [], []
438 lookup, modified, added, unknown, ignored = [], [], [], [], []
439 removed, deleted = [], []
439 removed, deleted = [], []
440
440
441 for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
441 for src, fn, st in self.statwalk(files, match, ignored=show_ignored):
442 try:
442 try:
443 type_, mode, size, time = self[fn]
443 type_, mode, size, time = self[fn]
444 except KeyError:
444 except KeyError:
445 if show_ignored and self.ignore(fn):
445 if show_ignored and self.ignore(fn):
446 ignored.append(fn)
446 ignored.append(fn)
447 else:
447 else:
448 unknown.append(fn)
448 unknown.append(fn)
449 continue
449 continue
450 if src == 'm':
450 if src == 'm':
451 nonexistent = True
451 nonexistent = True
452 if not st:
452 if not st:
453 try:
453 try:
454 st = os.lstat(self.wjoin(fn))
454 st = os.lstat(self.wjoin(fn))
455 except OSError, inst:
455 except OSError, inst:
456 if inst.errno != errno.ENOENT:
456 if inst.errno != errno.ENOENT:
457 raise
457 raise
458 st = None
458 st = None
459 # We need to re-check that it is a valid file
459 # We need to re-check that it is a valid file
460 if st and self.supported_type(fn, st):
460 if st and self.supported_type(fn, st):
461 nonexistent = False
461 nonexistent = False
462 # XXX: what to do with files no longer present in the fs
462 # XXX: what to do with files no longer present in the fs
463 # that are not marked as removed in the dirstate?
463 # that are not marked as removed in the dirstate?
464 if nonexistent and type_ in "nm":
464 if nonexistent and type_ in "nm":
465 deleted.append(fn)
465 deleted.append(fn)
466 continue
466 continue
467 # check the common case first
467 # check the common case first
468 if type_ == 'n':
468 if type_ == 'n':
469 if not st:
469 if not st:
470 st = os.lstat(self.wjoin(fn))
470 st = os.lstat(self.wjoin(fn))
471 if size >= 0 and (size != st.st_size
471 if size >= 0 and (size != st.st_size
472 or (mode ^ st.st_mode) & 0100):
472 or (mode ^ st.st_mode) & 0100):
473 modified.append(fn)
473 modified.append(fn)
474 elif time != st.st_mtime:
474 elif time != st.st_mtime:
475 lookup.append(fn)
475 lookup.append(fn)
476 elif type_ == 'm':
476 elif type_ == 'm':
477 modified.append(fn)
477 modified.append(fn)
478 elif type_ == 'a':
478 elif type_ == 'a':
479 added.append(fn)
479 added.append(fn)
480 elif type_ == 'r':
480 elif type_ == 'r':
481 removed.append(fn)
481 removed.append(fn)
482
482
483 return (lookup, modified, added, removed, deleted, unknown, ignored)
483 return (lookup, modified, added, removed, deleted, unknown, ignored)
@@ -1,2212 +1,2211 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "appendfile changegroup")
11 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "changelog dirstate filelog manifest repo context")
12 demandload(globals(), "changelog dirstate filelog manifest repo context")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "os revlog util")
14 demandload(globals(), "os revlog util")
15
15
16 class localrepository(object):
16 class localrepository(object):
17 capabilities = ()
17 capabilities = ()
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 if not path:
22 if not path:
23 p = os.getcwd()
23 p = os.getcwd()
24 while not os.path.isdir(os.path.join(p, ".hg")):
24 while not os.path.isdir(os.path.join(p, ".hg")):
25 oldp = p
25 oldp = p
26 p = os.path.dirname(p)
26 p = os.path.dirname(p)
27 if p == oldp:
27 if p == oldp:
28 raise repo.RepoError(_("no repo found"))
28 raise repo.RepoError(_("no repo found"))
29 path = p
29 path = p
30 self.path = os.path.join(path, ".hg")
30 self.path = os.path.join(path, ".hg")
31
31
32 if not create and not os.path.isdir(self.path):
32 if not create and not os.path.isdir(self.path):
33 raise repo.RepoError(_("repository %s not found") % path)
33 raise repo.RepoError(_("repository %s not found") % path)
34
34
35 self.root = os.path.abspath(path)
35 self.root = os.path.abspath(path)
36 self.origroot = path
36 self.origroot = path
37 self.ui = ui.ui(parentui=parentui)
37 self.ui = ui.ui(parentui=parentui)
38 self.opener = util.opener(self.path)
38 self.opener = util.opener(self.path)
39 self.wopener = util.opener(self.root)
39 self.wopener = util.opener(self.root)
40
40
41 try:
41 try:
42 self.ui.readconfig(self.join("hgrc"), self.root)
42 self.ui.readconfig(self.join("hgrc"), self.root)
43 except IOError:
43 except IOError:
44 pass
44 pass
45
45
46 v = self.ui.revlogopts
46 v = self.ui.revlogopts
47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
49 fl = v.get('flags', None)
49 fl = v.get('flags', None)
50 flags = 0
50 flags = 0
51 if fl != None:
51 if fl != None:
52 for x in fl.split():
52 for x in fl.split():
53 flags |= revlog.flagstr(x)
53 flags |= revlog.flagstr(x)
54 elif self.revlogv1:
54 elif self.revlogv1:
55 flags = revlog.REVLOG_DEFAULT_FLAGS
55 flags = revlog.REVLOG_DEFAULT_FLAGS
56
56
57 v = self.revlogversion | flags
57 v = self.revlogversion | flags
58 self.manifest = manifest.manifest(self.opener, v)
58 self.manifest = manifest.manifest(self.opener, v)
59 self.changelog = changelog.changelog(self.opener, v)
59 self.changelog = changelog.changelog(self.opener, v)
60
60
61 # the changelog might not have the inline index flag
61 # the changelog might not have the inline index flag
62 # on. If the format of the changelog is the same as found in
62 # on. If the format of the changelog is the same as found in
63 # .hgrc, apply any flags found in the .hgrc as well.
63 # .hgrc, apply any flags found in the .hgrc as well.
64 # Otherwise, just use the version from the changelog
64 # Otherwise, just use the version from the changelog
65 v = self.changelog.version
65 v = self.changelog.version
66 if v == self.revlogversion:
66 if v == self.revlogversion:
67 v |= flags
67 v |= flags
68 self.revlogversion = v
68 self.revlogversion = v
69
69
70 self.tagscache = None
70 self.tagscache = None
71 self.nodetagscache = None
71 self.nodetagscache = None
72 self.encodepats = None
72 self.encodepats = None
73 self.decodepats = None
73 self.decodepats = None
74 self.transhandle = None
74 self.transhandle = None
75
75
76 if create:
76 if create:
77 if not os.path.exists(path):
77 if not os.path.exists(path):
78 os.mkdir(path)
78 os.mkdir(path)
79 os.mkdir(self.path)
79 os.mkdir(self.path)
80 os.mkdir(self.join("data"))
80 os.mkdir(self.join("data"))
81
81
82 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
82 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
83
83
84 def hook(self, name, throw=False, **args):
84 def hook(self, name, throw=False, **args):
85 def callhook(hname, funcname):
85 def callhook(hname, funcname):
86 '''call python hook. hook is callable object, looked up as
86 '''call python hook. hook is callable object, looked up as
87 name in python module. if callable returns "true", hook
87 name in python module. if callable returns "true", hook
88 fails, else passes. if hook raises exception, treated as
88 fails, else passes. if hook raises exception, treated as
89 hook failure. exception propagates if throw is "true".
89 hook failure. exception propagates if throw is "true".
90
90
91 reason for "true" meaning "hook failed" is so that
91 reason for "true" meaning "hook failed" is so that
92 unmodified commands (e.g. mercurial.commands.update) can
92 unmodified commands (e.g. mercurial.commands.update) can
93 be run as hooks without wrappers to convert return values.'''
93 be run as hooks without wrappers to convert return values.'''
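# Editor's illustrative example (not part of the original changeset): hooks
# are read from the [hooks] section of hgrc, e.g.
#     [hooks]
#     commit.check = python:hgext_example.checkhook    # hypothetical module
#     changegroup.notify = /usr/local/bin/notify-hook  # hypothetical script
# the "python:" form is dispatched to callhook(), anything else is run by
# runhook() as a shell command with HG_* variables in its environment.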
94
94
95 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
95 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
96 d = funcname.rfind('.')
96 d = funcname.rfind('.')
97 if d == -1:
97 if d == -1:
98 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
98 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
99 % (hname, funcname))
99 % (hname, funcname))
100 modname = funcname[:d]
100 modname = funcname[:d]
101 try:
101 try:
102 obj = __import__(modname)
102 obj = __import__(modname)
103 except ImportError:
103 except ImportError:
104 try:
104 try:
105 # extensions are loaded with hgext_ prefix
105 # extensions are loaded with hgext_ prefix
106 obj = __import__("hgext_%s" % modname)
106 obj = __import__("hgext_%s" % modname)
107 except ImportError:
107 except ImportError:
108 raise util.Abort(_('%s hook is invalid '
108 raise util.Abort(_('%s hook is invalid '
109 '(import of "%s" failed)') %
109 '(import of "%s" failed)') %
110 (hname, modname))
110 (hname, modname))
111 try:
111 try:
112 for p in funcname.split('.')[1:]:
112 for p in funcname.split('.')[1:]:
113 obj = getattr(obj, p)
113 obj = getattr(obj, p)
114 except AttributeError, err:
114 except AttributeError, err:
115 raise util.Abort(_('%s hook is invalid '
115 raise util.Abort(_('%s hook is invalid '
116 '("%s" is not defined)') %
116 '("%s" is not defined)') %
117 (hname, funcname))
117 (hname, funcname))
118 if not callable(obj):
118 if not callable(obj):
119 raise util.Abort(_('%s hook is invalid '
119 raise util.Abort(_('%s hook is invalid '
120 '("%s" is not callable)') %
120 '("%s" is not callable)') %
121 (hname, funcname))
121 (hname, funcname))
122 try:
122 try:
123 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
123 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
124 except (KeyboardInterrupt, util.SignalInterrupt):
124 except (KeyboardInterrupt, util.SignalInterrupt):
125 raise
125 raise
126 except Exception, exc:
126 except Exception, exc:
127 if isinstance(exc, util.Abort):
127 if isinstance(exc, util.Abort):
128 self.ui.warn(_('error: %s hook failed: %s\n') %
128 self.ui.warn(_('error: %s hook failed: %s\n') %
129 (hname, exc.args[0] % exc.args[1:]))
129 (hname, exc.args[0] % exc.args[1:]))
130 else:
130 else:
131 self.ui.warn(_('error: %s hook raised an exception: '
131 self.ui.warn(_('error: %s hook raised an exception: '
132 '%s\n') % (hname, exc))
132 '%s\n') % (hname, exc))
133 if throw:
133 if throw:
134 raise
134 raise
135 self.ui.print_exc()
135 self.ui.print_exc()
136 return True
136 return True
137 if r:
137 if r:
138 if throw:
138 if throw:
139 raise util.Abort(_('%s hook failed') % hname)
139 raise util.Abort(_('%s hook failed') % hname)
140 self.ui.warn(_('warning: %s hook failed\n') % hname)
140 self.ui.warn(_('warning: %s hook failed\n') % hname)
141 return r
141 return r
142
142
143 def runhook(name, cmd):
143 def runhook(name, cmd):
144 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
144 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
145 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
145 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
146 r = util.system(cmd, environ=env, cwd=self.root)
146 r = util.system(cmd, environ=env, cwd=self.root)
147 if r:
147 if r:
148 desc, r = util.explain_exit(r)
148 desc, r = util.explain_exit(r)
149 if throw:
149 if throw:
150 raise util.Abort(_('%s hook %s') % (name, desc))
150 raise util.Abort(_('%s hook %s') % (name, desc))
151 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
151 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
152 return r
152 return r
153
153
154 r = False
154 r = False
155 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
155 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
156 if hname.split(".", 1)[0] == name and cmd]
156 if hname.split(".", 1)[0] == name and cmd]
157 hooks.sort()
157 hooks.sort()
158 for hname, cmd in hooks:
158 for hname, cmd in hooks:
159 if cmd.startswith('python:'):
159 if cmd.startswith('python:'):
160 r = callhook(hname, cmd[7:].strip()) or r
160 r = callhook(hname, cmd[7:].strip()) or r
161 else:
161 else:
162 r = runhook(hname, cmd) or r
162 r = runhook(hname, cmd) or r
163 return r
163 return r
164
164
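# a minimal sketch of the kind of Python hook dispatched above; the
# module and function names are hypothetical and would be wired up in
# hgrc as:
#   [hooks]
#   pretag.nospaces = python:myhooks.nospaces
def nospaces(ui, repo, hooktype, **kwargs):
    if ' ' in kwargs.get('tag', ''):
        ui.warn('tag names must not contain spaces\n')
        return True    # a true result marks the hook as failed
    return False       # a false result lets the operation proceed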
165 tag_disallowed = ':\r\n'
165 tag_disallowed = ':\r\n'
166
166
167 def tag(self, name, node, local=False, message=None, user=None, date=None):
167 def tag(self, name, node, local=False, message=None, user=None, date=None):
168 '''tag a revision with a symbolic name.
168 '''tag a revision with a symbolic name.
169
169
170 if local is True, the tag is stored in a per-repository file.
170 if local is True, the tag is stored in a per-repository file.
171 otherwise, it is stored in the .hgtags file, and a new
171 otherwise, it is stored in the .hgtags file, and a new
172 changeset is committed with the change.
172 changeset is committed with the change.
173
173
174 keyword arguments:
174 keyword arguments:
175
175
176 local: whether to store tag in non-version-controlled file
176 local: whether to store tag in non-version-controlled file
177 (default False)
177 (default False)
178
178
179 message: commit message to use if committing
179 message: commit message to use if committing
180
180
181 user: name of user to use if committing
181 user: name of user to use if committing
182
182
183 date: date tuple to use if committing'''
183 date: date tuple to use if committing'''
184
184
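# both .hgtags and localtags hold one tag per line in the form
# "<40-digit hex changeset node> <tag name>", e.g. (made-up node):
#   0123456789abcdef0123456789abcdef01234567 v1.0
# which is what parsetag() in tags() below parses back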
185 for c in self.tag_disallowed:
185 for c in self.tag_disallowed:
186 if c in name:
186 if c in name:
187 raise util.Abort(_('%r cannot be used in a tag name') % c)
187 raise util.Abort(_('%r cannot be used in a tag name') % c)
188
188
189 self.hook('pretag', throw=True, node=node, tag=name, local=local)
189 self.hook('pretag', throw=True, node=node, tag=name, local=local)
190
190
191 if local:
191 if local:
192 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
192 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
193 self.hook('tag', node=node, tag=name, local=local)
193 self.hook('tag', node=node, tag=name, local=local)
194 return
194 return
195
195
196 for x in self.changes():
196 for x in self.changes():
197 if '.hgtags' in x:
197 if '.hgtags' in x:
198 raise util.Abort(_('working copy of .hgtags is changed '
198 raise util.Abort(_('working copy of .hgtags is changed '
199 '(please commit .hgtags manually)'))
199 '(please commit .hgtags manually)'))
200
200
201 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
201 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
202 if self.dirstate.state('.hgtags') == '?':
202 if self.dirstate.state('.hgtags') == '?':
203 self.add(['.hgtags'])
203 self.add(['.hgtags'])
204
204
205 if not message:
205 if not message:
206 message = _('Added tag %s for changeset %s') % (name, node)
206 message = _('Added tag %s for changeset %s') % (name, node)
207
207
208 self.commit(['.hgtags'], message, user, date)
208 self.commit(['.hgtags'], message, user, date)
209 self.hook('tag', node=node, tag=name, local=local)
209 self.hook('tag', node=node, tag=name, local=local)
210
210
211 def tags(self):
211 def tags(self):
212 '''return a mapping of tag to node'''
212 '''return a mapping of tag to node'''
213 if not self.tagscache:
213 if not self.tagscache:
214 self.tagscache = {}
214 self.tagscache = {}
215
215
216 def parsetag(line, context):
216 def parsetag(line, context):
217 if not line:
217 if not line:
218 return
218 return
219 s = line.split(" ", 1)
219 s = line.split(" ", 1)
220 if len(s) != 2:
220 if len(s) != 2:
221 self.ui.warn(_("%s: cannot parse entry\n") % context)
221 self.ui.warn(_("%s: cannot parse entry\n") % context)
222 return
222 return
223 node, key = s
223 node, key = s
224 key = key.strip()
224 key = key.strip()
225 try:
225 try:
226 bin_n = bin(node)
226 bin_n = bin(node)
227 except TypeError:
227 except TypeError:
228 self.ui.warn(_("%s: node '%s' is not well formed\n") %
228 self.ui.warn(_("%s: node '%s' is not well formed\n") %
229 (context, node))
229 (context, node))
230 return
230 return
231 if bin_n not in self.changelog.nodemap:
231 if bin_n not in self.changelog.nodemap:
232 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
232 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
233 (context, key))
233 (context, key))
234 return
234 return
235 self.tagscache[key] = bin_n
235 self.tagscache[key] = bin_n
236
236
237 # read the tags file from each head, ending with the tip,
237 # read the tags file from each head, ending with the tip,
238 # and add each tag found to the map, with "newer" ones
238 # and add each tag found to the map, with "newer" ones
239 # taking precedence
239 # taking precedence
240 heads = self.heads()
240 heads = self.heads()
241 heads.reverse()
241 heads.reverse()
242 fl = self.file(".hgtags")
242 fl = self.file(".hgtags")
243 for node in heads:
243 for node in heads:
244 change = self.changelog.read(node)
244 change = self.changelog.read(node)
245 rev = self.changelog.rev(node)
245 rev = self.changelog.rev(node)
246 fn, ff = self.manifest.find(change[0], '.hgtags')
246 fn, ff = self.manifest.find(change[0], '.hgtags')
247 if fn is None: continue
247 if fn is None: continue
248 count = 0
248 count = 0
249 for l in fl.read(fn).splitlines():
249 for l in fl.read(fn).splitlines():
250 count += 1
250 count += 1
251 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
251 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
252 (rev, short(node), count))
252 (rev, short(node), count))
253 try:
253 try:
254 f = self.opener("localtags")
254 f = self.opener("localtags")
255 count = 0
255 count = 0
256 for l in f:
256 for l in f:
257 count += 1
257 count += 1
258 parsetag(l, _("localtags, line %d") % count)
258 parsetag(l, _("localtags, line %d") % count)
259 except IOError:
259 except IOError:
260 pass
260 pass
261
261
262 self.tagscache['tip'] = self.changelog.tip()
262 self.tagscache['tip'] = self.changelog.tip()
263
263
264 return self.tagscache
264 return self.tagscache
265
265
266 def tagslist(self):
266 def tagslist(self):
267 '''return a list of tags ordered by revision'''
267 '''return a list of tags ordered by revision'''
268 l = []
268 l = []
269 for t, n in self.tags().items():
269 for t, n in self.tags().items():
270 try:
270 try:
271 r = self.changelog.rev(n)
271 r = self.changelog.rev(n)
272 except:
272 except:
273 r = -2 # sort to the beginning of the list if unknown
273 r = -2 # sort to the beginning of the list if unknown
274 l.append((r, t, n))
274 l.append((r, t, n))
275 l.sort()
275 l.sort()
276 return [(t, n) for r, t, n in l]
276 return [(t, n) for r, t, n in l]
277
277
278 def nodetags(self, node):
278 def nodetags(self, node):
279 '''return the tags associated with a node'''
279 '''return the tags associated with a node'''
280 if not self.nodetagscache:
280 if not self.nodetagscache:
281 self.nodetagscache = {}
281 self.nodetagscache = {}
282 for t, n in self.tags().items():
282 for t, n in self.tags().items():
283 self.nodetagscache.setdefault(n, []).append(t)
283 self.nodetagscache.setdefault(n, []).append(t)
284 return self.nodetagscache.get(node, [])
284 return self.nodetagscache.get(node, [])
285
285
286 def lookup(self, key):
286 def lookup(self, key):
287 try:
287 try:
288 return self.tags()[key]
288 return self.tags()[key]
289 except KeyError:
289 except KeyError:
290 try:
290 try:
291 return self.changelog.lookup(key)
291 return self.changelog.lookup(key)
292 except:
292 except:
293 raise repo.RepoError(_("unknown revision '%s'") % key)
293 raise repo.RepoError(_("unknown revision '%s'") % key)
294
294
295 def dev(self):
295 def dev(self):
296 return os.lstat(self.path).st_dev
296 return os.lstat(self.path).st_dev
297
297
298 def local(self):
298 def local(self):
299 return True
299 return True
300
300
301 def join(self, f):
301 def join(self, f):
302 return os.path.join(self.path, f)
302 return os.path.join(self.path, f)
303
303
304 def wjoin(self, f):
304 def wjoin(self, f):
305 return os.path.join(self.root, f)
305 return os.path.join(self.root, f)
306
306
307 def file(self, f):
307 def file(self, f):
308 if f[0] == '/':
308 if f[0] == '/':
309 f = f[1:]
309 f = f[1:]
310 return filelog.filelog(self.opener, f, self.revlogversion)
310 return filelog.filelog(self.opener, f, self.revlogversion)
311
311
312 def changectx(self, changeid):
312 def changectx(self, changeid):
313 return context.changectx(self, changeid)
313 return context.changectx(self, changeid)
314
314
315 def filectx(self, path, changeid=None, fileid=None):
315 def filectx(self, path, changeid=None, fileid=None):
316 """changeid can be a changeset revision, node, or tag.
316 """changeid can be a changeset revision, node, or tag.
317 fileid can be a file revision or node."""
317 fileid can be a file revision or node."""
318 return context.filectx(self, path, changeid, fileid)
318 return context.filectx(self, path, changeid, fileid)
319
319
320 def getcwd(self):
320 def getcwd(self):
321 return self.dirstate.getcwd()
321 return self.dirstate.getcwd()
322
322
323 def wfile(self, f, mode='r'):
323 def wfile(self, f, mode='r'):
324 return self.wopener(f, mode)
324 return self.wopener(f, mode)
325
325
326 def wread(self, filename):
326 def wread(self, filename):
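# run working-directory data through the [encode] filters from hgrc on
# its way into the repository (wwrite below applies [decode] on the way
# out); each entry maps a file pattern to a shell command the data is
# piped through, e.g. (illustrative only):
#   [encode]
#   *.txt = tr -d '\r'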
327 if self.encodepats is None:
327 if self.encodepats is None:
328 l = []
328 l = []
329 for pat, cmd in self.ui.configitems("encode"):
329 for pat, cmd in self.ui.configitems("encode"):
330 mf = util.matcher(self.root, "", [pat], [], [])[1]
330 mf = util.matcher(self.root, "", [pat], [], [])[1]
331 l.append((mf, cmd))
331 l.append((mf, cmd))
332 self.encodepats = l
332 self.encodepats = l
333
333
334 data = self.wopener(filename, 'r').read()
334 data = self.wopener(filename, 'r').read()
335
335
336 for mf, cmd in self.encodepats:
336 for mf, cmd in self.encodepats:
337 if mf(filename):
337 if mf(filename):
338 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
338 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
339 data = util.filter(data, cmd)
339 data = util.filter(data, cmd)
340 break
340 break
341
341
342 return data
342 return data
343
343
344 def wwrite(self, filename, data, fd=None):
344 def wwrite(self, filename, data, fd=None):
345 if self.decodepats is None:
345 if self.decodepats is None:
346 l = []
346 l = []
347 for pat, cmd in self.ui.configitems("decode"):
347 for pat, cmd in self.ui.configitems("decode"):
348 mf = util.matcher(self.root, "", [pat], [], [])[1]
348 mf = util.matcher(self.root, "", [pat], [], [])[1]
349 l.append((mf, cmd))
349 l.append((mf, cmd))
350 self.decodepats = l
350 self.decodepats = l
351
351
352 for mf, cmd in self.decodepats:
352 for mf, cmd in self.decodepats:
353 if mf(filename):
353 if mf(filename):
354 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
354 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
355 data = util.filter(data, cmd)
355 data = util.filter(data, cmd)
356 break
356 break
357
357
358 if fd:
358 if fd:
359 return fd.write(data)
359 return fd.write(data)
360 return self.wopener(filename, 'w').write(data)
360 return self.wopener(filename, 'w').write(data)
361
361
362 def transaction(self):
362 def transaction(self):
363 tr = self.transhandle
363 tr = self.transhandle
364 if tr is not None and tr.running():
364 if tr is not None and tr.running():
365 return tr.nest()
365 return tr.nest()
366
366
367 # save dirstate for rollback
367 # save dirstate for rollback
368 try:
368 try:
369 ds = self.opener("dirstate").read()
369 ds = self.opener("dirstate").read()
370 except IOError:
370 except IOError:
371 ds = ""
371 ds = ""
372 self.opener("journal.dirstate", "w").write(ds)
372 self.opener("journal.dirstate", "w").write(ds)
373
373
374 tr = transaction.transaction(self.ui.warn, self.opener,
374 tr = transaction.transaction(self.ui.warn, self.opener,
375 self.join("journal"),
375 self.join("journal"),
376 aftertrans(self.path))
376 aftertrans(self.path))
377 self.transhandle = tr
377 self.transhandle = tr
378 return tr
378 return tr
379
379
380 def recover(self):
380 def recover(self):
381 l = self.lock()
381 l = self.lock()
382 if os.path.exists(self.join("journal")):
382 if os.path.exists(self.join("journal")):
383 self.ui.status(_("rolling back interrupted transaction\n"))
383 self.ui.status(_("rolling back interrupted transaction\n"))
384 transaction.rollback(self.opener, self.join("journal"))
384 transaction.rollback(self.opener, self.join("journal"))
385 self.reload()
385 self.reload()
386 return True
386 return True
387 else:
387 else:
388 self.ui.warn(_("no interrupted transaction available\n"))
388 self.ui.warn(_("no interrupted transaction available\n"))
389 return False
389 return False
390
390
391 def rollback(self, wlock=None):
391 def rollback(self, wlock=None):
392 if not wlock:
392 if not wlock:
393 wlock = self.wlock()
393 wlock = self.wlock()
394 l = self.lock()
394 l = self.lock()
395 if os.path.exists(self.join("undo")):
395 if os.path.exists(self.join("undo")):
396 self.ui.status(_("rolling back last transaction\n"))
396 self.ui.status(_("rolling back last transaction\n"))
397 transaction.rollback(self.opener, self.join("undo"))
397 transaction.rollback(self.opener, self.join("undo"))
398 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
398 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
399 self.reload()
399 self.reload()
400 self.wreload()
400 self.wreload()
401 else:
401 else:
402 self.ui.warn(_("no rollback information available\n"))
402 self.ui.warn(_("no rollback information available\n"))
403
403
404 def wreload(self):
404 def wreload(self):
405 self.dirstate.read()
405 self.dirstate.read()
406
406
407 def reload(self):
407 def reload(self):
408 self.changelog.load()
408 self.changelog.load()
409 self.manifest.load()
409 self.manifest.load()
410 self.tagscache = None
410 self.tagscache = None
411 self.nodetagscache = None
411 self.nodetagscache = None
412
412
413 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
413 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
414 desc=None):
414 desc=None):
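# try a non-blocking acquire first; if the lock is held and waiting was
# requested, report the current holder and retry with the ui.timeout
# setting (600 seconds by default)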
415 try:
415 try:
416 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
416 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
417 except lock.LockHeld, inst:
417 except lock.LockHeld, inst:
418 if not wait:
418 if not wait:
419 raise
419 raise
420 self.ui.warn(_("waiting for lock on %s held by %s\n") %
420 self.ui.warn(_("waiting for lock on %s held by %s\n") %
421 (desc, inst.args[0]))
421 (desc, inst.args[0]))
422 # default to 600 seconds timeout
422 # default to 600 seconds timeout
423 l = lock.lock(self.join(lockname),
423 l = lock.lock(self.join(lockname),
424 int(self.ui.config("ui", "timeout") or 600),
424 int(self.ui.config("ui", "timeout") or 600),
425 releasefn, desc=desc)
425 releasefn, desc=desc)
426 if acquirefn:
426 if acquirefn:
427 acquirefn()
427 acquirefn()
428 return l
428 return l
429
429
430 def lock(self, wait=1):
430 def lock(self, wait=1):
431 return self.do_lock("lock", wait, acquirefn=self.reload,
431 return self.do_lock("lock", wait, acquirefn=self.reload,
432 desc=_('repository %s') % self.origroot)
432 desc=_('repository %s') % self.origroot)
433
433
434 def wlock(self, wait=1):
434 def wlock(self, wait=1):
435 return self.do_lock("wlock", wait, self.dirstate.write,
435 return self.do_lock("wlock", wait, self.dirstate.write,
436 self.wreload,
436 self.wreload,
437 desc=_('working directory of %s') % self.origroot)
437 desc=_('working directory of %s') % self.origroot)
438
438
439 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
439 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
440 "determine whether a new filenode is needed"
440 "determine whether a new filenode is needed"
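# returns (existing filenode, None, None) when the text is unchanged
# from the surviving parent, so the old entry can be reused; otherwise
# (None, fp1, fp2), the parents to use for a new filelog revision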
441 fp1 = manifest1.get(filename, nullid)
441 fp1 = manifest1.get(filename, nullid)
442 fp2 = manifest2.get(filename, nullid)
442 fp2 = manifest2.get(filename, nullid)
443
443
444 if fp2 != nullid:
444 if fp2 != nullid:
445 # is one parent an ancestor of the other?
445 # is one parent an ancestor of the other?
446 fpa = filelog.ancestor(fp1, fp2)
446 fpa = filelog.ancestor(fp1, fp2)
447 if fpa == fp1:
447 if fpa == fp1:
448 fp1, fp2 = fp2, nullid
448 fp1, fp2 = fp2, nullid
449 elif fpa == fp2:
449 elif fpa == fp2:
450 fp2 = nullid
450 fp2 = nullid
451
451
452 # is the file unmodified from the parent? report existing entry
452 # is the file unmodified from the parent? report existing entry
453 if fp2 == nullid and text == filelog.read(fp1):
453 if fp2 == nullid and text == filelog.read(fp1):
454 return (fp1, None, None)
454 return (fp1, None, None)
455
455
456 return (None, fp1, fp2)
456 return (None, fp1, fp2)
457
457
458 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
458 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
459 orig_parent = self.dirstate.parents()[0] or nullid
459 orig_parent = self.dirstate.parents()[0] or nullid
460 p1 = p1 or self.dirstate.parents()[0] or nullid
460 p1 = p1 or self.dirstate.parents()[0] or nullid
461 p2 = p2 or self.dirstate.parents()[1] or nullid
461 p2 = p2 or self.dirstate.parents()[1] or nullid
462 c1 = self.changelog.read(p1)
462 c1 = self.changelog.read(p1)
463 c2 = self.changelog.read(p2)
463 c2 = self.changelog.read(p2)
464 m1 = self.manifest.read(c1[0])
464 m1 = self.manifest.read(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
466 m2 = self.manifest.read(c2[0])
466 m2 = self.manifest.read(c2[0])
467 changed = []
467 changed = []
468
468
469 if orig_parent == p1:
469 if orig_parent == p1:
470 update_dirstate = 1
470 update_dirstate = 1
471 else:
471 else:
472 update_dirstate = 0
472 update_dirstate = 0
473
473
474 if not wlock:
474 if not wlock:
475 wlock = self.wlock()
475 wlock = self.wlock()
476 l = self.lock()
476 l = self.lock()
477 tr = self.transaction()
477 tr = self.transaction()
478 mm = m1.copy()
478 mm = m1.copy()
479 mfm = mf1.copy()
479 mfm = mf1.copy()
480 linkrev = self.changelog.count()
480 linkrev = self.changelog.count()
481 for f in files:
481 for f in files:
482 try:
482 try:
483 t = self.wread(f)
483 t = self.wread(f)
484 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
484 mfm.set(f, util.is_exec(self.wjoin(f), mfm.execf(f)))
485 r = self.file(f)
485 r = self.file(f)
486 mfm[f] = tm
487
486
488 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
487 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
489 if entry:
488 if entry:
490 mm[f] = entry
489 mm[f] = entry
491 continue
490 continue
492
491
493 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
492 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
494 changed.append(f)
493 changed.append(f)
495 if update_dirstate:
494 if update_dirstate:
496 self.dirstate.update([f], "n")
495 self.dirstate.update([f], "n")
497 except IOError:
496 except IOError:
498 try:
497 try:
499 del mm[f]
498 del mm[f]
500 del mfm[f]
499 del mfm[f]
501 if update_dirstate:
500 if update_dirstate:
502 self.dirstate.forget([f])
501 self.dirstate.forget([f])
503 except:
502 except:
504 # deleted from p2?
503 # deleted from p2?
505 pass
504 pass
506
505
507 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
506 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
508 user = user or self.ui.username()
507 user = user or self.ui.username()
509 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
508 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
510 tr.close()
509 tr.close()
511 if update_dirstate:
510 if update_dirstate:
512 self.dirstate.setparents(n, nullid)
511 self.dirstate.setparents(n, nullid)
513
512
514 def commit(self, files=None, text="", user=None, date=None,
513 def commit(self, files=None, text="", user=None, date=None,
515 match=util.always, force=False, lock=None, wlock=None,
514 match=util.always, force=False, lock=None, wlock=None,
516 force_editor=False):
515 force_editor=False):
517 commit = []
516 commit = []
518 remove = []
517 remove = []
519 changed = []
518 changed = []
520
519
521 if files:
520 if files:
522 for f in files:
521 for f in files:
523 s = self.dirstate.state(f)
522 s = self.dirstate.state(f)
524 if s in 'nmai':
523 if s in 'nmai':
525 commit.append(f)
524 commit.append(f)
526 elif s == 'r':
525 elif s == 'r':
527 remove.append(f)
526 remove.append(f)
528 else:
527 else:
529 self.ui.warn(_("%s not tracked!\n") % f)
528 self.ui.warn(_("%s not tracked!\n") % f)
530 else:
529 else:
531 modified, added, removed, deleted, unknown = self.changes(match=match)
530 modified, added, removed, deleted, unknown = self.changes(match=match)
532 commit = modified + added
531 commit = modified + added
533 remove = removed
532 remove = removed
534
533
535 p1, p2 = self.dirstate.parents()
534 p1, p2 = self.dirstate.parents()
536 c1 = self.changelog.read(p1)
535 c1 = self.changelog.read(p1)
537 c2 = self.changelog.read(p2)
536 c2 = self.changelog.read(p2)
538 m1 = self.manifest.read(c1[0])
537 m1 = self.manifest.read(c1[0])
539 mf1 = self.manifest.readflags(c1[0])
538 mf1 = self.manifest.readflags(c1[0])
540 m2 = self.manifest.read(c2[0])
539 m2 = self.manifest.read(c2[0])
541
540
542 if not commit and not remove and not force and p2 == nullid:
541 if not commit and not remove and not force and p2 == nullid:
543 self.ui.status(_("nothing changed\n"))
542 self.ui.status(_("nothing changed\n"))
544 return None
543 return None
545
544
546 xp1 = hex(p1)
545 xp1 = hex(p1)
547 if p2 == nullid: xp2 = ''
546 if p2 == nullid: xp2 = ''
548 else: xp2 = hex(p2)
547 else: xp2 = hex(p2)
549
548
550 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
549 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
551
550
552 if not wlock:
551 if not wlock:
553 wlock = self.wlock()
552 wlock = self.wlock()
554 if not lock:
553 if not lock:
555 lock = self.lock()
554 lock = self.lock()
556 tr = self.transaction()
555 tr = self.transaction()
557
556
558 # check in files
557 # check in files
559 new = {}
558 new = {}
560 linkrev = self.changelog.count()
559 linkrev = self.changelog.count()
561 commit.sort()
560 commit.sort()
562 for f in commit:
561 for f in commit:
563 self.ui.note(f + "\n")
562 self.ui.note(f + "\n")
564 try:
563 try:
565 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
564 mf1.set(f, util.is_exec(self.wjoin(f), mf1.execf(f)))
566 t = self.wread(f)
565 t = self.wread(f)
567 except IOError:
566 except IOError:
568 self.ui.warn(_("trouble committing %s!\n") % f)
567 self.ui.warn(_("trouble committing %s!\n") % f)
569 raise
568 raise
570
569
571 r = self.file(f)
570 r = self.file(f)
572
571
573 meta = {}
572 meta = {}
574 cp = self.dirstate.copied(f)
573 cp = self.dirstate.copied(f)
575 if cp:
574 if cp:
576 meta["copy"] = cp
575 meta["copy"] = cp
577 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
576 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
578 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
577 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
579 fp1, fp2 = nullid, nullid
578 fp1, fp2 = nullid, nullid
580 else:
579 else:
581 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
580 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
582 if entry:
581 if entry:
583 new[f] = entry
582 new[f] = entry
584 continue
583 continue
585
584
586 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
585 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
587 # remember what we've added so that we can later calculate
586 # remember what we've added so that we can later calculate
588 # the files to pull from a set of changesets
587 # the files to pull from a set of changesets
589 changed.append(f)
588 changed.append(f)
590
589
591 # update manifest
590 # update manifest
592 m1 = m1.copy()
591 m1 = m1.copy()
593 m1.update(new)
592 m1.update(new)
594 for f in remove:
593 for f in remove:
595 if f in m1:
594 if f in m1:
596 del m1[f]
595 del m1[f]
597 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
596 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
598 (new, remove))
597 (new, remove))
599
598
600 # add changeset
599 # add changeset
601 new = new.keys()
600 new = new.keys()
602 new.sort()
601 new.sort()
603
602
604 user = user or self.ui.username()
603 user = user or self.ui.username()
605 if not text or force_editor:
604 if not text or force_editor:
606 edittext = []
605 edittext = []
607 if text:
606 if text:
608 edittext.append(text)
607 edittext.append(text)
609 edittext.append("")
608 edittext.append("")
610 if p2 != nullid:
609 if p2 != nullid:
611 edittext.append("HG: branch merge")
610 edittext.append("HG: branch merge")
612 edittext.extend(["HG: changed %s" % f for f in changed])
611 edittext.extend(["HG: changed %s" % f for f in changed])
613 edittext.extend(["HG: removed %s" % f for f in remove])
612 edittext.extend(["HG: removed %s" % f for f in remove])
614 if not changed and not remove:
613 if not changed and not remove:
615 edittext.append("HG: no files changed")
614 edittext.append("HG: no files changed")
616 edittext.append("")
615 edittext.append("")
617 # run editor in the repository root
616 # run editor in the repository root
618 olddir = os.getcwd()
617 olddir = os.getcwd()
619 os.chdir(self.root)
618 os.chdir(self.root)
620 text = self.ui.edit("\n".join(edittext), user)
619 text = self.ui.edit("\n".join(edittext), user)
621 os.chdir(olddir)
620 os.chdir(olddir)
622
621
623 lines = [line.rstrip() for line in text.rstrip().splitlines()]
622 lines = [line.rstrip() for line in text.rstrip().splitlines()]
624 while lines and not lines[0]:
623 while lines and not lines[0]:
625 del lines[0]
624 del lines[0]
626 if not lines:
625 if not lines:
627 return None
626 return None
628 text = '\n'.join(lines)
627 text = '\n'.join(lines)
629 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
628 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
630 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
629 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
631 parent2=xp2)
630 parent2=xp2)
632 tr.close()
631 tr.close()
633
632
634 self.dirstate.setparents(n)
633 self.dirstate.setparents(n)
635 self.dirstate.update(new, "n")
634 self.dirstate.update(new, "n")
636 self.dirstate.forget(remove)
635 self.dirstate.forget(remove)
637
636
638 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
637 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
639 return n
638 return n
640
639
641 def walk(self, node=None, files=[], match=util.always, badmatch=None):
640 def walk(self, node=None, files=[], match=util.always, badmatch=None):
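# yield (source, filename) pairs: 'm' for files taken from the manifest
# of the given node, 'b' for requested files that only badmatch accepts,
# otherwise whatever source code dirstate.walk reports for the working
# directory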
642 if node:
641 if node:
643 fdict = dict.fromkeys(files)
642 fdict = dict.fromkeys(files)
644 for fn in self.manifest.read(self.changelog.read(node)[0]):
643 for fn in self.manifest.read(self.changelog.read(node)[0]):
645 fdict.pop(fn, None)
644 fdict.pop(fn, None)
646 if match(fn):
645 if match(fn):
647 yield 'm', fn
646 yield 'm', fn
648 for fn in fdict:
647 for fn in fdict:
649 if badmatch and badmatch(fn):
648 if badmatch and badmatch(fn):
650 if match(fn):
649 if match(fn):
651 yield 'b', fn
650 yield 'b', fn
652 else:
651 else:
653 self.ui.warn(_('%s: No such file in rev %s\n') % (
652 self.ui.warn(_('%s: No such file in rev %s\n') % (
654 util.pathto(self.getcwd(), fn), short(node)))
653 util.pathto(self.getcwd(), fn), short(node)))
655 else:
654 else:
656 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
655 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
657 yield src, fn
656 yield src, fn
658
657
659 def changes(self, node1=None, node2=None, files=[], match=util.always,
658 def changes(self, node1=None, node2=None, files=[], match=util.always,
660 wlock=None, show_ignored=None):
659 wlock=None, show_ignored=None):
661 """return changes between two nodes or node and working directory
660 """return changes between two nodes or node and working directory
662
661
663 If node1 is None, use the first dirstate parent instead.
662 If node1 is None, use the first dirstate parent instead.
664 If node2 is None, compare node1 with working directory.
663 If node2 is None, compare node1 with working directory.
665 """
664 """
666
665
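# typical use, as in commit() above (a sketch of the default call):
#   modified, added, removed, deleted, unknown = self.changes(match=match)
# with show_ignored set, a sixth list of ignored files is also returned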
667 def fcmp(fn, mf):
666 def fcmp(fn, mf):
668 t1 = self.wread(fn)
667 t1 = self.wread(fn)
669 t2 = self.file(fn).read(mf.get(fn, nullid))
668 t2 = self.file(fn).read(mf.get(fn, nullid))
670 return cmp(t1, t2)
669 return cmp(t1, t2)
671
670
672 def mfmatches(node):
671 def mfmatches(node):
673 change = self.changelog.read(node)
672 change = self.changelog.read(node)
674 mf = dict(self.manifest.read(change[0]))
673 mf = dict(self.manifest.read(change[0]))
675 for fn in mf.keys():
674 for fn in mf.keys():
676 if not match(fn):
675 if not match(fn):
677 del mf[fn]
676 del mf[fn]
678 return mf
677 return mf
679
678
680 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
679 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
681 compareworking = False
680 compareworking = False
682 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
681 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
683 compareworking = True
682 compareworking = True
684
683
685 if not compareworking:
684 if not compareworking:
686 # read the manifest from node1 before the manifest from node2,
685 # read the manifest from node1 before the manifest from node2,
687 # so that we'll hit the manifest cache if we're going through
686 # so that we'll hit the manifest cache if we're going through
688 # all the revisions in parent->child order.
687 # all the revisions in parent->child order.
689 mf1 = mfmatches(node1)
688 mf1 = mfmatches(node1)
690
689
691 # are we comparing the working directory?
690 # are we comparing the working directory?
692 if not node2:
691 if not node2:
693 if not wlock:
692 if not wlock:
694 try:
693 try:
695 wlock = self.wlock(wait=0)
694 wlock = self.wlock(wait=0)
696 except lock.LockException:
695 except lock.LockException:
697 wlock = None
696 wlock = None
698 lookup, modified, added, removed, deleted, unknown, ignored = (
697 lookup, modified, added, removed, deleted, unknown, ignored = (
699 self.dirstate.changes(files, match, show_ignored))
698 self.dirstate.changes(files, match, show_ignored))
700
699
701 # are we comparing working dir against its parent?
700 # are we comparing working dir against its parent?
702 if compareworking:
701 if compareworking:
703 if lookup:
702 if lookup:
704 # do a full compare of any files that might have changed
703 # do a full compare of any files that might have changed
705 mf2 = mfmatches(self.dirstate.parents()[0])
704 mf2 = mfmatches(self.dirstate.parents()[0])
706 for f in lookup:
705 for f in lookup:
707 if fcmp(f, mf2):
706 if fcmp(f, mf2):
708 modified.append(f)
707 modified.append(f)
709 elif wlock is not None:
708 elif wlock is not None:
710 self.dirstate.update([f], "n")
709 self.dirstate.update([f], "n")
711 else:
710 else:
712 # we are comparing working dir against non-parent
711 # we are comparing working dir against non-parent
713 # generate a pseudo-manifest for the working dir
712 # generate a pseudo-manifest for the working dir
714 mf2 = mfmatches(self.dirstate.parents()[0])
713 mf2 = mfmatches(self.dirstate.parents()[0])
715 for f in lookup + modified + added:
714 for f in lookup + modified + added:
716 mf2[f] = ""
715 mf2[f] = ""
717 for f in removed:
716 for f in removed:
718 if f in mf2:
717 if f in mf2:
719 del mf2[f]
718 del mf2[f]
720 else:
719 else:
721 # we are comparing two revisions
720 # we are comparing two revisions
722 deleted, unknown, ignored = [], [], []
721 deleted, unknown, ignored = [], [], []
723 mf2 = mfmatches(node2)
722 mf2 = mfmatches(node2)
724
723
725 if not compareworking:
724 if not compareworking:
726 # flush lists from dirstate before comparing manifests
725 # flush lists from dirstate before comparing manifests
727 modified, added = [], []
726 modified, added = [], []
728
727
729 # make sure to sort the files so we talk to the disk in a
728 # make sure to sort the files so we talk to the disk in a
730 # reasonable order
729 # reasonable order
731 mf2keys = mf2.keys()
730 mf2keys = mf2.keys()
732 mf2keys.sort()
731 mf2keys.sort()
733 for fn in mf2keys:
732 for fn in mf2keys:
734 if fn in mf1:
733 if fn in mf1:
735 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
734 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
736 modified.append(fn)
735 modified.append(fn)
737 del mf1[fn]
736 del mf1[fn]
738 else:
737 else:
739 added.append(fn)
738 added.append(fn)
740
739
741 removed = mf1.keys()
740 removed = mf1.keys()
742
741
743 # sort and return results:
742 # sort and return results:
744 for l in modified, added, removed, deleted, unknown, ignored:
743 for l in modified, added, removed, deleted, unknown, ignored:
745 l.sort()
744 l.sort()
746 if show_ignored is None:
745 if show_ignored is None:
747 return (modified, added, removed, deleted, unknown)
746 return (modified, added, removed, deleted, unknown)
748 else:
747 else:
749 return (modified, added, removed, deleted, unknown, ignored)
748 return (modified, added, removed, deleted, unknown, ignored)
750
749
751 def add(self, list, wlock=None):
750 def add(self, list, wlock=None):
752 if not wlock:
751 if not wlock:
753 wlock = self.wlock()
752 wlock = self.wlock()
754 for f in list:
753 for f in list:
755 p = self.wjoin(f)
754 p = self.wjoin(f)
756 if not os.path.exists(p):
755 if not os.path.exists(p):
757 self.ui.warn(_("%s does not exist!\n") % f)
756 self.ui.warn(_("%s does not exist!\n") % f)
758 elif not os.path.isfile(p):
757 elif not os.path.isfile(p):
759 self.ui.warn(_("%s not added: only files supported currently\n")
758 self.ui.warn(_("%s not added: only files supported currently\n")
760 % f)
759 % f)
761 elif self.dirstate.state(f) in 'an':
760 elif self.dirstate.state(f) in 'an':
762 self.ui.warn(_("%s already tracked!\n") % f)
761 self.ui.warn(_("%s already tracked!\n") % f)
763 else:
762 else:
764 self.dirstate.update([f], "a")
763 self.dirstate.update([f], "a")
765
764
766 def forget(self, list, wlock=None):
765 def forget(self, list, wlock=None):
767 if not wlock:
766 if not wlock:
768 wlock = self.wlock()
767 wlock = self.wlock()
769 for f in list:
768 for f in list:
770 if self.dirstate.state(f) not in 'ai':
769 if self.dirstate.state(f) not in 'ai':
771 self.ui.warn(_("%s not added!\n") % f)
770 self.ui.warn(_("%s not added!\n") % f)
772 else:
771 else:
773 self.dirstate.forget([f])
772 self.dirstate.forget([f])
774
773
775 def remove(self, list, unlink=False, wlock=None):
774 def remove(self, list, unlink=False, wlock=None):
776 if unlink:
775 if unlink:
777 for f in list:
776 for f in list:
778 try:
777 try:
779 util.unlink(self.wjoin(f))
778 util.unlink(self.wjoin(f))
780 except OSError, inst:
779 except OSError, inst:
781 if inst.errno != errno.ENOENT:
780 if inst.errno != errno.ENOENT:
782 raise
781 raise
783 if not wlock:
782 if not wlock:
784 wlock = self.wlock()
783 wlock = self.wlock()
785 for f in list:
784 for f in list:
786 p = self.wjoin(f)
785 p = self.wjoin(f)
787 if os.path.exists(p):
786 if os.path.exists(p):
788 self.ui.warn(_("%s still exists!\n") % f)
787 self.ui.warn(_("%s still exists!\n") % f)
789 elif self.dirstate.state(f) == 'a':
788 elif self.dirstate.state(f) == 'a':
790 self.dirstate.forget([f])
789 self.dirstate.forget([f])
791 elif f not in self.dirstate:
790 elif f not in self.dirstate:
792 self.ui.warn(_("%s not tracked!\n") % f)
791 self.ui.warn(_("%s not tracked!\n") % f)
793 else:
792 else:
794 self.dirstate.update([f], "r")
793 self.dirstate.update([f], "r")
795
794
796 def undelete(self, list, wlock=None):
795 def undelete(self, list, wlock=None):
797 p = self.dirstate.parents()[0]
796 p = self.dirstate.parents()[0]
798 mn = self.changelog.read(p)[0]
797 mn = self.changelog.read(p)[0]
799 mf = self.manifest.readflags(mn)
798 mf = self.manifest.readflags(mn)
800 m = self.manifest.read(mn)
799 m = self.manifest.read(mn)
801 if not wlock:
800 if not wlock:
802 wlock = self.wlock()
801 wlock = self.wlock()
803 for f in list:
802 for f in list:
804 if self.dirstate.state(f) not in "r":
803 if self.dirstate.state(f) not in "r":
805 self.ui.warn(_("%s not removed!\n") % f)
804 self.ui.warn(_("%s not removed!\n") % f)
806 else:
805 else:
807 t = self.file(f).read(m[f])
806 t = self.file(f).read(m[f])
808 self.wwrite(f, t)
807 self.wwrite(f, t)
809 util.set_exec(self.wjoin(f), mf[f])
808 util.set_exec(self.wjoin(f), mf.execf(f))
810 self.dirstate.update([f], "n")
809 self.dirstate.update([f], "n")
811
810
812 def copy(self, source, dest, wlock=None):
811 def copy(self, source, dest, wlock=None):
813 p = self.wjoin(dest)
812 p = self.wjoin(dest)
814 if not os.path.exists(p):
813 if not os.path.exists(p):
815 self.ui.warn(_("%s does not exist!\n") % dest)
814 self.ui.warn(_("%s does not exist!\n") % dest)
816 elif not os.path.isfile(p):
815 elif not os.path.isfile(p):
817 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
816 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
818 else:
817 else:
819 if not wlock:
818 if not wlock:
820 wlock = self.wlock()
819 wlock = self.wlock()
821 if self.dirstate.state(dest) == '?':
820 if self.dirstate.state(dest) == '?':
822 self.dirstate.update([dest], "a")
821 self.dirstate.update([dest], "a")
823 self.dirstate.copy(source, dest)
822 self.dirstate.copy(source, dest)
824
823
825 def heads(self, start=None):
824 def heads(self, start=None):
826 heads = self.changelog.heads(start)
825 heads = self.changelog.heads(start)
827 # sort the output in rev descending order
826 # sort the output in rev descending order
828 heads = [(-self.changelog.rev(h), h) for h in heads]
827 heads = [(-self.changelog.rev(h), h) for h in heads]
829 heads.sort()
828 heads.sort()
830 return [n for (r, n) in heads]
829 return [n for (r, n) in heads]
831
830
832 # branchlookup returns a dict giving a list of branches for
831 # branchlookup returns a dict giving a list of branches for
833 # each head. A branch is defined as the tag of a node or
832 # each head. A branch is defined as the tag of a node or
834 # the branch of the node's parents. If a node has multiple
833 # the branch of the node's parents. If a node has multiple
835 # branch tags, tags are eliminated if they are visible from other
834 # branch tags, tags are eliminated if they are visible from other
836 # branch tags.
835 # branch tags.
837 #
836 #
838 # So, for this graph: a->b->c->d->e
837 # So, for this graph: a->b->c->d->e
839 # \ /
838 # \ /
840 # aa -----/
839 # aa -----/
841 # a has tag 2.6.12
840 # a has tag 2.6.12
842 # d has tag 2.6.13
841 # d has tag 2.6.13
843 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
842 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
844 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
843 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
845 # from the list.
844 # from the list.
846 #
845 #
847 # It is possible that more than one head will have the same branch tag.
846 # It is possible that more than one head will have the same branch tag.
848 # callers need to check the result for multiple heads under the same
847 # callers need to check the result for multiple heads under the same
849 # branch tag if that is a problem for them (i.e. checkout of a specific
848 # branch tag if that is a problem for them (i.e. checkout of a specific
850 # branch).
849 # branch).
851 #
850 #
852 # passing in a specific branch will limit the depth of the search
851 # passing in a specific branch will limit the depth of the search
853 # through the parents. It won't limit the branches returned in the
852 # through the parents. It won't limit the branches returned in the
854 # result though.
853 # result though.
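# for the example graph above, the result maps each head to the branch
# tags that survive elimination, roughly (illustrative): head e maps to
# ['2.6.13'] only, since 2.6.12 is visible from 2.6.13 and is dropped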
855 def branchlookup(self, heads=None, branch=None):
854 def branchlookup(self, heads=None, branch=None):
856 if not heads:
855 if not heads:
857 heads = self.heads()
856 heads = self.heads()
858 headt = [ h for h in heads ]
857 headt = [ h for h in heads ]
859 chlog = self.changelog
858 chlog = self.changelog
860 branches = {}
859 branches = {}
861 merges = []
860 merges = []
862 seenmerge = {}
861 seenmerge = {}
863
862
864 # traverse the tree once for each head, recording in the branches
863 # traverse the tree once for each head, recording in the branches
865 # dict which tags are visible from this head. The branches
864 # dict which tags are visible from this head. The branches
866 # dict also records which tags are visible from each tag
865 # dict also records which tags are visible from each tag
867 # while we traverse.
866 # while we traverse.
868 while headt or merges:
867 while headt or merges:
869 if merges:
868 if merges:
870 n, found = merges.pop()
869 n, found = merges.pop()
871 visit = [n]
870 visit = [n]
872 else:
871 else:
873 h = headt.pop()
872 h = headt.pop()
874 visit = [h]
873 visit = [h]
875 found = [h]
874 found = [h]
876 seen = {}
875 seen = {}
877 while visit:
876 while visit:
878 n = visit.pop()
877 n = visit.pop()
879 if n in seen:
878 if n in seen:
880 continue
879 continue
881 pp = chlog.parents(n)
880 pp = chlog.parents(n)
882 tags = self.nodetags(n)
881 tags = self.nodetags(n)
883 if tags:
882 if tags:
884 for x in tags:
883 for x in tags:
885 if x == 'tip':
884 if x == 'tip':
886 continue
885 continue
887 for f in found:
886 for f in found:
888 branches.setdefault(f, {})[n] = 1
887 branches.setdefault(f, {})[n] = 1
889 branches.setdefault(n, {})[n] = 1
888 branches.setdefault(n, {})[n] = 1
890 break
889 break
891 if n not in found:
890 if n not in found:
892 found.append(n)
891 found.append(n)
893 if branch in tags:
892 if branch in tags:
894 continue
893 continue
895 seen[n] = 1
894 seen[n] = 1
896 if pp[1] != nullid and n not in seenmerge:
895 if pp[1] != nullid and n not in seenmerge:
897 merges.append((pp[1], [x for x in found]))
896 merges.append((pp[1], [x for x in found]))
898 seenmerge[n] = 1
897 seenmerge[n] = 1
899 if pp[0] != nullid:
898 if pp[0] != nullid:
900 visit.append(pp[0])
899 visit.append(pp[0])
901 # traverse the branches dict, eliminating branch tags from each
900 # traverse the branches dict, eliminating branch tags from each
902 # head that are visible from another branch tag for that head.
901 # head that are visible from another branch tag for that head.
903 out = {}
902 out = {}
904 viscache = {}
903 viscache = {}
905 for h in heads:
904 for h in heads:
906 def visible(node):
905 def visible(node):
907 if node in viscache:
906 if node in viscache:
908 return viscache[node]
907 return viscache[node]
909 ret = {}
908 ret = {}
910 visit = [node]
909 visit = [node]
911 while visit:
910 while visit:
912 x = visit.pop()
911 x = visit.pop()
913 if x in viscache:
912 if x in viscache:
914 ret.update(viscache[x])
913 ret.update(viscache[x])
915 elif x not in ret:
914 elif x not in ret:
916 ret[x] = 1
915 ret[x] = 1
917 if x in branches:
916 if x in branches:
918 visit[len(visit):] = branches[x].keys()
917 visit[len(visit):] = branches[x].keys()
919 viscache[node] = ret
918 viscache[node] = ret
920 return ret
919 return ret
921 if h not in branches:
920 if h not in branches:
922 continue
921 continue
923 # O(n^2), but somewhat limited. This only searches the
922 # O(n^2), but somewhat limited. This only searches the
924 # tags visible from a specific head, not all the tags in the
923 # tags visible from a specific head, not all the tags in the
925 # whole repo.
924 # whole repo.
926 for b in branches[h]:
925 for b in branches[h]:
927 vis = False
926 vis = False
928 for bb in branches[h].keys():
927 for bb in branches[h].keys():
929 if b != bb:
928 if b != bb:
930 if b in visible(bb):
929 if b in visible(bb):
931 vis = True
930 vis = True
932 break
931 break
933 if not vis:
932 if not vis:
934 l = out.setdefault(h, [])
933 l = out.setdefault(h, [])
935 l[len(l):] = self.nodetags(b)
934 l[len(l):] = self.nodetags(b)
936 return out
935 return out
937
936
938 def branches(self, nodes):
937 def branches(self, nodes):
939 if not nodes:
938 if not nodes:
940 nodes = [self.changelog.tip()]
939 nodes = [self.changelog.tip()]
941 b = []
940 b = []
942 for n in nodes:
941 for n in nodes:
943 t = n
942 t = n
944 while 1:
943 while 1:
945 p = self.changelog.parents(n)
944 p = self.changelog.parents(n)
946 if p[1] != nullid or p[0] == nullid:
945 if p[1] != nullid or p[0] == nullid:
947 b.append((t, n, p[0], p[1]))
946 b.append((t, n, p[0], p[1]))
948 break
947 break
949 n = p[0]
948 n = p[0]
950 return b
949 return b
951
950
952 def between(self, pairs):
951 def between(self, pairs):
953 r = []
952 r = []
954
953
955 for top, bottom in pairs:
954 for top, bottom in pairs:
956 n, l, i = top, [], 0
955 n, l, i = top, [], 0
957 f = 1
956 f = 1
958
957
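# walk the first-parent chain from top towards bottom, recording the
# nodes seen at exponentially growing distances (1, 2, 4, 8, ...); the
# caller can then binary-search each recorded gap for the first
# unknown changeset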
959 while n != bottom:
958 while n != bottom:
960 p = self.changelog.parents(n)[0]
959 p = self.changelog.parents(n)[0]
961 if i == f:
960 if i == f:
962 l.append(n)
961 l.append(n)
963 f = f * 2
962 f = f * 2
964 n = p
963 n = p
965 i += 1
964 i += 1
966
965
967 r.append(l)
966 r.append(l)
968
967
969 return r
968 return r
970
969
971 def findincoming(self, remote, base=None, heads=None, force=False):
970 def findincoming(self, remote, base=None, heads=None, force=False):
972 """Return list of roots of the subsets of missing nodes from remote
971 """Return list of roots of the subsets of missing nodes from remote
973
972
974 If base dict is specified, assume that these nodes and their parents
973 If base dict is specified, assume that these nodes and their parents
975 exist on the remote side and that no child of a node of base exists
974 exist on the remote side and that no child of a node of base exists
976 in both remote and self.
975 in both remote and self.
977 Furthermore base will be updated to include the nodes that exist
976 Furthermore base will be updated to include the nodes that exist
978 in both self and remote but none of whose children exist in both.
977 in both self and remote but none of whose children exist in both.
979 If a list of heads is specified, return only nodes which are heads
978 If a list of heads is specified, return only nodes which are heads
980 or ancestors of these heads.
979 or ancestors of these heads.
981
980
982 All the ancestors of base are in self and in remote.
981 All the ancestors of base are in self and in remote.
983 All the descendants of the list returned are missing in self.
982 All the descendants of the list returned are missing in self.
984 (and so we know that the rest of the nodes are missing in remote, see
983 (and so we know that the rest of the nodes are missing in remote, see
985 outgoing)
984 outgoing)
986 """
985 """
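# typical use (a sketch): pull() below calls this as
#   fetch = self.findincoming(remote, force=force)
# and feeds the returned roots to remote.changegroup() or
# remote.changegroupsubset()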
987 m = self.changelog.nodemap
986 m = self.changelog.nodemap
988 search = []
987 search = []
989 fetch = {}
988 fetch = {}
990 seen = {}
989 seen = {}
991 seenbranch = {}
990 seenbranch = {}
992 if base is None:
991 if base is None:
993 base = {}
992 base = {}
994
993
995 if not heads:
994 if not heads:
996 heads = remote.heads()
995 heads = remote.heads()
997
996
998 if self.changelog.tip() == nullid:
997 if self.changelog.tip() == nullid:
999 base[nullid] = 1
998 base[nullid] = 1
1000 if heads != [nullid]:
999 if heads != [nullid]:
1001 return [nullid]
1000 return [nullid]
1002 return []
1001 return []
1003
1002
1004 # assume we're closer to the tip than the root
1003 # assume we're closer to the tip than the root
1005 # and start by examining the heads
1004 # and start by examining the heads
1006 self.ui.status(_("searching for changes\n"))
1005 self.ui.status(_("searching for changes\n"))
1007
1006
1008 unknown = []
1007 unknown = []
1009 for h in heads:
1008 for h in heads:
1010 if h not in m:
1009 if h not in m:
1011 unknown.append(h)
1010 unknown.append(h)
1012 else:
1011 else:
1013 base[h] = 1
1012 base[h] = 1
1014
1013
1015 if not unknown:
1014 if not unknown:
1016 return []
1015 return []
1017
1016
1018 req = dict.fromkeys(unknown)
1017 req = dict.fromkeys(unknown)
1019 reqcnt = 0
1018 reqcnt = 0
1020
1019
1021 # search through remote branches
1020 # search through remote branches
1022 # a 'branch' here is a linear segment of history, with four parts:
1021 # a 'branch' here is a linear segment of history, with four parts:
1023 # head, root, first parent, second parent
1022 # head, root, first parent, second parent
1024 # (a branch always has two parents (or none) by definition)
1023 # (a branch always has two parents (or none) by definition)
1025 unknown = remote.branches(unknown)
1024 unknown = remote.branches(unknown)
1026 while unknown:
1025 while unknown:
1027 r = []
1026 r = []
1028 while unknown:
1027 while unknown:
1029 n = unknown.pop(0)
1028 n = unknown.pop(0)
1030 if n[0] in seen:
1029 if n[0] in seen:
1031 continue
1030 continue
1032
1031
1033 self.ui.debug(_("examining %s:%s\n")
1032 self.ui.debug(_("examining %s:%s\n")
1034 % (short(n[0]), short(n[1])))
1033 % (short(n[0]), short(n[1])))
1035 if n[0] == nullid: # found the end of the branch
1034 if n[0] == nullid: # found the end of the branch
1036 pass
1035 pass
1037 elif n in seenbranch:
1036 elif n in seenbranch:
1038 self.ui.debug(_("branch already found\n"))
1037 self.ui.debug(_("branch already found\n"))
1039 continue
1038 continue
1040 elif n[1] and n[1] in m: # do we know the base?
1039 elif n[1] and n[1] in m: # do we know the base?
1041 self.ui.debug(_("found incomplete branch %s:%s\n")
1040 self.ui.debug(_("found incomplete branch %s:%s\n")
1042 % (short(n[0]), short(n[1])))
1041 % (short(n[0]), short(n[1])))
1043 search.append(n) # schedule branch range for scanning
1042 search.append(n) # schedule branch range for scanning
1044 seenbranch[n] = 1
1043 seenbranch[n] = 1
1045 else:
1044 else:
1046 if n[1] not in seen and n[1] not in fetch:
1045 if n[1] not in seen and n[1] not in fetch:
1047 if n[2] in m and n[3] in m:
1046 if n[2] in m and n[3] in m:
1048 self.ui.debug(_("found new changeset %s\n") %
1047 self.ui.debug(_("found new changeset %s\n") %
1049 short(n[1]))
1048 short(n[1]))
1050 fetch[n[1]] = 1 # earliest unknown
1049 fetch[n[1]] = 1 # earliest unknown
1051 for p in n[2:4]:
1050 for p in n[2:4]:
1052 if p in m:
1051 if p in m:
1053 base[p] = 1 # latest known
1052 base[p] = 1 # latest known
1054
1053
1055 for p in n[2:4]:
1054 for p in n[2:4]:
1056 if p not in req and p not in m:
1055 if p not in req and p not in m:
1057 r.append(p)
1056 r.append(p)
1058 req[p] = 1
1057 req[p] = 1
1059 seen[n[0]] = 1
1058 seen[n[0]] = 1
1060
1059
1061 if r:
1060 if r:
1062 reqcnt += 1
1061 reqcnt += 1
1063 self.ui.debug(_("request %d: %s\n") %
1062 self.ui.debug(_("request %d: %s\n") %
1064 (reqcnt, " ".join(map(short, r))))
1063 (reqcnt, " ".join(map(short, r))))
1065 for p in range(0, len(r), 10):
1064 for p in range(0, len(r), 10):
1066 for b in remote.branches(r[p:p+10]):
1065 for b in remote.branches(r[p:p+10]):
1067 self.ui.debug(_("received %s:%s\n") %
1066 self.ui.debug(_("received %s:%s\n") %
1068 (short(b[0]), short(b[1])))
1067 (short(b[0]), short(b[1])))
1069 unknown.append(b)
1068 unknown.append(b)
1070
1069
1071 # do binary search on the branches we found
1070 # do binary search on the branches we found
1072 while search:
1071 while search:
1073 n = search.pop(0)
1072 n = search.pop(0)
1074 reqcnt += 1
1073 reqcnt += 1
1075 l = remote.between([(n[0], n[1])])[0]
1074 l = remote.between([(n[0], n[1])])[0]
1076 l.append(n[1])
1075 l.append(n[1])
1077 p = n[0]
1076 p = n[0]
1078 f = 1
1077 f = 1
1079 for i in l:
1078 for i in l:
1080 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1079 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1081 if i in m:
1080 if i in m:
1082 if f <= 2:
1081 if f <= 2:
1083 self.ui.debug(_("found new branch changeset %s\n") %
1082 self.ui.debug(_("found new branch changeset %s\n") %
1084 short(p))
1083 short(p))
1085 fetch[p] = 1
1084 fetch[p] = 1
1086 base[i] = 1
1085 base[i] = 1
1087 else:
1086 else:
1088 self.ui.debug(_("narrowed branch search to %s:%s\n")
1087 self.ui.debug(_("narrowed branch search to %s:%s\n")
1089 % (short(p), short(i)))
1088 % (short(p), short(i)))
1090 search.append((p, i))
1089 search.append((p, i))
1091 break
1090 break
1092 p, f = i, f * 2
1091 p, f = i, f * 2
1093
1092
1094 # sanity check our fetch list
1093 # sanity check our fetch list
1095 for f in fetch.keys():
1094 for f in fetch.keys():
1096 if f in m:
1095 if f in m:
1097 raise repo.RepoError(_("already have changeset ") + short(f))
1096 raise repo.RepoError(_("already have changeset ") + short(f))
1098
1097
1099 if base.keys() == [nullid]:
1098 if base.keys() == [nullid]:
1100 if force:
1099 if force:
1101 self.ui.warn(_("warning: repository is unrelated\n"))
1100 self.ui.warn(_("warning: repository is unrelated\n"))
1102 else:
1101 else:
1103 raise util.Abort(_("repository is unrelated"))
1102 raise util.Abort(_("repository is unrelated"))
1104
1103
1105 self.ui.note(_("found new changesets starting at ") +
1104 self.ui.note(_("found new changesets starting at ") +
1106 " ".join([short(f) for f in fetch]) + "\n")
1105 " ".join([short(f) for f in fetch]) + "\n")
1107
1106
1108 self.ui.debug(_("%d total queries\n") % reqcnt)
1107 self.ui.debug(_("%d total queries\n") % reqcnt)
1109
1108
1110 return fetch.keys()
1109 return fetch.keys()
1111
1110
1112 def findoutgoing(self, remote, base=None, heads=None, force=False):
1111 def findoutgoing(self, remote, base=None, heads=None, force=False):
1113 """Return list of nodes that are roots of subsets not in remote
1112 """Return list of nodes that are roots of subsets not in remote
1114
1113
1115 If base dict is specified, assume that these nodes and their parents
1114 If base dict is specified, assume that these nodes and their parents
1116 exist on the remote side.
1115 exist on the remote side.
1117 If a list of heads is specified, return only nodes which are heads
1116 If a list of heads is specified, return only nodes which are heads
1118 or ancestors of these heads, and return a second element which
1117 or ancestors of these heads, and return a second element which
1119 contains all remote heads which get new children.
1118 contains all remote heads which get new children.
1120 """
1119 """
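# a sketch of the two call shapes implemented below:
#   roots = self.findoutgoing(remote)                        # no heads
#   roots, updated = self.findoutgoing(remote, heads=heads)  # with heads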
1121 if base is None:
1120 if base is None:
1122 base = {}
1121 base = {}
1123 self.findincoming(remote, base, heads, force=force)
1122 self.findincoming(remote, base, heads, force=force)
1124
1123
1125 self.ui.debug(_("common changesets up to ")
1124 self.ui.debug(_("common changesets up to ")
1126 + " ".join(map(short, base.keys())) + "\n")
1125 + " ".join(map(short, base.keys())) + "\n")
1127
1126
1128 remain = dict.fromkeys(self.changelog.nodemap)
1127 remain = dict.fromkeys(self.changelog.nodemap)
1129
1128
1130 # prune everything remote has from the tree
1129 # prune everything remote has from the tree
1131 del remain[nullid]
1130 del remain[nullid]
1132 remove = base.keys()
1131 remove = base.keys()
1133 while remove:
1132 while remove:
1134 n = remove.pop(0)
1133 n = remove.pop(0)
1135 if n in remain:
1134 if n in remain:
1136 del remain[n]
1135 del remain[n]
1137 for p in self.changelog.parents(n):
1136 for p in self.changelog.parents(n):
1138 remove.append(p)
1137 remove.append(p)
1139
1138
1140 # find every node whose parents have been pruned
1139 # find every node whose parents have been pruned
1141 subset = []
1140 subset = []
1142 # find every remote head that will get new children
1141 # find every remote head that will get new children
1143 updated_heads = {}
1142 updated_heads = {}
1144 for n in remain:
1143 for n in remain:
1145 p1, p2 = self.changelog.parents(n)
1144 p1, p2 = self.changelog.parents(n)
1146 if p1 not in remain and p2 not in remain:
1145 if p1 not in remain and p2 not in remain:
1147 subset.append(n)
1146 subset.append(n)
1148 if heads:
1147 if heads:
1149 if p1 in heads:
1148 if p1 in heads:
1150 updated_heads[p1] = True
1149 updated_heads[p1] = True
1151 if p2 in heads:
1150 if p2 in heads:
1152 updated_heads[p2] = True
1151 updated_heads[p2] = True
1153
1152
1154 # this is the set of all roots we have to push
1153 # this is the set of all roots we have to push
1155 if heads:
1154 if heads:
1156 return subset, updated_heads.keys()
1155 return subset, updated_heads.keys()
1157 else:
1156 else:
1158 return subset
1157 return subset
1159
1158
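# --- Editorial sketch, illustration only (not part of localrepo.py).
# findoutgoing() above starts from every local node, prunes everything
# reachable from the common "base" nodes, and reports the roots of what is
# left.  The same idea on a toy parent map (node -> parents, None == nullid):

def outgoing_roots(parents, base):
    remain = set(parents)                       # every local node
    queue = list(base)
    while queue:                                # prune base and its ancestors
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(p for p in parents[n] if p is not None)
    # a remaining node is a root if neither of its parents survived pruning
    return [n for n in remain if all(p not in remain for p in parents[n])]

toy = {'a': (None,), 'b': ('a',), 'c': ('b',), 'd': ('c',)}
assert outgoing_roots(toy, base=['b']) == ['c']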
1160 def pull(self, remote, heads=None, force=False):
1159 def pull(self, remote, heads=None, force=False):
1161 l = self.lock()
1160 l = self.lock()
1162
1161
1163 fetch = self.findincoming(remote, force=force)
1162 fetch = self.findincoming(remote, force=force)
1164 if fetch == [nullid]:
1163 if fetch == [nullid]:
1165 self.ui.status(_("requesting all changes\n"))
1164 self.ui.status(_("requesting all changes\n"))
1166
1165
1167 if not fetch:
1166 if not fetch:
1168 self.ui.status(_("no changes found\n"))
1167 self.ui.status(_("no changes found\n"))
1169 return 0
1168 return 0
1170
1169
1171 if heads is None:
1170 if heads is None:
1172 cg = remote.changegroup(fetch, 'pull')
1171 cg = remote.changegroup(fetch, 'pull')
1173 else:
1172 else:
1174 cg = remote.changegroupsubset(fetch, heads, 'pull')
1173 cg = remote.changegroupsubset(fetch, heads, 'pull')
1175 return self.addchangegroup(cg, 'pull')
1174 return self.addchangegroup(cg, 'pull')
1176
1175
1177 def push(self, remote, force=False, revs=None):
1176 def push(self, remote, force=False, revs=None):
1178 # there are two ways to push to remote repo:
1177 # there are two ways to push to remote repo:
1179 #
1178 #
1180 # addchangegroup assumes local user can lock remote
1179 # addchangegroup assumes local user can lock remote
1181 # repo (local filesystem, old ssh servers).
1180 # repo (local filesystem, old ssh servers).
1182 #
1181 #
1183 # unbundle assumes local user cannot lock remote repo (new ssh
1182 # unbundle assumes local user cannot lock remote repo (new ssh
1184 # servers, http servers).
1183 # servers, http servers).
1185
1184
1186 if 'unbundle' in remote.capabilities:
1185 if 'unbundle' in remote.capabilities:
1187 return self.push_unbundle(remote, force, revs)
1186 return self.push_unbundle(remote, force, revs)
1188 return self.push_addchangegroup(remote, force, revs)
1187 return self.push_addchangegroup(remote, force, revs)
1189
1188
1190 def prepush(self, remote, force, revs):
1189 def prepush(self, remote, force, revs):
1191 base = {}
1190 base = {}
1192 remote_heads = remote.heads()
1191 remote_heads = remote.heads()
1193 inc = self.findincoming(remote, base, remote_heads, force=force)
1192 inc = self.findincoming(remote, base, remote_heads, force=force)
1194 if not force and inc:
1193 if not force and inc:
1195 self.ui.warn(_("abort: unsynced remote changes!\n"))
1194 self.ui.warn(_("abort: unsynced remote changes!\n"))
1196 self.ui.status(_("(did you forget to sync?"
1195 self.ui.status(_("(did you forget to sync?"
1197 " use push -f to force)\n"))
1196 " use push -f to force)\n"))
1198 return None, 1
1197 return None, 1
1199
1198
1200 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1199 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1201 if revs is not None:
1200 if revs is not None:
1202 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1201 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1203 else:
1202 else:
1204 bases, heads = update, self.changelog.heads()
1203 bases, heads = update, self.changelog.heads()
1205
1204
1206 if not bases:
1205 if not bases:
1207 self.ui.status(_("no changes found\n"))
1206 self.ui.status(_("no changes found\n"))
1208 return None, 1
1207 return None, 1
1209 elif not force:
1208 elif not force:
1210 # FIXME we don't properly detect creation of new heads
1209 # FIXME we don't properly detect creation of new heads
1211 # in the push -r case, assume the user knows what he's doing
1210 # in the push -r case, assume the user knows what he's doing
1212 if not revs and len(remote_heads) < len(heads) \
1211 if not revs and len(remote_heads) < len(heads) \
1213 and remote_heads != [nullid]:
1212 and remote_heads != [nullid]:
1214 self.ui.warn(_("abort: push creates new remote branches!\n"))
1213 self.ui.warn(_("abort: push creates new remote branches!\n"))
1215 self.ui.status(_("(did you forget to merge?"
1214 self.ui.status(_("(did you forget to merge?"
1216 " use push -f to force)\n"))
1215 " use push -f to force)\n"))
1217 return None, 1
1216 return None, 1
1218
1217
1219 if revs is None:
1218 if revs is None:
1220 cg = self.changegroup(update, 'push')
1219 cg = self.changegroup(update, 'push')
1221 else:
1220 else:
1222 cg = self.changegroupsubset(update, revs, 'push')
1221 cg = self.changegroupsubset(update, revs, 'push')
1223 return cg, remote_heads
1222 return cg, remote_heads
1224
1223
1225 def push_addchangegroup(self, remote, force, revs):
1224 def push_addchangegroup(self, remote, force, revs):
1226 lock = remote.lock()
1225 lock = remote.lock()
1227
1226
1228 ret = self.prepush(remote, force, revs)
1227 ret = self.prepush(remote, force, revs)
1229 if ret[0] is not None:
1228 if ret[0] is not None:
1230 cg, remote_heads = ret
1229 cg, remote_heads = ret
1231 return remote.addchangegroup(cg, 'push')
1230 return remote.addchangegroup(cg, 'push')
1232 return ret[1]
1231 return ret[1]
1233
1232
1234 def push_unbundle(self, remote, force, revs):
1233 def push_unbundle(self, remote, force, revs):
1235 # local repo finds heads on server, finds out what revs it
1234 # local repo finds heads on server, finds out what revs it
1236 # must push. once revs transferred, if server finds it has
1235 # must push. once revs transferred, if server finds it has
1237 # different heads (someone else won commit/push race), server
1236 # different heads (someone else won commit/push race), server
1238 # aborts.
1237 # aborts.
1239
1238
1240 ret = self.prepush(remote, force, revs)
1239 ret = self.prepush(remote, force, revs)
1241 if ret[0] is not None:
1240 if ret[0] is not None:
1242 cg, remote_heads = ret
1241 cg, remote_heads = ret
1243 if force: remote_heads = ['force']
1242 if force: remote_heads = ['force']
1244 return remote.unbundle(cg, remote_heads, 'push')
1243 return remote.unbundle(cg, remote_heads, 'push')
1245 return ret[1]
1244 return ret[1]
1246
1245
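# --- Editorial sketch, illustration only (not part of localrepo.py).
# push_unbundle() above sends the remote heads it observed along with the
# bundle, so a server can refuse the push if someone else's push changed the
# heads in between (the commit/push race mentioned in the comment).  Roughly:

def server_accepts(current_heads, heads_seen_by_client):
    if heads_seen_by_client == ['force']:
        return True                      # client explicitly overrode the check
    return sorted(current_heads) == sorted(heads_seen_by_client)

assert server_accepts(['h1', 'h2'], ['h2', 'h1'])
assert not server_accepts(['h1', 'h3'], ['h1', 'h2'])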
1247 def changegroupsubset(self, bases, heads, source):
1246 def changegroupsubset(self, bases, heads, source):
1248 """This function generates a changegroup consisting of all the nodes
1247 """This function generates a changegroup consisting of all the nodes
1249 that are descendants of any of the bases, and ancestors of any of
1248 that are descendants of any of the bases, and ancestors of any of
1250 the heads.
1249 the heads.
1251
1250
1252 It is fairly complex as determining which filenodes and which
1251 It is fairly complex as determining which filenodes and which
1253 manifest nodes need to be included for the changeset to be complete
1252 manifest nodes need to be included for the changeset to be complete
1254 is non-trivial.
1253 is non-trivial.
1255
1254
1256 Another wrinkle is doing the reverse, figuring out which changeset in
1255 Another wrinkle is doing the reverse, figuring out which changeset in
1257 the changegroup a particular filenode or manifestnode belongs to."""
1256 the changegroup a particular filenode or manifestnode belongs to."""
1258
1257
1259 self.hook('preoutgoing', throw=True, source=source)
1258 self.hook('preoutgoing', throw=True, source=source)
1260
1259
1261 # Set up some initial variables
1260 # Set up some initial variables
1262 # Make it easy to refer to self.changelog
1261 # Make it easy to refer to self.changelog
1263 cl = self.changelog
1262 cl = self.changelog
1264 # msng is short for missing - compute the list of changesets in this
1263 # msng is short for missing - compute the list of changesets in this
1265 # changegroup.
1264 # changegroup.
1266 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1265 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1267 # Some bases may turn out to be superfluous, and some heads may be
1266 # Some bases may turn out to be superfluous, and some heads may be
1268 # too. nodesbetween will return the minimal set of bases and heads
1267 # too. nodesbetween will return the minimal set of bases and heads
1269 # necessary to re-create the changegroup.
1268 # necessary to re-create the changegroup.
1270
1269
1271 # Known heads are the list of heads that it is assumed the recipient
1270 # Known heads are the list of heads that it is assumed the recipient
1272 # of this changegroup will know about.
1271 # of this changegroup will know about.
1273 knownheads = {}
1272 knownheads = {}
1274 # We assume that all parents of bases are known heads.
1273 # We assume that all parents of bases are known heads.
1275 for n in bases:
1274 for n in bases:
1276 for p in cl.parents(n):
1275 for p in cl.parents(n):
1277 if p != nullid:
1276 if p != nullid:
1278 knownheads[p] = 1
1277 knownheads[p] = 1
1279 knownheads = knownheads.keys()
1278 knownheads = knownheads.keys()
1280 if knownheads:
1279 if knownheads:
1281 # Now that we know what heads are known, we can compute which
1280 # Now that we know what heads are known, we can compute which
1282 # changesets are known. The recipient must know about all
1281 # changesets are known. The recipient must know about all
1283 # changesets required to reach the known heads from the null
1282 # changesets required to reach the known heads from the null
1284 # changeset.
1283 # changeset.
1285 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1284 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1286 junk = None
1285 junk = None
1287 # Transform the list into an ersatz set.
1286 # Transform the list into an ersatz set.
1288 has_cl_set = dict.fromkeys(has_cl_set)
1287 has_cl_set = dict.fromkeys(has_cl_set)
1289 else:
1288 else:
1290 # If there were no known heads, the recipient cannot be assumed to
1289 # If there were no known heads, the recipient cannot be assumed to
1291 # know about any changesets.
1290 # know about any changesets.
1292 has_cl_set = {}
1291 has_cl_set = {}
1293
1292
1294 # Make it easy to refer to self.manifest
1293 # Make it easy to refer to self.manifest
1295 mnfst = self.manifest
1294 mnfst = self.manifest
1296 # We don't know which manifests are missing yet
1295 # We don't know which manifests are missing yet
1297 msng_mnfst_set = {}
1296 msng_mnfst_set = {}
1298 # Nor do we know which filenodes are missing.
1297 # Nor do we know which filenodes are missing.
1299 msng_filenode_set = {}
1298 msng_filenode_set = {}
1300
1299
1301 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1300 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1302 junk = None
1301 junk = None
1303
1302
1304 # A changeset always belongs to itself, so the changenode lookup
1303 # A changeset always belongs to itself, so the changenode lookup
1305 # function for a changenode is identity.
1304 # function for a changenode is identity.
1306 def identity(x):
1305 def identity(x):
1307 return x
1306 return x
1308
1307
1309 # A function generating function. Sets up an environment for the
1308 # A function generating function. Sets up an environment for the
1310 # inner function.
1309 # inner function.
1311 def cmp_by_rev_func(revlog):
1310 def cmp_by_rev_func(revlog):
1312 # Compare two nodes by their revision number in the environment's
1311 # Compare two nodes by their revision number in the environment's
1313 # revision history. Since the revision number both represents the
1312 # revision history. Since the revision number both represents the
1314 # most efficient order to read the nodes in, and represents a
1313 # most efficient order to read the nodes in, and represents a
1315 # topological sorting of the nodes, this function is often useful.
1314 # topological sorting of the nodes, this function is often useful.
1316 def cmp_by_rev(a, b):
1315 def cmp_by_rev(a, b):
1317 return cmp(revlog.rev(a), revlog.rev(b))
1316 return cmp(revlog.rev(a), revlog.rev(b))
1318 return cmp_by_rev
1317 return cmp_by_rev
1319
1318
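# --- Editorial sketch, illustration only (not part of localrepo.py).
# cmp_by_rev_func() above is an old-style cmp() factory used with
# list.sort(cmp=...).  The same ordering expressed with a key function,
# assuming a hypothetical node -> revision-number table:

node_to_rev = {'n-a': 2, 'n-b': 0, 'n-c': 1}
nodes = ['n-a', 'n-b', 'n-c']
nodes.sort(key=node_to_rev.__getitem__)   # read order == topological order
assert nodes == ['n-b', 'n-c', 'n-a']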
1320 # If we determine that a particular file or manifest node must be a
1319 # If we determine that a particular file or manifest node must be a
1321 # node that the recipient of the changegroup will already have, we can
1320 # node that the recipient of the changegroup will already have, we can
1322 # also assume the recipient will have all the parents. This function
1321 # also assume the recipient will have all the parents. This function
1323 # prunes them from the set of missing nodes.
1322 # prunes them from the set of missing nodes.
1324 def prune_parents(revlog, hasset, msngset):
1323 def prune_parents(revlog, hasset, msngset):
1325 haslst = hasset.keys()
1324 haslst = hasset.keys()
1326 haslst.sort(cmp_by_rev_func(revlog))
1325 haslst.sort(cmp_by_rev_func(revlog))
1327 for node in haslst:
1326 for node in haslst:
1328 parentlst = [p for p in revlog.parents(node) if p != nullid]
1327 parentlst = [p for p in revlog.parents(node) if p != nullid]
1329 while parentlst:
1328 while parentlst:
1330 n = parentlst.pop()
1329 n = parentlst.pop()
1331 if n not in hasset:
1330 if n not in hasset:
1332 hasset[n] = 1
1331 hasset[n] = 1
1333 p = [p for p in revlog.parents(n) if p != nullid]
1332 p = [p for p in revlog.parents(n) if p != nullid]
1334 parentlst.extend(p)
1333 parentlst.extend(p)
1335 for n in hasset:
1334 for n in hasset:
1336 msngset.pop(n, None)
1335 msngset.pop(n, None)
1337
1336
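# --- Editorial sketch, illustration only (not part of localrepo.py).
# prune_parents() above walks the ancestors of every node the recipient is
# known to have and drops them from the "missing" set.  A toy version over a
# simple parent map:

def prune(parents, hasset, msngset):
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p is not None and p not in hasset:
                hasset.add(p)
                stack.append(p)
    for n in hasset:
        msngset.discard(n)

parents = {'d': ('c',), 'c': ('b',), 'b': ('a',), 'a': ()}
missing, has = {'a', 'b', 'c', 'd'}, {'c'}
prune(parents, has, missing)
assert missing == {'d'}          # everything at or below 'c' is already there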
1338 # This is a function generating function used to set up an environment
1337 # This is a function generating function used to set up an environment
1339 # for the inner function to execute in.
1338 # for the inner function to execute in.
1340 def manifest_and_file_collector(changedfileset):
1339 def manifest_and_file_collector(changedfileset):
1341 # This is an information gathering function that gathers
1340 # This is an information gathering function that gathers
1342 # information from each changeset node that goes out as part of
1341 # information from each changeset node that goes out as part of
1343 # the changegroup. The information gathered is a list of which
1342 # the changegroup. The information gathered is a list of which
1344 # manifest nodes are potentially required (the recipient may
1343 # manifest nodes are potentially required (the recipient may
1345 # already have them) and a total list of all files which were
1344 # already have them) and a total list of all files which were
1346 # changed in any changeset in the changegroup.
1345 # changed in any changeset in the changegroup.
1347 #
1346 #
1348 # We also remember the first changenode we saw any manifest
1347 # We also remember the first changenode we saw any manifest
1349 # referenced by so we can later determine which changenode 'owns'
1348 # referenced by so we can later determine which changenode 'owns'
1350 # the manifest.
1349 # the manifest.
1351 def collect_manifests_and_files(clnode):
1350 def collect_manifests_and_files(clnode):
1352 c = cl.read(clnode)
1351 c = cl.read(clnode)
1353 for f in c[3]:
1352 for f in c[3]:
1354 # This is to make sure we only have one instance of each
1353 # This is to make sure we only have one instance of each
1355 # filename string for each filename.
1354 # filename string for each filename.
1356 changedfileset.setdefault(f, f)
1355 changedfileset.setdefault(f, f)
1357 msng_mnfst_set.setdefault(c[0], clnode)
1356 msng_mnfst_set.setdefault(c[0], clnode)
1358 return collect_manifests_and_files
1357 return collect_manifests_and_files
1359
1358
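# --- Editorial sketch, illustration only (not part of localrepo.py).  The
# setdefault(f, f) call above keeps exactly one string object per distinct
# filename, so the dict doubles as a set of changed files and a cheap intern
# table that later lookups can reuse:

seen = {}
for name in ["a.txt", "b.txt", "a.txt"]:
    name = seen.setdefault(name, name)   # reuse the first instance we stored
assert sorted(seen) == ["a.txt", "b.txt"]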
1360 # Figure out which manifest nodes (of the ones we think might be part
1359 # Figure out which manifest nodes (of the ones we think might be part
1361 # of the changegroup) the recipient must know about and remove them
1360 # of the changegroup) the recipient must know about and remove them
1362 # from the changegroup.
1361 # from the changegroup.
1363 def prune_manifests():
1362 def prune_manifests():
1364 has_mnfst_set = {}
1363 has_mnfst_set = {}
1365 for n in msng_mnfst_set:
1364 for n in msng_mnfst_set:
1366 # If a 'missing' manifest thinks it belongs to a changenode
1365 # If a 'missing' manifest thinks it belongs to a changenode
1367 # the recipient is assumed to have, obviously the recipient
1366 # the recipient is assumed to have, obviously the recipient
1368 # must have that manifest.
1367 # must have that manifest.
1369 linknode = cl.node(mnfst.linkrev(n))
1368 linknode = cl.node(mnfst.linkrev(n))
1370 if linknode in has_cl_set:
1369 if linknode in has_cl_set:
1371 has_mnfst_set[n] = 1
1370 has_mnfst_set[n] = 1
1372 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1371 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1373
1372
1374 # Use the information collected in collect_manifests_and_files to say
1373 # Use the information collected in collect_manifests_and_files to say
1375 # which changenode any manifestnode belongs to.
1374 # which changenode any manifestnode belongs to.
1376 def lookup_manifest_link(mnfstnode):
1375 def lookup_manifest_link(mnfstnode):
1377 return msng_mnfst_set[mnfstnode]
1376 return msng_mnfst_set[mnfstnode]
1378
1377
1379 # A function generating function that sets up the initial environment
1378 # A function generating function that sets up the initial environment
1380 # for the inner function.
1379 # for the inner function.
1381 def filenode_collector(changedfiles):
1380 def filenode_collector(changedfiles):
1382 next_rev = [0]
1381 next_rev = [0]
1383 # This gathers information from each manifestnode included in the
1382 # This gathers information from each manifestnode included in the
1384 # changegroup about which filenodes the manifest node references
1383 # changegroup about which filenodes the manifest node references
1385 # so we can include those in the changegroup too.
1384 # so we can include those in the changegroup too.
1386 #
1385 #
1387 # It also remembers which changenode each filenode belongs to. It
1386 # It also remembers which changenode each filenode belongs to. It
1388 # does this by assuming a filenode belongs to the changenode that
1387 # does this by assuming a filenode belongs to the changenode that
1389 # the first manifest referencing it belongs to.
1388 # the first manifest referencing it belongs to.
1390 def collect_msng_filenodes(mnfstnode):
1389 def collect_msng_filenodes(mnfstnode):
1391 r = mnfst.rev(mnfstnode)
1390 r = mnfst.rev(mnfstnode)
1392 if r == next_rev[0]:
1391 if r == next_rev[0]:
1393 # If the last rev we looked at was the one just previous,
1392 # If the last rev we looked at was the one just previous,
1394 # we only need to see a diff.
1393 # we only need to see a diff.
1395 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1394 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1396 # For each line in the delta
1395 # For each line in the delta
1397 for dline in delta.splitlines():
1396 for dline in delta.splitlines():
1398 # get the filename and filenode for that line
1397 # get the filename and filenode for that line
1399 f, fnode = dline.split('\0')
1398 f, fnode = dline.split('\0')
1400 fnode = bin(fnode[:40])
1399 fnode = bin(fnode[:40])
1401 f = changedfiles.get(f, None)
1400 f = changedfiles.get(f, None)
1402 # And if the file is in the list of files we care
1401 # And if the file is in the list of files we care
1403 # about.
1402 # about.
1404 if f is not None:
1403 if f is not None:
1405 # Get the changenode this manifest belongs to
1404 # Get the changenode this manifest belongs to
1406 clnode = msng_mnfst_set[mnfstnode]
1405 clnode = msng_mnfst_set[mnfstnode]
1407 # Create the set of filenodes for the file if
1406 # Create the set of filenodes for the file if
1408 # there isn't one already.
1407 # there isn't one already.
1409 ndset = msng_filenode_set.setdefault(f, {})
1408 ndset = msng_filenode_set.setdefault(f, {})
1410 # And set the filenode's changelog node to the
1409 # And set the filenode's changelog node to the
1411 # manifest's if it hasn't been set already.
1410 # manifest's if it hasn't been set already.
1412 ndset.setdefault(fnode, clnode)
1411 ndset.setdefault(fnode, clnode)
1413 else:
1412 else:
1414 # Otherwise we need a full manifest.
1413 # Otherwise we need a full manifest.
1415 m = mnfst.read(mnfstnode)
1414 m = mnfst.read(mnfstnode)
1416 # For every file we care about.
1415 # For every file we care about.
1417 for f in changedfiles:
1416 for f in changedfiles:
1418 fnode = m.get(f, None)
1417 fnode = m.get(f, None)
1419 # If it's in the manifest
1418 # If it's in the manifest
1420 if fnode is not None:
1419 if fnode is not None:
1421 # See comments above.
1420 # See comments above.
1422 clnode = msng_mnfst_set[mnfstnode]
1421 clnode = msng_mnfst_set[mnfstnode]
1423 ndset = msng_filenode_set.setdefault(f, {})
1422 ndset = msng_filenode_set.setdefault(f, {})
1424 ndset.setdefault(fnode, clnode)
1423 ndset.setdefault(fnode, clnode)
1425 # Remember the revision we hope to see next.
1424 # Remember the revision we hope to see next.
1426 next_rev[0] = r + 1
1425 next_rev[0] = r + 1
1427 return collect_msng_filenodes
1426 return collect_msng_filenodes
1428
1427
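# --- Editorial sketch, illustration only (not part of localrepo.py).  Each
# manifest (or manifest delta) line pairs a filename with a 40-character hex
# filenode separated by a NUL byte, which is what dline.split('\0') above
# relies on; the [:40] slice guards against anything appended after the hash.

import binascii

line = "dir/file.txt\0" + "ab" * 20          # hypothetical manifest line
fname, hexnode = line.split("\0")
fnode = binascii.unhexlify(hexnode[:40])     # same role as bin(fnode[:40])
assert fname == "dir/file.txt" and len(fnode) == 20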
1429 # We have a list of filenodes we think we need for a file, let's remove
1428 # We have a list of filenodes we think we need for a file, let's remove
1430 # all those we know the recipient must have.
1429 # all those we know the recipient must have.
1431 def prune_filenodes(f, filerevlog):
1430 def prune_filenodes(f, filerevlog):
1432 msngset = msng_filenode_set[f]
1431 msngset = msng_filenode_set[f]
1433 hasset = {}
1432 hasset = {}
1434 # If a 'missing' filenode thinks it belongs to a changenode we
1433 # If a 'missing' filenode thinks it belongs to a changenode we
1435 # assume the recipient must have, then the recipient must have
1434 # assume the recipient must have, then the recipient must have
1436 # that filenode.
1435 # that filenode.
1437 for n in msngset:
1436 for n in msngset:
1438 clnode = cl.node(filerevlog.linkrev(n))
1437 clnode = cl.node(filerevlog.linkrev(n))
1439 if clnode in has_cl_set:
1438 if clnode in has_cl_set:
1440 hasset[n] = 1
1439 hasset[n] = 1
1441 prune_parents(filerevlog, hasset, msngset)
1440 prune_parents(filerevlog, hasset, msngset)
1442
1441
1443 # A function generator function that sets up a context for the
1442 # A function generator function that sets up a context for the
1444 # inner function.
1443 # inner function.
1445 def lookup_filenode_link_func(fname):
1444 def lookup_filenode_link_func(fname):
1446 msngset = msng_filenode_set[fname]
1445 msngset = msng_filenode_set[fname]
1447 # Lookup the changenode the filenode belongs to.
1446 # Lookup the changenode the filenode belongs to.
1448 def lookup_filenode_link(fnode):
1447 def lookup_filenode_link(fnode):
1449 return msngset[fnode]
1448 return msngset[fnode]
1450 return lookup_filenode_link
1449 return lookup_filenode_link
1451
1450
1452 # Now that we have all these utility functions to help out and
1451 # Now that we have all these utility functions to help out and
1453 # logically divide up the task, generate the group.
1452 # logically divide up the task, generate the group.
1454 def gengroup():
1453 def gengroup():
1455 # The set of changed files starts empty.
1454 # The set of changed files starts empty.
1456 changedfiles = {}
1455 changedfiles = {}
1457 # Create a changenode group generator that will call our functions
1456 # Create a changenode group generator that will call our functions
1458 # back to lookup the owning changenode and collect information.
1457 # back to lookup the owning changenode and collect information.
1459 group = cl.group(msng_cl_lst, identity,
1458 group = cl.group(msng_cl_lst, identity,
1460 manifest_and_file_collector(changedfiles))
1459 manifest_and_file_collector(changedfiles))
1461 for chnk in group:
1460 for chnk in group:
1462 yield chnk
1461 yield chnk
1463
1462
1464 # The list of manifests has been collected by the generator
1463 # The list of manifests has been collected by the generator
1465 # calling our functions back.
1464 # calling our functions back.
1466 prune_manifests()
1465 prune_manifests()
1467 msng_mnfst_lst = msng_mnfst_set.keys()
1466 msng_mnfst_lst = msng_mnfst_set.keys()
1468 # Sort the manifestnodes by revision number.
1467 # Sort the manifestnodes by revision number.
1469 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1468 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1470 # Create a generator for the manifestnodes that calls our lookup
1469 # Create a generator for the manifestnodes that calls our lookup
1471 # and data collection functions back.
1470 # and data collection functions back.
1472 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1471 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1473 filenode_collector(changedfiles))
1472 filenode_collector(changedfiles))
1474 for chnk in group:
1473 for chnk in group:
1475 yield chnk
1474 yield chnk
1476
1475
1477 # These are no longer needed, dereference and toss the memory for
1476 # These are no longer needed, dereference and toss the memory for
1478 # them.
1477 # them.
1479 msng_mnfst_lst = None
1478 msng_mnfst_lst = None
1480 msng_mnfst_set.clear()
1479 msng_mnfst_set.clear()
1481
1480
1482 changedfiles = changedfiles.keys()
1481 changedfiles = changedfiles.keys()
1483 changedfiles.sort()
1482 changedfiles.sort()
1484 # Go through all our files in order sorted by name.
1483 # Go through all our files in order sorted by name.
1485 for fname in changedfiles:
1484 for fname in changedfiles:
1486 filerevlog = self.file(fname)
1485 filerevlog = self.file(fname)
1487 # Toss out the filenodes that the recipient isn't really
1486 # Toss out the filenodes that the recipient isn't really
1488 # missing.
1487 # missing.
1489 if msng_filenode_set.has_key(fname):
1488 if msng_filenode_set.has_key(fname):
1490 prune_filenodes(fname, filerevlog)
1489 prune_filenodes(fname, filerevlog)
1491 msng_filenode_lst = msng_filenode_set[fname].keys()
1490 msng_filenode_lst = msng_filenode_set[fname].keys()
1492 else:
1491 else:
1493 msng_filenode_lst = []
1492 msng_filenode_lst = []
1494 # If any filenodes are left, generate the group for them,
1493 # If any filenodes are left, generate the group for them,
1495 # otherwise don't bother.
1494 # otherwise don't bother.
1496 if len(msng_filenode_lst) > 0:
1495 if len(msng_filenode_lst) > 0:
1497 yield changegroup.genchunk(fname)
1496 yield changegroup.genchunk(fname)
1498 # Sort the filenodes by their revision #
1497 # Sort the filenodes by their revision #
1499 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1498 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1500 # Create a group generator and only pass in a changenode
1499 # Create a group generator and only pass in a changenode
1501 # lookup function, as we don't need to collect any information
1500 # lookup function, as we don't need to collect any information
1502 # from filenodes.
1501 # from filenodes.
1503 group = filerevlog.group(msng_filenode_lst,
1502 group = filerevlog.group(msng_filenode_lst,
1504 lookup_filenode_link_func(fname))
1503 lookup_filenode_link_func(fname))
1505 for chnk in group:
1504 for chnk in group:
1506 yield chnk
1505 yield chnk
1507 if msng_filenode_set.has_key(fname):
1506 if msng_filenode_set.has_key(fname):
1508 # Don't need this anymore, toss it to free memory.
1507 # Don't need this anymore, toss it to free memory.
1509 del msng_filenode_set[fname]
1508 del msng_filenode_set[fname]
1510 # Signal that no more groups are left.
1509 # Signal that no more groups are left.
1511 yield changegroup.closechunk()
1510 yield changegroup.closechunk()
1512
1511
1513 if msng_cl_lst:
1512 if msng_cl_lst:
1514 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1513 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1515
1514
1516 return util.chunkbuffer(gengroup())
1515 return util.chunkbuffer(gengroup())
1517
1516
1518 def changegroup(self, basenodes, source):
1517 def changegroup(self, basenodes, source):
1519 """Generate a changegroup of all nodes that we have that a recipient
1518 """Generate a changegroup of all nodes that we have that a recipient
1520 doesn't.
1519 doesn't.
1521
1520
1522 This is much easier than the previous function as we can assume that
1521 This is much easier than the previous function as we can assume that
1523 the recipient has any changenode we aren't sending them."""
1522 the recipient has any changenode we aren't sending them."""
1524
1523
1525 self.hook('preoutgoing', throw=True, source=source)
1524 self.hook('preoutgoing', throw=True, source=source)
1526
1525
1527 cl = self.changelog
1526 cl = self.changelog
1528 nodes = cl.nodesbetween(basenodes, None)[0]
1527 nodes = cl.nodesbetween(basenodes, None)[0]
1529 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1528 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1530
1529
1531 def identity(x):
1530 def identity(x):
1532 return x
1531 return x
1533
1532
1534 def gennodelst(revlog):
1533 def gennodelst(revlog):
1535 for r in xrange(0, revlog.count()):
1534 for r in xrange(0, revlog.count()):
1536 n = revlog.node(r)
1535 n = revlog.node(r)
1537 if revlog.linkrev(n) in revset:
1536 if revlog.linkrev(n) in revset:
1538 yield n
1537 yield n
1539
1538
1540 def changed_file_collector(changedfileset):
1539 def changed_file_collector(changedfileset):
1541 def collect_changed_files(clnode):
1540 def collect_changed_files(clnode):
1542 c = cl.read(clnode)
1541 c = cl.read(clnode)
1543 for fname in c[3]:
1542 for fname in c[3]:
1544 changedfileset[fname] = 1
1543 changedfileset[fname] = 1
1545 return collect_changed_files
1544 return collect_changed_files
1546
1545
1547 def lookuprevlink_func(revlog):
1546 def lookuprevlink_func(revlog):
1548 def lookuprevlink(n):
1547 def lookuprevlink(n):
1549 return cl.node(revlog.linkrev(n))
1548 return cl.node(revlog.linkrev(n))
1550 return lookuprevlink
1549 return lookuprevlink
1551
1550
1552 def gengroup():
1551 def gengroup():
1553 # construct a list of all changed files
1552 # construct a list of all changed files
1554 changedfiles = {}
1553 changedfiles = {}
1555
1554
1556 for chnk in cl.group(nodes, identity,
1555 for chnk in cl.group(nodes, identity,
1557 changed_file_collector(changedfiles)):
1556 changed_file_collector(changedfiles)):
1558 yield chnk
1557 yield chnk
1559 changedfiles = changedfiles.keys()
1558 changedfiles = changedfiles.keys()
1560 changedfiles.sort()
1559 changedfiles.sort()
1561
1560
1562 mnfst = self.manifest
1561 mnfst = self.manifest
1563 nodeiter = gennodelst(mnfst)
1562 nodeiter = gennodelst(mnfst)
1564 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1563 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1565 yield chnk
1564 yield chnk
1566
1565
1567 for fname in changedfiles:
1566 for fname in changedfiles:
1568 filerevlog = self.file(fname)
1567 filerevlog = self.file(fname)
1569 nodeiter = gennodelst(filerevlog)
1568 nodeiter = gennodelst(filerevlog)
1570 nodeiter = list(nodeiter)
1569 nodeiter = list(nodeiter)
1571 if nodeiter:
1570 if nodeiter:
1572 yield changegroup.genchunk(fname)
1571 yield changegroup.genchunk(fname)
1573 lookup = lookuprevlink_func(filerevlog)
1572 lookup = lookuprevlink_func(filerevlog)
1574 for chnk in filerevlog.group(nodeiter, lookup):
1573 for chnk in filerevlog.group(nodeiter, lookup):
1575 yield chnk
1574 yield chnk
1576
1575
1577 yield changegroup.closechunk()
1576 yield changegroup.closechunk()
1578
1577
1579 if nodes:
1578 if nodes:
1580 self.hook('outgoing', node=hex(nodes[0]), source=source)
1579 self.hook('outgoing', node=hex(nodes[0]), source=source)
1581
1580
1582 return util.chunkbuffer(gengroup())
1581 return util.chunkbuffer(gengroup())
1583
1582
1584 def addchangegroup(self, source, srctype):
1583 def addchangegroup(self, source, srctype):
1585 """add changegroup to repo.
1584 """add changegroup to repo.
1586 returns number of heads modified or added + 1."""
1585 returns number of heads modified or added + 1."""
1587
1586
1588 def csmap(x):
1587 def csmap(x):
1589 self.ui.debug(_("add changeset %s\n") % short(x))
1588 self.ui.debug(_("add changeset %s\n") % short(x))
1590 return cl.count()
1589 return cl.count()
1591
1590
1592 def revmap(x):
1591 def revmap(x):
1593 return cl.rev(x)
1592 return cl.rev(x)
1594
1593
1595 if not source:
1594 if not source:
1596 return 0
1595 return 0
1597
1596
1598 self.hook('prechangegroup', throw=True, source=srctype)
1597 self.hook('prechangegroup', throw=True, source=srctype)
1599
1598
1600 changesets = files = revisions = 0
1599 changesets = files = revisions = 0
1601
1600
1602 tr = self.transaction()
1601 tr = self.transaction()
1603
1602
1604 # write changelog data to temp files so concurrent readers will not see
1603 # write changelog data to temp files so concurrent readers will not see
1605 # an inconsistent view
1604 # an inconsistent view
1606 cl = None
1605 cl = None
1607 try:
1606 try:
1608 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1607 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1609
1608
1610 oldheads = len(cl.heads())
1609 oldheads = len(cl.heads())
1611
1610
1612 # pull off the changeset group
1611 # pull off the changeset group
1613 self.ui.status(_("adding changesets\n"))
1612 self.ui.status(_("adding changesets\n"))
1614 cor = cl.count() - 1
1613 cor = cl.count() - 1
1615 chunkiter = changegroup.chunkiter(source)
1614 chunkiter = changegroup.chunkiter(source)
1616 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1615 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1617 raise util.Abort(_("received changelog group is empty"))
1616 raise util.Abort(_("received changelog group is empty"))
1618 cnr = cl.count() - 1
1617 cnr = cl.count() - 1
1619 changesets = cnr - cor
1618 changesets = cnr - cor
1620
1619
1621 # pull off the manifest group
1620 # pull off the manifest group
1622 self.ui.status(_("adding manifests\n"))
1621 self.ui.status(_("adding manifests\n"))
1623 chunkiter = changegroup.chunkiter(source)
1622 chunkiter = changegroup.chunkiter(source)
1624 # no need to check for empty manifest group here:
1623 # no need to check for empty manifest group here:
1625 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1624 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1626 # no new manifest will be created and the manifest group will
1625 # no new manifest will be created and the manifest group will
1627 # be empty during the pull
1626 # be empty during the pull
1628 self.manifest.addgroup(chunkiter, revmap, tr)
1627 self.manifest.addgroup(chunkiter, revmap, tr)
1629
1628
1630 # process the files
1629 # process the files
1631 self.ui.status(_("adding file changes\n"))
1630 self.ui.status(_("adding file changes\n"))
1632 while 1:
1631 while 1:
1633 f = changegroup.getchunk(source)
1632 f = changegroup.getchunk(source)
1634 if not f:
1633 if not f:
1635 break
1634 break
1636 self.ui.debug(_("adding %s revisions\n") % f)
1635 self.ui.debug(_("adding %s revisions\n") % f)
1637 fl = self.file(f)
1636 fl = self.file(f)
1638 o = fl.count()
1637 o = fl.count()
1639 chunkiter = changegroup.chunkiter(source)
1638 chunkiter = changegroup.chunkiter(source)
1640 if fl.addgroup(chunkiter, revmap, tr) is None:
1639 if fl.addgroup(chunkiter, revmap, tr) is None:
1641 raise util.Abort(_("received file revlog group is empty"))
1640 raise util.Abort(_("received file revlog group is empty"))
1642 revisions += fl.count() - o
1641 revisions += fl.count() - o
1643 files += 1
1642 files += 1
1644
1643
1645 cl.writedata()
1644 cl.writedata()
1646 finally:
1645 finally:
1647 if cl:
1646 if cl:
1648 cl.cleanup()
1647 cl.cleanup()
1649
1648
1650 # make changelog see real files again
1649 # make changelog see real files again
1651 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1650 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1652 self.changelog.checkinlinesize(tr)
1651 self.changelog.checkinlinesize(tr)
1653
1652
1654 newheads = len(self.changelog.heads())
1653 newheads = len(self.changelog.heads())
1655 heads = ""
1654 heads = ""
1656 if oldheads and newheads != oldheads:
1655 if oldheads and newheads != oldheads:
1657 heads = _(" (%+d heads)") % (newheads - oldheads)
1656 heads = _(" (%+d heads)") % (newheads - oldheads)
1658
1657
1659 self.ui.status(_("added %d changesets"
1658 self.ui.status(_("added %d changesets"
1660 " with %d changes to %d files%s\n")
1659 " with %d changes to %d files%s\n")
1661 % (changesets, revisions, files, heads))
1660 % (changesets, revisions, files, heads))
1662
1661
1663 if changesets > 0:
1662 if changesets > 0:
1664 self.hook('pretxnchangegroup', throw=True,
1663 self.hook('pretxnchangegroup', throw=True,
1665 node=hex(self.changelog.node(cor+1)), source=srctype)
1664 node=hex(self.changelog.node(cor+1)), source=srctype)
1666
1665
1667 tr.close()
1666 tr.close()
1668
1667
1669 if changesets > 0:
1668 if changesets > 0:
1670 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1669 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1671 source=srctype)
1670 source=srctype)
1672
1671
1673 for i in range(cor + 1, cnr + 1):
1672 for i in range(cor + 1, cnr + 1):
1674 self.hook("incoming", node=hex(self.changelog.node(i)),
1673 self.hook("incoming", node=hex(self.changelog.node(i)),
1675 source=srctype)
1674 source=srctype)
1676
1675
1677 return newheads - oldheads + 1
1676 return newheads - oldheads + 1
1678
1677
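# --- Editorial sketch, illustration only (not part of localrepo.py).  The
# status line and return value of addchangegroup() above are derived from the
# head counts before and after the group is applied:

oldheads, newheads = 1, 3                       # hypothetical counts
delta = " (%+d heads)" % (newheads - oldheads)  # -> " (+2 heads)"
assert delta == " (+2 heads)"
assert newheads - oldheads + 1 == 3             # the value the method returns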
1679 def update(self, node, allow=False, force=False, choose=None,
1678 def update(self, node, allow=False, force=False, choose=None,
1680 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1679 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1681 pl = self.dirstate.parents()
1680 pl = self.dirstate.parents()
1682 if not force and pl[1] != nullid:
1681 if not force and pl[1] != nullid:
1683 raise util.Abort(_("outstanding uncommitted merges"))
1682 raise util.Abort(_("outstanding uncommitted merges"))
1684
1683
1685 err = False
1684 err = False
1686
1685
1687 p1, p2 = pl[0], node
1686 p1, p2 = pl[0], node
1688 pa = self.changelog.ancestor(p1, p2)
1687 pa = self.changelog.ancestor(p1, p2)
1689 m1n = self.changelog.read(p1)[0]
1688 m1n = self.changelog.read(p1)[0]
1690 m2n = self.changelog.read(p2)[0]
1689 m2n = self.changelog.read(p2)[0]
1691 man = self.manifest.ancestor(m1n, m2n)
1690 man = self.manifest.ancestor(m1n, m2n)
1692 m1 = self.manifest.read(m1n)
1691 m1 = self.manifest.read(m1n)
1693 mf1 = self.manifest.readflags(m1n)
1692 mf1 = self.manifest.readflags(m1n)
1694 m2 = self.manifest.read(m2n).copy()
1693 m2 = self.manifest.read(m2n).copy()
1695 mf2 = self.manifest.readflags(m2n)
1694 mf2 = self.manifest.readflags(m2n)
1696 ma = self.manifest.read(man)
1695 ma = self.manifest.read(man)
1697 mfa = self.manifest.readflags(man)
1696 mfa = self.manifest.readflags(man)
1698
1697
1699 modified, added, removed, deleted, unknown = self.changes()
1698 modified, added, removed, deleted, unknown = self.changes()
1700
1699
1701 # is this a jump, or a merge? i.e. is there a linear path
1700 # is this a jump, or a merge? i.e. is there a linear path
1702 # from p1 to p2?
1701 # from p1 to p2?
1703 linear_path = (pa == p1 or pa == p2)
1702 linear_path = (pa == p1 or pa == p2)
1704
1703
1705 if allow and linear_path:
1704 if allow and linear_path:
1706 raise util.Abort(_("there is nothing to merge, just use "
1705 raise util.Abort(_("there is nothing to merge, just use "
1707 "'hg update' or look at 'hg heads'"))
1706 "'hg update' or look at 'hg heads'"))
1708 if allow and not forcemerge:
1707 if allow and not forcemerge:
1709 if modified or added or removed:
1708 if modified or added or removed:
1710 raise util.Abort(_("outstanding uncommitted changes"))
1709 raise util.Abort(_("outstanding uncommitted changes"))
1711
1710
1712 if not forcemerge and not force:
1711 if not forcemerge and not force:
1713 for f in unknown:
1712 for f in unknown:
1714 if f in m2:
1713 if f in m2:
1715 t1 = self.wread(f)
1714 t1 = self.wread(f)
1716 t2 = self.file(f).read(m2[f])
1715 t2 = self.file(f).read(m2[f])
1717 if cmp(t1, t2) != 0:
1716 if cmp(t1, t2) != 0:
1718 raise util.Abort(_("'%s' already exists in the working"
1717 raise util.Abort(_("'%s' already exists in the working"
1719 " dir and differs from remote") % f)
1718 " dir and differs from remote") % f)
1720
1719
1721 # resolve the manifest to determine which files
1720 # resolve the manifest to determine which files
1722 # we care about merging
1721 # we care about merging
1723 self.ui.note(_("resolving manifests\n"))
1722 self.ui.note(_("resolving manifests\n"))
1724 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1723 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1725 (force, allow, moddirstate, linear_path))
1724 (force, allow, moddirstate, linear_path))
1726 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1725 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1727 (short(man), short(m1n), short(m2n)))
1726 (short(man), short(m1n), short(m2n)))
1728
1727
1729 merge = {}
1728 merge = {}
1730 get = {}
1729 get = {}
1731 remove = []
1730 remove = []
1732
1731
1733 # construct a working dir manifest
1732 # construct a working dir manifest
1734 mw = m1.copy()
1733 mw = m1.copy()
1735 mfw = mf1.copy()
1734 mfw = mf1.copy()
1736 umap = dict.fromkeys(unknown)
1735 umap = dict.fromkeys(unknown)
1737
1736
1738 for f in added + modified + unknown:
1737 for f in added + modified + unknown:
1739 mw[f] = ""
1738 mw[f] = ""
1740 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1739 mfw.set(f, util.is_exec(self.wjoin(f), mfw.execf(f)))
1741
1740
1742 if moddirstate and not wlock:
1741 if moddirstate and not wlock:
1743 wlock = self.wlock()
1742 wlock = self.wlock()
1744
1743
1745 for f in deleted + removed:
1744 for f in deleted + removed:
1746 if f in mw:
1745 if f in mw:
1747 del mw[f]
1746 del mw[f]
1748
1747
1749 # If we're jumping between revisions (as opposed to merging),
1748 # If we're jumping between revisions (as opposed to merging),
1750 # and if neither the working directory nor the target rev has
1749 # and if neither the working directory nor the target rev has
1751 # the file, then we need to remove it from the dirstate, to
1750 # the file, then we need to remove it from the dirstate, to
1752 # prevent the dirstate from listing the file when it is no
1751 # prevent the dirstate from listing the file when it is no
1753 # longer in the manifest.
1752 # longer in the manifest.
1754 if moddirstate and linear_path and f not in m2:
1753 if moddirstate and linear_path and f not in m2:
1755 self.dirstate.forget((f,))
1754 self.dirstate.forget((f,))
1756
1755
1757 # Compare manifests
1756 # Compare manifests
1758 for f, n in mw.iteritems():
1757 for f, n in mw.iteritems():
1759 if choose and not choose(f):
1758 if choose and not choose(f):
1760 continue
1759 continue
1761 if f in m2:
1760 if f in m2:
1762 s = 0
1761 s = 0
1763
1762
1764 # is the wfile new since m1, and match m2?
1763 # is the wfile new since m1, and match m2?
1765 if f not in m1:
1764 if f not in m1:
1766 t1 = self.wread(f)
1765 t1 = self.wread(f)
1767 t2 = self.file(f).read(m2[f])
1766 t2 = self.file(f).read(m2[f])
1768 if cmp(t1, t2) == 0:
1767 if cmp(t1, t2) == 0:
1769 n = m2[f]
1768 n = m2[f]
1770 del t1, t2
1769 del t1, t2
1771
1770
1772 # are files different?
1771 # are files different?
1773 if n != m2[f]:
1772 if n != m2[f]:
1774 a = ma.get(f, nullid)
1773 a = ma.get(f, nullid)
1775 # are both different from the ancestor?
1774 # are both different from the ancestor?
1776 if n != a and m2[f] != a:
1775 if n != a and m2[f] != a:
1777 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1776 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1778 # merge executable bits
1777 # merge executable bits
1779 # "if we changed or they changed, change in merge"
1778 # "if we changed or they changed, change in merge"
1780 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1779 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
1781 mode = ((a^b) | (a^c)) ^ a
1780 mode = ((a^b) | (a^c)) ^ a
1782 merge[f] = (m1.get(f, nullid), m2[f], mode)
1781 merge[f] = (m1.get(f, nullid), m2[f], mode)
1783 s = 1
1782 s = 1
1784 # are we clobbering?
1783 # are we clobbering?
1785 # is remote's version newer?
1784 # is remote's version newer?
1786 # or are we going back in time?
1785 # or are we going back in time?
1787 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1786 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1788 self.ui.debug(_(" remote %s is newer, get\n") % f)
1787 self.ui.debug(_(" remote %s is newer, get\n") % f)
1789 get[f] = m2[f]
1788 get[f] = m2[f]
1790 s = 1
1789 s = 1
1791 elif f in umap or f in added:
1790 elif f in umap or f in added:
1792 # this unknown file is the same as the checkout
1791 # this unknown file is the same as the checkout
1793 # we need to reset the dirstate if the file was added
1792 # we need to reset the dirstate if the file was added
1794 get[f] = m2[f]
1793 get[f] = m2[f]
1795
1794
1796 if not s and mfw[f] != mf2[f]:
1795 if not s and mfw[f] != mf2[f]:
1797 if force:
1796 if force:
1798 self.ui.debug(_(" updating permissions for %s\n") % f)
1797 self.ui.debug(_(" updating permissions for %s\n") % f)
1799 util.set_exec(self.wjoin(f), mf2[f])
1798 util.set_exec(self.wjoin(f), mf2.execf(f))
1800 else:
1799 else:
1801 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1800 a, b, c = mfa.execf(f), mfw.execf(f), mf2.execf(f)
1802 mode = ((a^b) | (a^c)) ^ a
1801 mode = ((a^b) | (a^c)) ^ a
1803 if mode != b:
1802 if mode != b:
1804 self.ui.debug(_(" updating permissions for %s\n")
1803 self.ui.debug(_(" updating permissions for %s\n")
1805 % f)
1804 % f)
1806 util.set_exec(self.wjoin(f), mode)
1805 util.set_exec(self.wjoin(f), mode)
1807 del m2[f]
1806 del m2[f]
1808 elif f in ma:
1807 elif f in ma:
1809 if n != ma[f]:
1808 if n != ma[f]:
1810 r = _("d")
1809 r = _("d")
1811 if not force and (linear_path or allow):
1810 if not force and (linear_path or allow):
1812 r = self.ui.prompt(
1811 r = self.ui.prompt(
1813 (_(" local changed %s which remote deleted\n") % f) +
1812 (_(" local changed %s which remote deleted\n") % f) +
1814 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1813 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1815 if r == _("d"):
1814 if r == _("d"):
1816 remove.append(f)
1815 remove.append(f)
1817 else:
1816 else:
1818 self.ui.debug(_("other deleted %s\n") % f)
1817 self.ui.debug(_("other deleted %s\n") % f)
1819 remove.append(f) # other deleted it
1818 remove.append(f) # other deleted it
1820 else:
1819 else:
1821 # file is created on branch or in working directory
1820 # file is created on branch or in working directory
1822 if force and f not in umap:
1821 if force and f not in umap:
1823 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1822 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1824 remove.append(f)
1823 remove.append(f)
1825 elif n == m1.get(f, nullid): # same as parent
1824 elif n == m1.get(f, nullid): # same as parent
1826 if p2 == pa: # going backwards?
1825 if p2 == pa: # going backwards?
1827 self.ui.debug(_("remote deleted %s\n") % f)
1826 self.ui.debug(_("remote deleted %s\n") % f)
1828 remove.append(f)
1827 remove.append(f)
1829 else:
1828 else:
1830 self.ui.debug(_("local modified %s, keeping\n") % f)
1829 self.ui.debug(_("local modified %s, keeping\n") % f)
1831 else:
1830 else:
1832 self.ui.debug(_("working dir created %s, keeping\n") % f)
1831 self.ui.debug(_("working dir created %s, keeping\n") % f)
1833
1832
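# --- Editorial sketch, illustration only (not part of localrepo.py).  The
# exec-bit rule mode = ((a^b) | (a^c)) ^ a used in the loop above implements
# "if we changed it or they changed it, take the changed value".
# Exhaustively, for boolean flags:

def merged_exec(a, b, c):            # a = ancestor, b = local, c = remote
    return ((a ^ b) | (a ^ c)) ^ a

for a in (False, True):
    for b in (False, True):
        for c in (False, True):
            expected = a if (b == a and c == a) else (b if b != a else c)
            assert merged_exec(a, b, c) == expected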
1834 for f, n in m2.iteritems():
1833 for f, n in m2.iteritems():
1835 if choose and not choose(f):
1834 if choose and not choose(f):
1836 continue
1835 continue
1837 if f[0] == "/":
1836 if f[0] == "/":
1838 continue
1837 continue
1839 if f in ma and n != ma[f]:
1838 if f in ma and n != ma[f]:
1840 r = _("k")
1839 r = _("k")
1841 if not force and (linear_path or allow):
1840 if not force and (linear_path or allow):
1842 r = self.ui.prompt(
1841 r = self.ui.prompt(
1843 (_("remote changed %s which local deleted\n") % f) +
1842 (_("remote changed %s which local deleted\n") % f) +
1844 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1843 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1845 if r == _("k"):
1844 if r == _("k"):
1846 get[f] = n
1845 get[f] = n
1847 elif f not in ma:
1846 elif f not in ma:
1848 self.ui.debug(_("remote created %s\n") % f)
1847 self.ui.debug(_("remote created %s\n") % f)
1849 get[f] = n
1848 get[f] = n
1850 else:
1849 else:
1851 if force or p2 == pa: # going backwards?
1850 if force or p2 == pa: # going backwards?
1852 self.ui.debug(_("local deleted %s, recreating\n") % f)
1851 self.ui.debug(_("local deleted %s, recreating\n") % f)
1853 get[f] = n
1852 get[f] = n
1854 else:
1853 else:
1855 self.ui.debug(_("local deleted %s\n") % f)
1854 self.ui.debug(_("local deleted %s\n") % f)
1856
1855
1857 del mw, m1, m2, ma
1856 del mw, m1, m2, ma
1858
1857
1859 if force:
1858 if force:
1860 for f in merge:
1859 for f in merge:
1861 get[f] = merge[f][1]
1860 get[f] = merge[f][1]
1862 merge = {}
1861 merge = {}
1863
1862
1864 if linear_path or force:
1863 if linear_path or force:
1865 # we don't need to do any magic, just jump to the new rev
1864 # we don't need to do any magic, just jump to the new rev
1866 branch_merge = False
1865 branch_merge = False
1867 p1, p2 = p2, nullid
1866 p1, p2 = p2, nullid
1868 else:
1867 else:
1869 if not allow:
1868 if not allow:
1870 self.ui.status(_("this update spans a branch"
1869 self.ui.status(_("this update spans a branch"
1871 " affecting the following files:\n"))
1870 " affecting the following files:\n"))
1872 fl = merge.keys() + get.keys()
1871 fl = merge.keys() + get.keys()
1873 fl.sort()
1872 fl.sort()
1874 for f in fl:
1873 for f in fl:
1875 cf = ""
1874 cf = ""
1876 if f in merge:
1875 if f in merge:
1877 cf = _(" (resolve)")
1876 cf = _(" (resolve)")
1878 self.ui.status(" %s%s\n" % (f, cf))
1877 self.ui.status(" %s%s\n" % (f, cf))
1879 self.ui.warn(_("aborting update spanning branches!\n"))
1878 self.ui.warn(_("aborting update spanning branches!\n"))
1880 self.ui.status(_("(use 'hg merge' to merge across branches"
1879 self.ui.status(_("(use 'hg merge' to merge across branches"
1881 " or 'hg update -C' to lose changes)\n"))
1880 " or 'hg update -C' to lose changes)\n"))
1882 return 1
1881 return 1
1883 branch_merge = True
1882 branch_merge = True
1884
1883
1885 xp1 = hex(p1)
1884 xp1 = hex(p1)
1886 xp2 = hex(p2)
1885 xp2 = hex(p2)
1887 if p2 == nullid: xxp2 = ''
1886 if p2 == nullid: xxp2 = ''
1888 else: xxp2 = xp2
1887 else: xxp2 = xp2
1889
1888
1890 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1889 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1891
1890
1892 # get the files we don't need to change
1891 # get the files we don't need to change
1893 files = get.keys()
1892 files = get.keys()
1894 files.sort()
1893 files.sort()
1895 for f in files:
1894 for f in files:
1896 if f[0] == "/":
1895 if f[0] == "/":
1897 continue
1896 continue
1898 self.ui.note(_("getting %s\n") % f)
1897 self.ui.note(_("getting %s\n") % f)
1899 t = self.file(f).read(get[f])
1898 t = self.file(f).read(get[f])
1900 self.wwrite(f, t)
1899 self.wwrite(f, t)
1901 util.set_exec(self.wjoin(f), mf2[f])
1900 util.set_exec(self.wjoin(f), mf2.execf(f))
1902 if moddirstate:
1901 if moddirstate:
1903 if branch_merge:
1902 if branch_merge:
1904 self.dirstate.update([f], 'n', st_mtime=-1)
1903 self.dirstate.update([f], 'n', st_mtime=-1)
1905 else:
1904 else:
1906 self.dirstate.update([f], 'n')
1905 self.dirstate.update([f], 'n')
1907
1906
1908 # merge the tricky bits
1907 # merge the tricky bits
1909 failedmerge = []
1908 failedmerge = []
1910 files = merge.keys()
1909 files = merge.keys()
1911 files.sort()
1910 files.sort()
1912 for f in files:
1911 for f in files:
1913 self.ui.status(_("merging %s\n") % f)
1912 self.ui.status(_("merging %s\n") % f)
1914 my, other, flag = merge[f]
1913 my, other, flag = merge[f]
1915 ret = self.merge3(f, my, other, xp1, xp2)
1914 ret = self.merge3(f, my, other, xp1, xp2)
1916 if ret:
1915 if ret:
1917 err = True
1916 err = True
1918 failedmerge.append(f)
1917 failedmerge.append(f)
1919 util.set_exec(self.wjoin(f), flag)
1918 util.set_exec(self.wjoin(f), flag)
1920 if moddirstate:
1919 if moddirstate:
1921 if branch_merge:
1920 if branch_merge:
1922 # We've done a branch merge, mark this file as merged
1921 # We've done a branch merge, mark this file as merged
1923 # so that we properly record the merger later
1922 # so that we properly record the merger later
1924 self.dirstate.update([f], 'm')
1923 self.dirstate.update([f], 'm')
1925 else:
1924 else:
1926 # We've update-merged a locally modified file, so
1925 # We've update-merged a locally modified file, so
1927 # we set the dirstate to emulate a normal checkout
1926 # we set the dirstate to emulate a normal checkout
1928 # of that file some time in the past. Thus our
1927 # of that file some time in the past. Thus our
1929 # merge will appear as a normal local file
1928 # merge will appear as a normal local file
1930 # modification.
1929 # modification.
1931 f_len = len(self.file(f).read(other))
1930 f_len = len(self.file(f).read(other))
1932 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1931 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1933
1932
1934 remove.sort()
1933 remove.sort()
1935 for f in remove:
1934 for f in remove:
1936 self.ui.note(_("removing %s\n") % f)
1935 self.ui.note(_("removing %s\n") % f)
1937 util.audit_path(f)
1936 util.audit_path(f)
1938 try:
1937 try:
1939 util.unlink(self.wjoin(f))
1938 util.unlink(self.wjoin(f))
1940 except OSError, inst:
1939 except OSError, inst:
1941 if inst.errno != errno.ENOENT:
1940 if inst.errno != errno.ENOENT:
1942 self.ui.warn(_("update failed to remove %s: %s!\n") %
1941 self.ui.warn(_("update failed to remove %s: %s!\n") %
1943 (f, inst.strerror))
1942 (f, inst.strerror))
1944 if moddirstate:
1943 if moddirstate:
1945 if branch_merge:
1944 if branch_merge:
1946 self.dirstate.update(remove, 'r')
1945 self.dirstate.update(remove, 'r')
1947 else:
1946 else:
1948 self.dirstate.forget(remove)
1947 self.dirstate.forget(remove)
1949
1948
1950 if moddirstate:
1949 if moddirstate:
1951 self.dirstate.setparents(p1, p2)
1950 self.dirstate.setparents(p1, p2)
1952
1951
1953 if show_stats:
1952 if show_stats:
1954 stats = ((len(get), _("updated")),
1953 stats = ((len(get), _("updated")),
1955 (len(merge) - len(failedmerge), _("merged")),
1954 (len(merge) - len(failedmerge), _("merged")),
1956 (len(remove), _("removed")),
1955 (len(remove), _("removed")),
1957 (len(failedmerge), _("unresolved")))
1956 (len(failedmerge), _("unresolved")))
1958 note = ", ".join([_("%d files %s") % s for s in stats])
1957 note = ", ".join([_("%d files %s") % s for s in stats])
1959 self.ui.status("%s\n" % note)
1958 self.ui.status("%s\n" % note)
1960 if moddirstate:
1959 if moddirstate:
1961 if branch_merge:
1960 if branch_merge:
1962 if failedmerge:
1961 if failedmerge:
1963 self.ui.status(_("There are unresolved merges,"
1962 self.ui.status(_("There are unresolved merges,"
1964 " you can redo the full merge using:\n"
1963 " you can redo the full merge using:\n"
1965 " hg update -C %s\n"
1964 " hg update -C %s\n"
1966 " hg merge %s\n"
1965 " hg merge %s\n"
1967 % (self.changelog.rev(p1),
1966 % (self.changelog.rev(p1),
1968 self.changelog.rev(p2))))
1967 self.changelog.rev(p2))))
1969 else:
1968 else:
1970 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1969 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1971 elif failedmerge:
1970 elif failedmerge:
1972 self.ui.status(_("There are unresolved merges with"
1971 self.ui.status(_("There are unresolved merges with"
1973 " locally modified files.\n"))
1972 " locally modified files.\n"))
1974
1973
1975 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1974 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1976 return err
1975 return err
1977
1976
1978 def merge3(self, fn, my, other, p1, p2):
1977 def merge3(self, fn, my, other, p1, p2):
1979 """perform a 3-way merge in the working directory"""
1978 """perform a 3-way merge in the working directory"""
1980
1979
1981 def temp(prefix, node):
1980 def temp(prefix, node):
1982 pre = "%s~%s." % (os.path.basename(fn), prefix)
1981 pre = "%s~%s." % (os.path.basename(fn), prefix)
1983 (fd, name) = tempfile.mkstemp(prefix=pre)
1982 (fd, name) = tempfile.mkstemp(prefix=pre)
1984 f = os.fdopen(fd, "wb")
1983 f = os.fdopen(fd, "wb")
1985 self.wwrite(fn, fl.read(node), f)
1984 self.wwrite(fn, fl.read(node), f)
1986 f.close()
1985 f.close()
1987 return name
1986 return name
1988
1987
1989 fl = self.file(fn)
1988 fl = self.file(fn)
1990 base = fl.ancestor(my, other)
1989 base = fl.ancestor(my, other)
1991 a = self.wjoin(fn)
1990 a = self.wjoin(fn)
1992 b = temp("base", base)
1991 b = temp("base", base)
1993 c = temp("other", other)
1992 c = temp("other", other)
1994
1993
1995 self.ui.note(_("resolving %s\n") % fn)
1994 self.ui.note(_("resolving %s\n") % fn)
1996 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1995 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1997 (fn, short(my), short(other), short(base)))
1996 (fn, short(my), short(other), short(base)))
1998
1997
1999 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1998 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2000 or "hgmerge")
1999 or "hgmerge")
2001 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2000 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2002 environ={'HG_FILE': fn,
2001 environ={'HG_FILE': fn,
2003 'HG_MY_NODE': p1,
2002 'HG_MY_NODE': p1,
2004 'HG_OTHER_NODE': p2,
2003 'HG_OTHER_NODE': p2,
2005 'HG_FILE_MY_NODE': hex(my),
2004 'HG_FILE_MY_NODE': hex(my),
2006 'HG_FILE_OTHER_NODE': hex(other),
2005 'HG_FILE_OTHER_NODE': hex(other),
2007 'HG_FILE_BASE_NODE': hex(base)})
2006 'HG_FILE_BASE_NODE': hex(base)})
2008 if r:
2007 if r:
2009 self.ui.warn(_("merging %s failed!\n") % fn)
2008 self.ui.warn(_("merging %s failed!\n") % fn)
2010
2009
2011 os.unlink(b)
2010 os.unlink(b)
2012 os.unlink(c)
2011 os.unlink(c)
2013 return r
2012 return r
2014
2013
2015 def verify(self):
2014 def verify(self):
2016 filelinkrevs = {}
2015 filelinkrevs = {}
2017 filenodes = {}
2016 filenodes = {}
2018 changesets = revisions = files = 0
2017 changesets = revisions = files = 0
2019 errors = [0]
2018 errors = [0]
2020 warnings = [0]
2019 warnings = [0]
2021 neededmanifests = {}
2020 neededmanifests = {}
2022
2021
2023 def err(msg):
2022 def err(msg):
2024 self.ui.warn(msg + "\n")
2023 self.ui.warn(msg + "\n")
2025 errors[0] += 1
2024 errors[0] += 1
2026
2025
2027 def warn(msg):
2026 def warn(msg):
2028 self.ui.warn(msg + "\n")
2027 self.ui.warn(msg + "\n")
2029 warnings[0] += 1
2028 warnings[0] += 1
2030
2029
2031 def checksize(obj, name):
2030 def checksize(obj, name):
2032 d = obj.checksize()
2031 d = obj.checksize()
2033 if d[0]:
2032 if d[0]:
2034 err(_("%s data length off by %d bytes") % (name, d[0]))
2033 err(_("%s data length off by %d bytes") % (name, d[0]))
2035 if d[1]:
2034 if d[1]:
2036 err(_("%s index contains %d extra bytes") % (name, d[1]))
2035 err(_("%s index contains %d extra bytes") % (name, d[1]))
2037
2036
2038 def checkversion(obj, name):
2037 def checkversion(obj, name):
2039 if obj.version != revlog.REVLOGV0:
2038 if obj.version != revlog.REVLOGV0:
2040 if not revlogv1:
2039 if not revlogv1:
2041 warn(_("warning: `%s' uses revlog format 1") % name)
2040 warn(_("warning: `%s' uses revlog format 1") % name)
2042 elif revlogv1:
2041 elif revlogv1:
2043 warn(_("warning: `%s' uses revlog format 0") % name)
2042 warn(_("warning: `%s' uses revlog format 0") % name)
2044
2043
2045 revlogv1 = self.revlogversion != revlog.REVLOGV0
2044 revlogv1 = self.revlogversion != revlog.REVLOGV0
2046 if self.ui.verbose or revlogv1 != self.revlogv1:
2045 if self.ui.verbose or revlogv1 != self.revlogv1:
2047 self.ui.status(_("repository uses revlog format %d\n") %
2046 self.ui.status(_("repository uses revlog format %d\n") %
2048 (revlogv1 and 1 or 0))
2047 (revlogv1 and 1 or 0))
2049
2048
2050 seen = {}
2049 seen = {}
2051 self.ui.status(_("checking changesets\n"))
2050 self.ui.status(_("checking changesets\n"))
2052 checksize(self.changelog, "changelog")
2051 checksize(self.changelog, "changelog")
2053
2052
2054 for i in range(self.changelog.count()):
2053 for i in range(self.changelog.count()):
2055 changesets += 1
2054 changesets += 1
2056 n = self.changelog.node(i)
2055 n = self.changelog.node(i)
2057 l = self.changelog.linkrev(n)
2056 l = self.changelog.linkrev(n)
2058 if l != i:
2057 if l != i:
2059 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2058 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2060 if n in seen:
2059 if n in seen:
2061 err(_("duplicate changeset at revision %d") % i)
2060 err(_("duplicate changeset at revision %d") % i)
2062 seen[n] = 1
2061 seen[n] = 1
2063
2062
2064 for p in self.changelog.parents(n):
2063 for p in self.changelog.parents(n):
2065 if p not in self.changelog.nodemap:
2064 if p not in self.changelog.nodemap:
2066 err(_("changeset %s has unknown parent %s") %
2065 err(_("changeset %s has unknown parent %s") %
2067 (short(n), short(p)))
2066 (short(n), short(p)))
2068 try:
2067 try:
2069 changes = self.changelog.read(n)
2068 changes = self.changelog.read(n)
2070 except KeyboardInterrupt:
2069 except KeyboardInterrupt:
2071 self.ui.warn(_("interrupted"))
2070 self.ui.warn(_("interrupted"))
2072 raise
2071 raise
2073 except Exception, inst:
2072 except Exception, inst:
2074 err(_("unpacking changeset %s: %s") % (short(n), inst))
2073 err(_("unpacking changeset %s: %s") % (short(n), inst))
2075 continue
2074 continue
2076
2075
2077 neededmanifests[changes[0]] = n
2076 neededmanifests[changes[0]] = n
2078
2077
2079 for f in changes[3]:
2078 for f in changes[3]:
2080 filelinkrevs.setdefault(f, []).append(i)
2079 filelinkrevs.setdefault(f, []).append(i)
2081
2080
2082 seen = {}
2081 seen = {}
2083 self.ui.status(_("checking manifests\n"))
2082 self.ui.status(_("checking manifests\n"))
2084 checkversion(self.manifest, "manifest")
2083 checkversion(self.manifest, "manifest")
2085 checksize(self.manifest, "manifest")
2084 checksize(self.manifest, "manifest")
2086
2085
2087 for i in range(self.manifest.count()):
2086 for i in range(self.manifest.count()):
2088 n = self.manifest.node(i)
2087 n = self.manifest.node(i)
2089 l = self.manifest.linkrev(n)
2088 l = self.manifest.linkrev(n)
2090
2089
2091 if l < 0 or l >= self.changelog.count():
2090 if l < 0 or l >= self.changelog.count():
2092 err(_("bad manifest link (%d) at revision %d") % (l, i))
2091 err(_("bad manifest link (%d) at revision %d") % (l, i))
2093
2092
2094 if n in neededmanifests:
2093 if n in neededmanifests:
2095 del neededmanifests[n]
2094 del neededmanifests[n]
2096
2095
2097 if n in seen:
2096 if n in seen:
2098 err(_("duplicate manifest at revision %d") % i)
2097 err(_("duplicate manifest at revision %d") % i)
2099
2098
2100 seen[n] = 1
2099 seen[n] = 1
2101
2100
2102 for p in self.manifest.parents(n):
2101 for p in self.manifest.parents(n):
2103 if p not in self.manifest.nodemap:
2102 if p not in self.manifest.nodemap:
2104 err(_("manifest %s has unknown parent %s") %
2103 err(_("manifest %s has unknown parent %s") %
2105 (short(n), short(p)))
2104 (short(n), short(p)))
2106
2105
2107 try:
2106 try:
2108 delta = mdiff.patchtext(self.manifest.delta(n))
2107 delta = mdiff.patchtext(self.manifest.delta(n))
2109 except KeyboardInterrupt:
2108 except KeyboardInterrupt:
2110 self.ui.warn(_("interrupted"))
2109 self.ui.warn(_("interrupted"))
2111 raise
2110 raise
2112 except Exception, inst:
2111 except Exception, inst:
2113 err(_("unpacking manifest %s: %s") % (short(n), inst))
2112 err(_("unpacking manifest %s: %s") % (short(n), inst))
2114 continue
2113 continue
2115
2114
2116 try:
2115 try:
2117 ff = [ l.split('\0') for l in delta.splitlines() ]
2116 ff = [ l.split('\0') for l in delta.splitlines() ]
2118 for f, fn in ff:
2117 for f, fn in ff:
2119 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2118 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2120 except (ValueError, TypeError), inst:
2119 except (ValueError, TypeError), inst:
2121 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2120 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2122
2121
2123 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2122 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2124
2123
2125 for m, c in neededmanifests.items():
2124 for m, c in neededmanifests.items():
2126 err(_("Changeset %s refers to unknown manifest %s") %
2125 err(_("Changeset %s refers to unknown manifest %s") %
2127 (short(m), short(c)))
2126 (short(m), short(c)))
2128 del neededmanifests
2127 del neededmanifests
2129
2128
2130 for f in filenodes:
2129 for f in filenodes:
2131 if f not in filelinkrevs:
2130 if f not in filelinkrevs:
2132 err(_("file %s in manifest but not in changesets") % f)
2131 err(_("file %s in manifest but not in changesets") % f)
2133
2132
2134 for f in filelinkrevs:
2133 for f in filelinkrevs:
2135 if f not in filenodes:
2134 if f not in filenodes:
2136 err(_("file %s in changeset but not in manifest") % f)
2135 err(_("file %s in changeset but not in manifest") % f)
2137
2136
2138 self.ui.status(_("checking files\n"))
2137 self.ui.status(_("checking files\n"))
2139 ff = filenodes.keys()
2138 ff = filenodes.keys()
2140 ff.sort()
2139 ff.sort()
2141 for f in ff:
2140 for f in ff:
2142 if f == "/dev/null":
2141 if f == "/dev/null":
2143 continue
2142 continue
2144 files += 1
2143 files += 1
2145 if not f:
2144 if not f:
2146 err(_("file without name in manifest %s") % short(n))
2145 err(_("file without name in manifest %s") % short(n))
2147 continue
2146 continue
2148 fl = self.file(f)
2147 fl = self.file(f)
2149 checkversion(fl, f)
2148 checkversion(fl, f)
2150 checksize(fl, f)
2149 checksize(fl, f)
2151
2150
2152 nodes = {nullid: 1}
2151 nodes = {nullid: 1}
2153 seen = {}
2152 seen = {}
2154 for i in range(fl.count()):
2153 for i in range(fl.count()):
2155 revisions += 1
2154 revisions += 1
2156 n = fl.node(i)
2155 n = fl.node(i)
2157
2156
2158 if n in seen:
2157 if n in seen:
2159 err(_("%s: duplicate revision %d") % (f, i))
2158 err(_("%s: duplicate revision %d") % (f, i))
2160 if n not in filenodes[f]:
2159 if n not in filenodes[f]:
2161 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2160 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2162 else:
2161 else:
2163 del filenodes[f][n]
2162 del filenodes[f][n]
2164
2163
2165 flr = fl.linkrev(n)
2164 flr = fl.linkrev(n)
2166 if flr not in filelinkrevs.get(f, []):
2165 if flr not in filelinkrevs.get(f, []):
2167 err(_("%s:%s points to unexpected changeset %d")
2166 err(_("%s:%s points to unexpected changeset %d")
2168 % (f, short(n), flr))
2167 % (f, short(n), flr))
2169 else:
2168 else:
2170 filelinkrevs[f].remove(flr)
2169 filelinkrevs[f].remove(flr)
2171
2170
2172 # verify contents
2171 # verify contents
2173 try:
2172 try:
2174 t = fl.read(n)
2173 t = fl.read(n)
2175 except KeyboardInterrupt:
2174 except KeyboardInterrupt:
2176 self.ui.warn(_("interrupted"))
2175 self.ui.warn(_("interrupted"))
2177 raise
2176 raise
2178 except Exception, inst:
2177 except Exception, inst:
2179 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2178 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2180
2179
2181 # verify parents
2180 # verify parents
2182 (p1, p2) = fl.parents(n)
2181 (p1, p2) = fl.parents(n)
2183 if p1 not in nodes:
2182 if p1 not in nodes:
2184 err(_("file %s:%s unknown parent 1 %s") %
2183 err(_("file %s:%s unknown parent 1 %s") %
2185 (f, short(n), short(p1)))
2184 (f, short(n), short(p1)))
2186 if p2 not in nodes:
2185 if p2 not in nodes:
2187 err(_("file %s:%s unknown parent 2 %s") %
2186 err(_("file %s:%s unknown parent 2 %s") %
2188 (f, short(n), short(p2)))
2187 (f, short(n), short(p2)))
2189 nodes[n] = 1
2188 nodes[n] = 1
2190
2189
2191 # cross-check
2190 # cross-check
2192 for node in filenodes[f]:
2191 for node in filenodes[f]:
2193 err(_("node %s in manifests not in %s") % (hex(node), f))
2192 err(_("node %s in manifests not in %s") % (hex(node), f))
2194
2193
2195 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2194 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2196 (files, changesets, revisions))
2195 (files, changesets, revisions))
2197
2196
2198 if warnings[0]:
2197 if warnings[0]:
2199 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2198 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2200 if errors[0]:
2199 if errors[0]:
2201 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2200 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2202 return 1
2201 return 1
2203
2202
2204 # used to avoid circular references so destructors work
2203 # used to avoid circular references so destructors work
2205 def aftertrans(base):
2204 def aftertrans(base):
2206 p = base
2205 p = base
2207 def a():
2206 def a():
2208 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2207 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2209 util.rename(os.path.join(p, "journal.dirstate"),
2208 util.rename(os.path.join(p, "journal.dirstate"),
2210 os.path.join(p, "undo.dirstate"))
2209 os.path.join(p, "undo.dirstate"))
2211 return a
2210 return a
2212
2211
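Note on the merge contract implemented by merge3() above: the configured program (HGMERGE, then ui.merge, falling back to "hgmerge") is run from the repository root as cmd <working file> <base copy> <other copy>, with HG_FILE and the HG_*_NODE identifiers exported in its environment; it must leave the merged result in the working file and exit non-zero to mark the merge as failed. Below is a minimal sketch of a tool that satisfies that contract; delegating the actual three-way merge to RCS merge(1) is an assumption of the sketch, not something the code above requires.

#!/usr/bin/env python
# minimal sketch of an HGMERGE-compatible tool (assumes merge(1) is installed)
import os
import subprocess
import sys

def main(argv):
    # merge3() invokes us as: $HGMERGE <working file> <base copy> <other copy>
    local, base, other = argv[1:4]
    sys.stderr.write("merging %s (my %s, other %s)\n" %
                     (os.environ.get("HG_FILE", local),
                      os.environ.get("HG_FILE_MY_NODE", "?"),
                      os.environ.get("HG_FILE_OTHER_NODE", "?")))
    # merge(1) writes the merged result back into <working file> and exits
    # non-zero on conflict, which merge3() then reports as a failed merge
    return subprocess.call(["merge", local, base, other])

if __name__ == "__main__":
    sys.exit(main(sys.argv))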
@@ -1,202 +1,202 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "array bisect struct")
11 demandload(globals(), "array bisect struct")
12
12
13 class manifestflags(dict):
13 class manifestflags(dict):
14 def __init__(self, mapping={}):
14 def __init__(self, mapping={}):
15 dict.__init__(self, mapping)
15 dict.__init__(self, mapping)
16 def execf(self, f):
16 def execf(self, f):
17 "test for executable in manifest flags"
17 "test for executable in manifest flags"
18 return self.get(f, False)
18 return self.get(f, False)
19 def linkf(self, f):
19 def linkf(self, f):
20 "test for symlink in manifest flags"
20 "test for symlink in manifest flags"
21 return False
21 return False
22 def set(self, f, execf=False, linkf=False):
22 def set(self, f, execf=False, linkf=False):
23 self[f] = execf
23 self[f] = execf
24 def copy(self):
24 def copy(self):
25 return manifestflags(dict.copy(self))
25 return manifestflags(dict.copy(self))
26
26
27 class manifest(revlog):
27 class manifest(revlog):
28 def __init__(self, opener, defversion=REVLOGV0):
28 def __init__(self, opener, defversion=REVLOGV0):
29 self.mapcache = None
29 self.mapcache = None
30 self.listcache = None
30 self.listcache = None
31 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
31 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
32 defversion)
32 defversion)
33
33
34 def read(self, node):
34 def read(self, node):
35 if node == nullid: return {} # don't upset local cache
35 if node == nullid: return {} # don't upset local cache
36 if self.mapcache and self.mapcache[0] == node:
36 if self.mapcache and self.mapcache[0] == node:
37 return self.mapcache[1]
37 return self.mapcache[1]
38 text = self.revision(node)
38 text = self.revision(node)
39 map = {}
39 map = {}
40 flag = manifestflags()
40 flag = manifestflags()
41 self.listcache = array.array('c', text)
41 self.listcache = array.array('c', text)
42 lines = text.splitlines(1)
42 lines = text.splitlines(1)
43 for l in lines:
43 for l in lines:
44 (f, n) = l.split('\0')
44 (f, n) = l.split('\0')
45 map[f] = bin(n[:40])
45 map[f] = bin(n[:40])
46 flag[f] = (n[40:-1] == "x")
46 flag[f] = (n[40:-1] == "x")
47 self.mapcache = (node, map, flag)
47 self.mapcache = (node, map, flag)
48 return map
48 return map
49
49
50 def readflags(self, node):
50 def readflags(self, node):
51 if node == nullid: return {} # don't upset local cache
51 if node == nullid: return manifestflags() # don't upset local cache
52 if not self.mapcache or self.mapcache[0] != node:
52 if not self.mapcache or self.mapcache[0] != node:
53 self.read(node)
53 self.read(node)
54 return self.mapcache[2]
54 return self.mapcache[2]
55
55
56 def diff(self, a, b):
56 def diff(self, a, b):
57 return mdiff.textdiff(str(a), str(b))
57 return mdiff.textdiff(str(a), str(b))
58
58
59 def _search(self, m, s, lo=0, hi=None):
59 def _search(self, m, s, lo=0, hi=None):
60 '''return a tuple (start, end) that says where to find s within m.
60 '''return a tuple (start, end) that says where to find s within m.
61
61
62 If the string is found, m[start:end] is the line containing
62 If the string is found, m[start:end] is the line containing
63 that string. If start == end the string was not found and
63 that string. If start == end the string was not found and
64 they indicate the proper sorted insertion point. This was
64 they indicate the proper sorted insertion point. This was
65 taken from bisect_left, and modified to find line start/end as
65 taken from bisect_left, and modified to find line start/end as
66 it goes along.
66 it goes along.
67
67
68 m should be a buffer or a string
68 m should be a buffer or a string
69 s is a string'''
69 s is a string'''
70 def advance(i, c):
70 def advance(i, c):
71 while i < lenm and m[i] != c:
71 while i < lenm and m[i] != c:
72 i += 1
72 i += 1
73 return i
73 return i
74 lenm = len(m)
74 lenm = len(m)
75 if not hi:
75 if not hi:
76 hi = lenm
76 hi = lenm
77 while lo < hi:
77 while lo < hi:
78 mid = (lo + hi) // 2
78 mid = (lo + hi) // 2
79 start = mid
79 start = mid
80 while start > 0 and m[start-1] != '\n':
80 while start > 0 and m[start-1] != '\n':
81 start -= 1
81 start -= 1
82 end = advance(start, '\0')
82 end = advance(start, '\0')
83 if m[start:end] < s:
83 if m[start:end] < s:
84 # we know that after the null there are 40 bytes of sha1
84 # we know that after the null there are 40 bytes of sha1
85 # this translates to the bisect lo = mid + 1
85 # this translates to the bisect lo = mid + 1
86 lo = advance(end + 40, '\n') + 1
86 lo = advance(end + 40, '\n') + 1
87 else:
87 else:
88 # this translates to the bisect hi = mid
88 # this translates to the bisect hi = mid
89 hi = start
89 hi = start
90 end = advance(lo, '\0')
90 end = advance(lo, '\0')
91 found = m[lo:end]
91 found = m[lo:end]
92 if cmp(s, found) == 0:
92 if cmp(s, found) == 0:
93 # we know that after the null there are 40 bytes of sha1
93 # we know that after the null there are 40 bytes of sha1
94 end = advance(end + 40, '\n')
94 end = advance(end + 40, '\n')
95 return (lo, end+1)
95 return (lo, end+1)
96 else:
96 else:
97 return (lo, lo)
97 return (lo, lo)
98
98
99 def find(self, node, f):
99 def find(self, node, f):
100 '''look up entry for a single file efficiently.
100 '''look up entry for a single file efficiently.
101 return (node, flag) pair if found, (None, None) if not.'''
101 return (node, flag) pair if found, (None, None) if not.'''
102 if self.mapcache and node == self.mapcache[0]:
102 if self.mapcache and node == self.mapcache[0]:
103 return self.mapcache[1].get(f), self.mapcache[2].get(f)
103 return self.mapcache[1].get(f), self.mapcache[2].get(f)
104 text = self.revision(node)
104 text = self.revision(node)
105 start, end = self._search(text, f)
105 start, end = self._search(text, f)
106 if start == end:
106 if start == end:
107 return None, None
107 return None, None
108 l = text[start:end]
108 l = text[start:end]
109 f, n = l.split('\0')
109 f, n = l.split('\0')
110 return bin(n[:40]), n[40:-1] == 'x'
110 return bin(n[:40]), n[40:-1] == 'x'
111
111
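# --- illustration, not part of manifest.py --------------------------------
# Each manifest entry that _search() bisects over and find() parses is one
# line of the form "<path>\0<40-char hex filenode>[x]\n", where a trailing
# "x" before the newline records the executable bit.  The sketch below
# redoes the same parsing standalone, using binascii.unhexlify in place of
# node.bin so it runs outside mercurial; the entry itself is invented.
import binascii

line = "bin/tool\0" + "ab" * 20 + "x\n"      # hypothetical manifest entry
f, n = line.split('\0')
filenode = binascii.unhexlify(n[:40])        # 20-byte binary node id
execf = (n[40:-1] == "x")                    # exec flag, as in read()/find()
assert (f, execf) == ("bin/tool", True)
# find(node, f) above returns exactly such a (filenode, execf) pair, or
# (None, None) when _search() reports start == end (entry absent).
# ---------------------------------------------------------------------------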
112 def add(self, map, flags, transaction, link, p1=None, p2=None,
112 def add(self, map, flags, transaction, link, p1=None, p2=None,
113 changed=None):
113 changed=None):
114 # apply the changes collected during the bisect loop to our addlist
114 # apply the changes collected during the bisect loop to our addlist
115 # return a delta suitable for addrevision
115 # return a delta suitable for addrevision
116 def addlistdelta(addlist, x):
116 def addlistdelta(addlist, x):
117 # start from the bottom up
117 # start from the bottom up
118 # so changes to the offsets don't mess things up.
118 # so changes to the offsets don't mess things up.
119 i = len(x)
119 i = len(x)
120 while i > 0:
120 while i > 0:
121 i -= 1
121 i -= 1
122 start = x[i][0]
122 start = x[i][0]
123 end = x[i][1]
123 end = x[i][1]
124 if x[i][2]:
124 if x[i][2]:
125 addlist[start:end] = array.array('c', x[i][2])
125 addlist[start:end] = array.array('c', x[i][2])
126 else:
126 else:
127 del addlist[start:end]
127 del addlist[start:end]
128 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
128 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
129 for d in x ])
129 for d in x ])
130
130
131 # if we're using the listcache, make sure it is valid and
131 # if we're using the listcache, make sure it is valid and
132 # parented by the same node we're diffing against
132 # parented by the same node we're diffing against
133 if not changed or not self.listcache or not p1 or \
133 if not changed or not self.listcache or not p1 or \
134 self.mapcache[0] != p1:
134 self.mapcache[0] != p1:
135 files = map.keys()
135 files = map.keys()
136 files.sort()
136 files.sort()
137
137
138 # if this is changed to support newlines in filenames,
138 # if this is changed to support newlines in filenames,
139 # be sure to check the templates/ dir again (especially *-raw.tmpl)
139 # be sure to check the templates/ dir again (especially *-raw.tmpl)
140 text = ["%s\000%s%s\n" %
140 text = ["%s\000%s%s\n" %
141 (f, hex(map[f]), flags[f] and "x" or '')
141 (f, hex(map[f]), flags[f] and "x" or '')
142 for f in files]
142 for f in files]
143 self.listcache = array.array('c', "".join(text))
143 self.listcache = array.array('c', "".join(text))
144 cachedelta = None
144 cachedelta = None
145 else:
145 else:
146 addlist = self.listcache
146 addlist = self.listcache
147
147
148 # combine the changed lists into one list for sorting
148 # combine the changed lists into one list for sorting
149 work = [[x, 0] for x in changed[0]]
149 work = [[x, 0] for x in changed[0]]
150 work[len(work):] = [[x, 1] for x in changed[1]]
150 work[len(work):] = [[x, 1] for x in changed[1]]
151 work.sort()
151 work.sort()
152
152
153 delta = []
153 delta = []
154 dstart = None
154 dstart = None
155 dend = None
155 dend = None
156 dline = [""]
156 dline = [""]
157 start = 0
157 start = 0
158 # zero copy representation of addlist as a buffer
158 # zero copy representation of addlist as a buffer
159 addbuf = buffer(addlist)
159 addbuf = buffer(addlist)
160
160
161 # start with a readonly loop that finds the offset of
161 # start with a readonly loop that finds the offset of
162 # each line and creates the deltas
162 # each line and creates the deltas
163 for w in work:
163 for w in work:
164 f = w[0]
164 f = w[0]
165 # start/end will either delimit the existing entry or mark the insert point
165 # start/end will either delimit the existing entry or mark the insert point
166 start, end = self._search(addbuf, f, start)
166 start, end = self._search(addbuf, f, start)
167 if w[1] == 0:
167 if w[1] == 0:
168 l = "%s\000%s%s\n" % (f, hex(map[f]),
168 l = "%s\000%s%s\n" % (f, hex(map[f]),
169 flags[f] and "x" or '')
169 flags[f] and "x" or '')
170 else:
170 else:
171 l = ""
171 l = ""
172 if start == end and w[1] == 1:
172 if start == end and w[1] == 1:
173 # item we want to delete was not found, error out
173 # item we want to delete was not found, error out
174 raise AssertionError(
174 raise AssertionError(
175 _("failed to remove %s from manifest\n") % f)
175 _("failed to remove %s from manifest\n") % f)
176 if dstart != None and dstart <= start and dend >= start:
176 if dstart != None and dstart <= start and dend >= start:
177 if dend < end:
177 if dend < end:
178 dend = end
178 dend = end
179 if l:
179 if l:
180 dline.append(l)
180 dline.append(l)
181 else:
181 else:
182 if dstart != None:
182 if dstart != None:
183 delta.append([dstart, dend, "".join(dline)])
183 delta.append([dstart, dend, "".join(dline)])
184 dstart = start
184 dstart = start
185 dend = end
185 dend = end
186 dline = [l]
186 dline = [l]
187
187
188 if dstart != None:
188 if dstart != None:
189 delta.append([dstart, dend, "".join(dline)])
189 delta.append([dstart, dend, "".join(dline)])
190 # apply the delta to the addlist, and get a delta for addrevision
190 # apply the delta to the addlist, and get a delta for addrevision
191 cachedelta = addlistdelta(addlist, delta)
191 cachedelta = addlistdelta(addlist, delta)
192
192
193 # the delta is only valid if we've been processing the tip revision
193 # the delta is only valid if we've been processing the tip revision
194 if self.mapcache[0] != self.tip():
194 if self.mapcache[0] != self.tip():
195 cachedelta = None
195 cachedelta = None
196 self.listcache = addlist
196 self.listcache = addlist
197
197
198 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
198 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
199 p2, cachedelta)
199 p2, cachedelta)
200 self.mapcache = (n, map, flags)
200 self.mapcache = (n, map, flags)
201
201
202 return n
202 return n
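The manifestflags class above wraps the flags map with execf/linkf/set/copy accessors, and readflags() now returns a manifestflags instance even for the null node, so those methods are always available to callers. A minimal usage sketch, assuming the 2006-era package layout so that mercurial.manifest is importable; the file names are invented:

from mercurial.manifest import manifestflags   # assumes 2006-era mercurial on sys.path

flags = manifestflags()
flags.set('bin/hg', execf=True)         # record the executable bit via set()
flags.set('README')                     # execf defaults to False

assert flags.execf('bin/hg')            # query through execf(), not flags['bin/hg']
assert not flags.execf('README')
assert not flags.linkf('bin/hg')        # symlinks are not represented yet: always False

copy = flags.copy()
assert isinstance(copy, manifestflags)  # copy() preserves the manifestflags type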